From bcd3c724fdad7d9cf78297d82ac8eafba04d64b7 Mon Sep 17 00:00:00 2001 From: Darren Date: Mon, 4 Nov 2019 12:43:36 +0800 Subject: [PATCH] Bump to version 0.1.0 (#1) --- .github/workflows/unittest.yml | 27 ++ .gitignore | 1 + README.md | 83 +++++++ aiocqlengine/__init__.py | 1 + aiocqlengine/models.py | 142 +++++++++++ aiocqlengine/query.py | 436 +++++++++++++++++++++++++++++++++ poetry.lock | 304 +++++++++++++++++++++++ pyproject.toml | 10 +- src/aiocqlengine/__init__.py | 1 - tests/conftest.py | 51 ++++ tests/test_aiocqlengine.py | 90 ++++++- 11 files changed, 1140 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/unittest.yml create mode 100644 aiocqlengine/__init__.py create mode 100644 aiocqlengine/models.py create mode 100644 aiocqlengine/query.py create mode 100644 poetry.lock delete mode 100644 src/aiocqlengine/__init__.py create mode 100644 tests/conftest.py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 0000000..3bf4df9 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,27 @@ +on: [push] + +name: unittest + +jobs: + + unittest: + runs-on: ubuntu-latest + name: Unittest with pytest + services: + cassandra: + image: cassandra + ports: + - 9042:9042 + env: + CASS_DRIVER_NO_CYTHON: 1 + + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: | + pip install poetry + poetry install + - name: Run test + run: poetry run pytest diff --git a/.gitignore b/.gitignore index 4840363..5e4b3fd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Custom .idea/ +.venv/ # Created by https://www.gitignore.io/api/macos,python # Edit at https://www.gitignore.io/?templates=macos,python diff --git a/README.md b/README.md index e69de29..2874025 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,83 @@ +# aiocqlengine +Async wrapper for cqlengine of cassandra python driver. + +This project is built on [cassandra-python-driver](https://github.com/datastax/python-driver) and [aiocassandra](https://github.com/aio-libs/aiocassandra). 
+
+[![Actions Status](https://github.com/charact3/aiocqlengine/workflows/unittest/badge.svg)](https://github.com/charact3/aiocqlengine/actions)
+
+
+## Get Started
+
+```python
+import asyncio
+import uuid
+import os
+
+from aiocassandra import aiosession
+from aiocqlengine.models import AioModel
+from cassandra.cluster import Cluster
+from cassandra.cqlengine import columns, connection, management
+
+cluster = Cluster()
+session = cluster.connect()
+
+
+class User(AioModel):
+    user_id = columns.UUID(primary_key=True)
+    username = columns.Text()
+
+
+async def main():
+    aiosession(session)
+
+    # Set aiosession for cqlengine
+    session.set_keyspace('example_keyspace')
+    connection.set_session(session)
+
+    # Model.objects.create() and Model.create() in an async way:
+    user_id = uuid.uuid4()
+    await User.objects.async_create(user_id=user_id, username='user1')
+    # you can also use: await User.async_create(user_id=user_id, username='user1')
+
+    # Model.objects.all() and Model.all() in an async way:
+    print(list(await User.async_all()))
+    print(list(await User.objects.filter(user_id=user_id).async_all()))
+
+    # Model.objects.update() in an async way:
+    await User.objects(user_id=user_id).async_update(username='updated-user1')
+
+    # Model.objects.get() and Model.get() in an async way:
+    user = await User.objects.async_get(user_id=user_id)
+    assert user.user_id == (await User.async_get(user_id=user_id)).user_id
+    print(user, user.username)
+
+    # obj.save() in an async way:
+    user.username = 'saved-user1'
+    await user.async_save()
+
+    # obj.delete() in an async way:
+    await user.async_delete()
+
+    # The original synchronous API still works
+    print('Remaining users: ', len(User.objects.all()))
+
+
+def create_keyspace(keyspace):
+    os.environ['CQLENG_ALLOW_SCHEMA_MANAGEMENT'] = 'true'
+    connection.register_connection('cqlengine', session=session, default=True)
+    management.create_keyspace_simple(keyspace, replication_factor=1)
+    management.sync_table(User, keyspaces=[keyspace])
+
+
+create_keyspace('example_keyspace')
+
+loop = asyncio.get_event_loop()
+loop.run_until_complete(main())
+cluster.shutdown()
+loop.close()
+
+```
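+
+## Batch queries
+
+`AioBatchQuery` queues statements and runs them as one batch. The sketch
+below reuses the `User` model, the `uuid` import and the session setup from
+the example above; the same pattern is exercised in
+`tests/test_aiocqlengine.py`, and the `create_users()` coroutine is only
+illustrative.
+
+```python
+from aiocqlengine.query import AioBatchQuery
+
+
+async def create_users():
+    # Queue the inserts locally, then send them to Cassandra in one batch
+    batch_query = AioBatchQuery()
+    User.batch(batch_query).create(user_id=uuid.uuid4(), username='user-1')
+    User.batch(batch_query).create(user_id=uuid.uuid4(), username='user-2')
+    await batch_query.async_execute()
+```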
+
+
+## License
+This project is under the MIT license.
diff --git a/aiocqlengine/__init__.py b/aiocqlengine/__init__.py
new file mode 100644
index 0000000..3dc1f76
--- /dev/null
+++ b/aiocqlengine/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.1.0"
diff --git a/aiocqlengine/models.py b/aiocqlengine/models.py
new file mode 100644
index 0000000..10cdfa5
--- /dev/null
+++ b/aiocqlengine/models.py
@@ -0,0 +1,142 @@
+from cassandra.cqlengine.models import Model, PolymorphicModelException
+from cassandra.cqlengine.query import ValidationError
+
+from aiocqlengine.query import AioDMLQuery, AioQuerySet
+
+
+class AioModel(Model):
+    __abstract__ = True
+    __dmlquery__ = AioDMLQuery
+    __queryset__ = AioQuerySet
+
+    @classmethod
+    async def async_create(cls, **kwargs):
+        extra_columns = set(kwargs.keys()) - set(cls._columns.keys())
+        if extra_columns:
+            raise ValidationError(
+                "Incorrect columns passed: {0}".format(extra_columns))
+        return await cls.objects.async_create(**kwargs)
+
+    async def async_delete(self):
+        """
+        Deletes the object from the database.
+        """
+        await self.__dmlquery__(
+            self.__class__,
+            self,
+            batch=self._batch,
+            timestamp=self._timestamp,
+            consistency=self.__consistency__,
+            timeout=self._timeout,
+            conditional=self._conditional,
+            if_exists=self._if_exists,
+        ).async_delete()
+
+    @classmethod
+    async def async_all(cls):
+        """
+        Returns a queryset representing all stored objects.
+
+        This is a pass-through to the model objects().async_all()
+        """
+        return await cls.objects.async_all()
+
+    @classmethod
+    async def async_get(cls, *args, **kwargs):
+        """
+        Returns a single object based on the passed filter constraints.
+
+        This is a pass-through to the model objects().
+        :method:`~cqlengine.queries.get`.
+        """
+        return await cls.objects.async_get(*args, **kwargs)
+
+    async def async_save(self):
+        # handle polymorphic models
+        if self._is_polymorphic:
+            if self._is_polymorphic_base:
+                raise PolymorphicModelException(
+                    "cannot save polymorphic base model")
+            else:
+                setattr(self, self._discriminator_column_name,
+                        self.__discriminator_value__)
+
+        self.validate()
+        await self.__dmlquery__(
+            self.__class__,
+            self,
+            batch=self._batch,
+            ttl=self._ttl,
+            timestamp=self._timestamp,
+            consistency=self.__consistency__,
+            if_not_exists=self._if_not_exists,
+            conditional=self._conditional,
+            timeout=self._timeout,
+            if_exists=self._if_exists,
+        ).async_save()
+
+        self._set_persisted()
+        self._timestamp = None
+        return self
+
+    async def async_update(self, **values):
+        """
+        Performs an update on the model instance. You can pass in values to
+        set on the model for updating, or you can call without values to
+        execute an update against any modified fields.
+        If no fields on the model have been modified since loading,
+        no query will be performed. Model validation is performed normally.
+        Setting a value to `None` is equivalent to running a CQL `DELETE` on
+        that column.
+
+        It is possible to do a blind update, that is, to update a field without
+        having first selected the object out of the database.
+        See :ref:`Blind Updates `
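+
+        Illustrative example (the tests in this patch use the same pattern)::
+
+            user = await User.async_get(user_id=user_id)
+            await user.async_update(username='updated-user1')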
+        """
+        for column_id, v in values.items():
+            col = self._columns.get(column_id)
+
+            # check for nonexistent columns
+            if col is None:
+                raise ValidationError(
+                    "{0}.{1} has no column named: {2}".format(
+                        self.__module__, self.__class__.__name__, column_id))
+
+            # check for primary key update attempts
+            if col.is_primary_key:
+                current_value = getattr(self, column_id)
+                if v != current_value:
+                    raise ValidationError(
+                        "Cannot apply update to primary key '{0}' for {1}.{2}".
+                        format(column_id, self.__module__,
+                               self.__class__.__name__))
+
+            setattr(self, column_id, v)
+
+        # handle polymorphic models
+        if self._is_polymorphic:
+            if self._is_polymorphic_base:
+                raise PolymorphicModelException(
+                    "cannot update polymorphic base model")
+            else:
+                setattr(self, self._discriminator_column_name,
+                        self.__discriminator_value__)
+
+        self.validate()
+        await self.__dmlquery__(
+            self.__class__,
+            self,
+            batch=self._batch,
+            ttl=self._ttl,
+            timestamp=self._timestamp,
+            consistency=self.__consistency__,
+            conditional=self._conditional,
+            timeout=self._timeout,
+            if_exists=self._if_exists,
+        ).async_update()
+
+        self._set_persisted()
+
+        self._timestamp = None
+
+        return self
diff --git a/aiocqlengine/query.py b/aiocqlengine/query.py
new file mode 100644
index 0000000..3aa728a
--- /dev/null
+++ b/aiocqlengine/query.py
@@ -0,0 +1,436 @@
+"""
+Patch cqlengine, add async functions.
+"""
+from datetime import datetime, timedelta
+from warnings import warn
+import time
+
+import six
+from cassandra.cqlengine.query import (
+    DMLQuery,
+    ModelQuerySet,
+    check_applied,
+    SimpleStatement,
+    conn,
+    ValidationError,
+    EqualsOperator,
+    BatchQuery,
+)
+from cassandra.cqlengine import CQLEngineException
+from cassandra.cqlengine import columns
+from cassandra.cqlengine.statements import (
+    UpdateStatement,
+    DeleteStatement,
+    BaseCQLStatement,
+    InsertStatement,
+)
+
+
+async def _execute_statement(model,
+                             statement,
+                             consistency_level,
+                             timeout,
+                             connection=None):
+    """
+    Based on cassandra.cqlengine.query._execute_statement
+    """
+    params = statement.get_context()
+    s = SimpleStatement(
+        str(statement),
+        consistency_level=consistency_level,
+        fetch_size=statement.fetch_size,
+    )
+    if model._partition_key_index:
+        key_values = statement.partition_key_values(model._partition_key_index)
+        if not any(v is None for v in key_values):
+            parts = model._routing_key_from_values(
+                key_values,
+                conn.get_cluster(connection).protocol_version)
+            s.routing_key = parts
+            s.keyspace = model._get_keyspace()
+    connection = connection or model._get_connection()
+    return await execute(s, params, timeout=timeout, connection=connection)
+
+
+async def execute(
+        query,
+        params=None,
+        consistency_level=None,
+        timeout=conn.NOT_SET,
+        connection=None,
+):
+    """
+    Based on cassandra.cqlengine.connection.execute
+    """
+
+    _connection = conn.get_connection(connection)
+
+    if isinstance(query, SimpleStatement):
+        pass  #
+    elif isinstance(query, BaseCQLStatement):
+        params = query.get_context()
+        query = SimpleStatement(
+            str(query),
+            consistency_level=consistency_level,
+            fetch_size=query.fetch_size,
+        )
+    elif isinstance(query, str):
+        query = SimpleStatement(query, consistency_level=consistency_level)
+
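+    # ``execute_future`` is the awaitable execute() that aiocassandra's
+    # aiosession() patch adds to the driver Session; awaiting it yields the
+    # query result.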
+    result = await _connection.session.execute_future(query,
+                                                       params,
+                                                       timeout=timeout)
+
+    return result
+
+
+class AioDMLQuery(DMLQuery):
+    async def _async_execute(self, statement):
+        connection = (self.instance._get_connection()
+                      if self.instance else self.model._get_connection())
+        if self._batch:
+            if self._batch._connection:
+                if (not self._batch._connection_explicit and connection
+                        and connection != self._batch._connection):
+                    raise CQLEngineException(
+                        "BatchQuery queries must be executed "
+                        "on the same connection")
+            else:
+                # set the BatchQuery connection from the model
+                self._batch._connection = connection
+            return self._batch.add_query(statement)
+        else:
+            results = await _execute_statement(
+                self.model,
+                statement,
+                self._consistency,
+                self._timeout,
+                connection=connection,
+            )
+            if self._if_not_exists or self._if_exists or self._conditional:
+                check_applied(results)
+            return results
+
+    async def async_delete(self):
+        """ Deletes one instance """
+        if self.instance is None:
+            raise CQLEngineException("DML Query instance attribute is None")
+
+        ds = DeleteStatement(
+            self.column_family_name,
+            timestamp=self._timestamp,
+            conditionals=self._conditional,
+            if_exists=self._if_exists,
+        )
+        for name, col in self.model._primary_keys.items():
+            val = getattr(self.instance, name)
+            if val is None and not col.partition_key:
+                continue
+            ds.add_where(col, EqualsOperator(), val)
+        await self._async_execute(ds)
+
+    async def async_save(self):
+        """
+        Creates / updates a row.
+        This is a blind insert call.
+        All validation and cleaning needs to happen
+        prior to calling this.
+        """
+        if self.instance is None:
+            raise CQLEngineException("DML Query instance attribute is None")
+        assert type(self.instance) == self.model
+
+        nulled_fields = set()
+        if self.instance._has_counter or self.instance._can_update():
+            if self.instance._has_counter:
+                warn("'create' and 'save' actions on Counters are deprecated. "
+                     "A future version will disallow this. Use the 'update' "
+                     "mechanism instead.")
+            return await self.async_update()
+        else:
+            insert = InsertStatement(
+                self.column_family_name,
+                ttl=self._ttl,
+                timestamp=self._timestamp,
+                if_not_exists=self._if_not_exists,
+            )
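+            # static_save_only ends up True only when the model defines
+            # clustering keys but none of them are set on this instance; in
+            # that case regular (non-static, non-partition-key) columns are
+            # skipped below.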
+            static_save_only = (False if len(
+                self.instance._clustering_keys) == 0 else True)
+            for name, col in self.instance._clustering_keys.items():
+                static_save_only = static_save_only and col._val_is_null(
+                    getattr(self.instance, name, None))
+            for name, col in self.instance._columns.items():
+                if (static_save_only and not col.static
+                        and not col.partition_key):
+                    continue
+                val = getattr(self.instance, name, None)
+                if col._val_is_null(val):
+                    if self.instance._values[name].changed:
+                        nulled_fields.add(col.db_field_name)
+                    continue
+                if col.has_default and not self.instance._values[name].changed:
+                    # Ensure default columns included in a save()
+                    # are marked as explicit, to get them *persisted* properly
+                    self.instance._values[name].explicit = True
+                insert.add_assignment(col, getattr(self.instance, name, None))
+
+            # skip query execution if it's empty
+            # caused by pointless update queries
+            if not insert.is_empty:
+                await self._async_execute(insert)
+            # delete any nulled columns
+            if not static_save_only:
+                self._delete_null_columns()
+
+    async def async_update(self):
+        """
+        Updates a row.
+        This is a blind update call.
+        All validation and cleaning needs to happen
+        prior to calling this.
+        """
+        if self.instance is None:
+            raise CQLEngineException("DML Query instance attribute is None")
+        assert type(self.instance) == self.model
+        null_clustering_key = (False if len(
+            self.instance._clustering_keys) == 0 else True)
+        static_changed_only = True
+        statement = UpdateStatement(
+            self.column_family_name,
+            ttl=self._ttl,
+            timestamp=self._timestamp,
+            conditionals=self._conditional,
+            if_exists=self._if_exists,
+        )
+        for name, col in self.instance._clustering_keys.items():
+            null_clustering_key = null_clustering_key and col._val_is_null(
+                getattr(self.instance, name, None))
+
+        updated_columns = set()
+        # get defined fields and their column names
+        for name, col in self.model._columns.items():
+            # if clustering key is null, don't include non static columns
+            if (null_clustering_key and not col.static
+                    and not col.partition_key):
+                continue
+            if not col.is_primary_key:
+                val = getattr(self.instance, name, None)
+                val_mgr = self.instance._values[name]
+
+                if val is None:
+                    continue
+
+                if not val_mgr.changed and not isinstance(
+                        col, columns.Counter):
+                    continue
+
+                static_changed_only = static_changed_only and col.static
+                statement.add_update(col, val, previous=val_mgr.previous_value)
+                updated_columns.add(col.db_field_name)
+
+        if statement.assignments:
+            for name, col in self.model._primary_keys.items():
+                # only include clustering key if clustering key is not null,
+                # and non static columns are changed to avoid cql error
+                if (null_clustering_key
+                        or static_changed_only) and (not col.partition_key):
+                    continue
+                statement.add_where(col, EqualsOperator(),
+                                    getattr(self.instance, name))
+            await self._async_execute(statement)
+
+        if not null_clustering_key:
+            # remove conditions on fields that have been updated
+            delete_conditionals = ([
+                condition for condition in self._conditional
+                if condition.field not in updated_columns
+            ] if self._conditional else None)
+            self._delete_null_columns(delete_conditionals)
+
+
+class AioQuerySet(ModelQuerySet):
+    async def _async_execute_query(self):
+        if self._batch:
+            raise CQLEngineException("Only inserts, updates, "
+                                     "and deletes are available in batch mode")
+        if self._result_cache is None:
+            results = await self._async_execute(self._select_query())
+            self._result_generator = (i for i in results)
+            self._result_cache = []
+            self._construct_result = self._maybe_inject_deferred(
+                self._get_result_constructor())
+
+            # "DISTINCT COUNT()" is not supported in C* < 2.2,
+            # so we need to materialize all results to get
+            # len() and count() working with DISTINCT queries
+            if self._materialize_results or self._distinct_fields:
+                self._fill_result_cache()
+
+    async def _async_execute(self, statement):
+        if self._batch:
+            return self._batch.add_query(statement)
+        else:
+            connection = self._connection or self.model._get_connection()
+            result = await _execute_statement(
+                self.model,
+                statement,
+                self._consistency,
+                self._timeout,
+                connection=connection,
+            )
+            if self._if_not_exists or self._if_exists or self._conditional:
+                check_applied(result)
+            return result
+
+    async def async_create(self, **kwargs):
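+        # Build a new model instance that carries over this queryset's batch,
+        # ttl, consistency, if-not-exists/if-exists, timestamp and connection
+        # options, then persist it with async_save().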
+        return (await self.model(**kwargs).batch(self._batch).ttl(
+            self._ttl).consistency(self._consistency).if_not_exists(
+                self._if_not_exists).timestamp(self._timestamp).if_exists(
+                    self._if_exists).using(connection=self._connection
+                                           ).async_save())
+
+    async def async_all(self):
+        await self._async_execute_query()
+        return self
+
+    async def async_get(self, *args, **kwargs):
+        if args or kwargs:
+            return await self.filter(*args, **kwargs).async_get()
+
+        await self._async_execute_query()
+
+        # Check that the resultset only contains one element,
+        # avoiding sending a COUNT query
+        try:
+            self[1]
+            raise self.model.MultipleObjectsReturned("Multiple objects found")
+        except IndexError:
+            pass
+
+        try:
+            obj = self[0]
+        except IndexError:
+            raise self.model.DoesNotExist
+
+        return obj
+
+    async def async_update(self, **values):
+        if not values:
+            return
+
+        nulled_columns = set()
+        updated_columns = set()
+        us = UpdateStatement(
+            self.column_family_name,
+            where=self._where,
+            ttl=self._ttl,
+            timestamp=self._timestamp,
+            conditionals=self._conditional,
+            if_exists=self._if_exists,
+        )
+        for name, val in values.items():
+            col_name, col_op = self._parse_filter_arg(name)
+            col = self.model._columns.get(col_name)
+            # check for nonexistent columns
+            if col is None:
+                raise ValidationError(
+                    "{0}.{1} has no column named: {2}".format(
+                        self.__module__, self.model.__name__, col_name))
+            # check for primary key update attempts
+            if col.is_primary_key:
+                raise ValidationError(
+                    "Cannot apply update to primary key '{0}' for {1}.{2}".
+                    format(col_name, self.__module__, self.model.__name__))
+
+            if col_op == "remove" and isinstance(col, columns.Map):
+                if not isinstance(val, set):
+                    raise ValidationError(
+                        "Cannot apply update operation '{0}' on column '{1}' with value '{2}'. A set is required."
+                        .format(col_op, col_name, val))
+                val = {v: None for v in val}
+            else:
+                # we should not provide default values in this use case.
+                val = col.validate(val)
+
+            if val is None:
+                nulled_columns.add(col_name)
+                continue
+
+            us.add_update(col, val, operation=col_op)
+            updated_columns.add(col_name)
+
+        if us.assignments:
+            await self._async_execute(us)
+
+        if nulled_columns:
+            delete_conditional = ([
+                condition for condition in self._conditional
+                if condition.field not in updated_columns
+            ] if self._conditional else None)
+            ds = DeleteStatement(
+                self.column_family_name,
+                fields=nulled_columns,
+                where=self._where,
+                conditionals=delete_conditional,
+                if_exists=self._if_exists,
+            )
+            await self._async_execute(ds)
+
+
+class AioBatchQuery(BatchQuery):
+    async def async_execute(self):
+        if self._executed and self.warn_multiple_exec:
+            msg = "Batch executed multiple times."
+ if self._context_entered: + msg += (" If using the batch as a context manager, " + "there is no need to call execute directly.") + warn(msg) + self._executed = True + + if len(self.queries) == 0: + # Empty batch is a no-op + # except for callbacks + self._execute_callbacks() + return + + opener = ("BEGIN " + + (self.batch_type + " " if self.batch_type else "") + + " BATCH") + if self.timestamp: + + if isinstance(self.timestamp, six.integer_types): + ts = self.timestamp + elif isinstance(self.timestamp, (datetime, timedelta)): + ts = self.timestamp + if isinstance(self.timestamp, timedelta): + ts += datetime.now() # Apply timedelta + ts = int(time.mktime(ts.timetuple()) * 1e6 + ts.microsecond) + else: + raise ValueError("Batch expects a long, a timedelta, " + "or a datetime") + + opener += " USING TIMESTAMP {0}".format(ts) + + query_list = [opener] + parameters = {} + ctx_counter = 0 + for query in self.queries: + query.update_context_id(ctx_counter) + ctx = query.get_context() + ctx_counter += len(ctx) + query_list.append(" " + str(query)) + parameters.update(ctx) + + query_list.append("APPLY BATCH;") + + tmp = await execute( + "\n".join(query_list), + parameters, + self._consistency, + self._timeout, + connection=self._connection, + ) + check_applied(tmp) + + self.queries = [] + self._execute_callbacks() diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..ce83d04 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,304 @@ +[[package]] +category = "main" +description = "Simple threaded cassandra wrapper for asyncio" +name = "aiocassandra" +optional = false +python-versions = ">=3.4.0" +version = "2.0.1" + +[package.dependencies] +async-generator = "*" +cassandra-driver = "*" + +[[package]] +category = "main" +description = "Async generators and context managers for Python 3.5+" +name = "async-generator" +optional = false +python-versions = ">=3.5" +version = "1.10" + +[[package]] +category = "dev" +description = "Atomic file writes." +name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0" + +[[package]] +category = "dev" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.3.0" + +[[package]] +category = "main" +description = "Python driver for Cassandra" +name = "cassandra-driver" +optional = false +python-versions = "*" +version = "3.20.0" + +[package.dependencies] +futures = "*" +six = ">=1.9" + +[[package]] +category = "dev" +description = "Cross-platform colored terminal text." 
+marker = "sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.4.1" + +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" +version = "4.5.4" + +[[package]] +category = "main" +description = "Backport of the concurrent.futures package from Python 3.2" +name = "futures" +optional = false +python-versions = "*" +version = "3.1.1" + +[[package]] +category = "dev" +description = "Read metadata from Python packages" +marker = "python_version < \"3.8\"" +name = "importlib-metadata" +optional = false +python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" +version = "0.23" + +[package.dependencies] +zipp = ">=0.5" + +[[package]] +category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" +optional = false +python-versions = "*" +version = "0.6.1" + +[[package]] +category = "dev" +description = "More routines for operating on iterables, beyond itertools" +name = "more-itertools" +optional = false +python-versions = ">=3.4" +version = "7.2.0" + +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.2" + +[package.dependencies] +pyparsing = ">=2.0.2" +six = "*" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.0" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.8.0" + +[[package]] +category = "dev" +description = "Python style guide checker" +name = "pycodestyle" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.5.0" + +[[package]] +category = "dev" +description = "Python docstring style checker" +name = "pydocstyle" +optional = false +python-versions = ">=3.4" +version = "4.0.1" + +[package.dependencies] +snowballstemmer = "*" + +[[package]] +category = "dev" +description = "passive checker of Python programs" +name = "pyflakes" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.1.1" + +[[package]] +category = "dev" +description = "Pylava -- Code audit tool for Python" +name = "pylava" +optional = false +python-versions = "*" +version = "0.2.2" + +[package.dependencies] +mccabe = ">=0.5.2" +pycodestyle = ">=2.3.1" +pydocstyle = ">=2.0.0" +pyflakes = ">=1.5.0" + +[[package]] +category = "dev" +description = "Python parsing module" +name = "pyparsing" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.2" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=3.5" +version = "5.2.2" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=17.4.0" +colorama = "*" +more-itertools = ">=4.0.0" +packaging = "*" +pluggy = ">=0.12,<1.0" +py = ">=1.5.0" +wcwidth = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[[package]] +category = 
"dev" +description = "Pytest support for asyncio." +name = "pytest-asyncio" +optional = false +python-versions = ">= 3.5" +version = "0.10.0" + +[package.dependencies] +pytest = ">=3.0.6" + +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8.1" + +[package.dependencies] +coverage = ">=4.4" +pytest = ">=3.6" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "1.12.0" + +[[package]] +category = "dev" +description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." +name = "snowballstemmer" +optional = false +python-versions = "*" +version = "2.0.0" + +[[package]] +category = "dev" +description = "Measures number of Terminal column cells of wide-character codes" +name = "wcwidth" +optional = false +python-versions = "*" +version = "0.1.7" + +[[package]] +category = "dev" +description = "A formatter for Python code." +name = "yapf" +optional = false +python-versions = "*" +version = "0.28.0" + +[[package]] +category = "dev" +description = "Backport of pathlib-compatible object wrapper for zip files" +marker = "python_version < \"3.8\"" +name = "zipp" +optional = false +python-versions = ">=2.7" +version = "0.6.0" + +[package.dependencies] +more-itertools = "*" + +[metadata] +content-hash = "236059b911383f33aaea4a0f164b377c5b48168e13503494cc5236b5f209b803" +python-versions = "^3.6" + +[metadata.hashes] +aiocassandra = ["82a91c8766d9e23b23775ed14eb6322a6f0bc22ddcc2babb17a69e31b46467a9", "bd9b95c60e0c729266041c4f5407ccf07c647f945bf2bfdb889989222ba14578"] +async-generator = ["01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", "6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"] +atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] +attrs = ["08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"] +cassandra-driver = ["83ec9e122c2b5fb11aa9fa758faee7710f2eae61bdce297190ddce114c336cab"] +colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] +coverage = ["08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", "0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", "141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", "19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", "23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", "245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", "331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", "386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", "3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", "60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", "63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", "6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", "6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", "7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", "826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", 
"93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", "9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", "af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", "bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", "bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", "c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", "dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", "df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", "e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", "e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", "e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", "eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", "eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", "ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", "efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", "fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", "ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025"] +futures = ["3a44f286998ae64f0cc083682fcfec16c406134a81a589a5de445d7bb7c2751b", "51ecb45f0add83c806c68e4b06106f90db260585b25ef2abfcda0bd95c0132fd", "c4884a65654a7c45435063e14ae85280eb1f111d94e542396717ba9828c4337f"] +importlib-metadata = ["aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", "d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"] +mccabe = ["ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"] +more-itertools = ["409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", "92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"] +packaging = ["28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", "d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"] +pluggy = ["0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", "fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"] +py = ["64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"] +pycodestyle = ["95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", "e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"] +pydocstyle = ["04c84e034ebb56eb6396c820442b8c4499ac5eb94a3bda88951ac3dc519b6058", "66aff87ffe34b1e49bff2dd03a88ce6843be2f3346b0c9814410d34987fbab59"] +pyflakes = ["17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", "d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"] +pylava = ["b2fc881e9112a36f5a8a97bdbbedfa1704bbb65c34afa630e6df31532ae20994", "dcdecb6668cb8c4a38e0ee4fdb5f4d9488793af57bc040d89b0d1142f13b8622"] +pyparsing = ["6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", "d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"] +pytest = ["27abc3fef618a01bebb1f0d6d303d2816a99aa87a5968ebc32fe971be91eb1e6", "58cee9e09242937e136dbb3dab466116ba20d6b7828c7620f23947f37eb4dae4"] +pytest-asyncio = ["9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf", "d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"] +pytest-cov = ["cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", "cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"] +six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", 
"d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] +snowballstemmer = ["209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", "df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"] +wcwidth = ["3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", "f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"] +yapf = ["02ace10a00fa2e36c7ebd1df2ead91dbfbd7989686dc4ccbdc549e95d19f5780", "6f94b6a176a7c114cfa6bad86d40f259bbe0f10cf2fa7f2f4b3596fc5802a41b"] +zipp = ["3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", "f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"] diff --git a/pyproject.toml b/pyproject.toml index 86eef12..5d2324b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,20 @@ [tool.poetry] name = "aiocqlengine" -version = "0.0.1" +version = "0.1.0" description = "" authors = ["Darren "] [tool.poetry.dependencies] python = "^3.6" +cassandra-driver = "^3.20" +aiocassandra = "^2.0" [tool.poetry.dev-dependencies] -pytest = "^3.0" +pytest = "^5.2" +pytest-asyncio = "^0.10.0" +pytest-cov = "^2.8" +pylava = "^0.2.2" +yapf = "^0.28.0" [build-system] requires = ["poetry>=0.12"] diff --git a/src/aiocqlengine/__init__.py b/src/aiocqlengine/__init__.py deleted file mode 100644 index b794fd4..0000000 --- a/src/aiocqlengine/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '0.1.0' diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..15a7ead --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,51 @@ +import asyncio +import os + +import pytest +from cassandra.cluster import Cluster +from cassandra.cqlengine import management +from cassandra.cqlengine import connection as cqlengine_connection + +from aiocassandra import aiosession + +asyncio.set_event_loop(None) + + +@pytest.fixture +def cluster(): + cluster = Cluster() + yield cluster + cluster.shutdown() + + +@pytest.fixture +def event_loop(): + loop = asyncio.new_event_loop() + yield loop + loop.close() + + +@pytest.fixture +def cassandra(cluster, event_loop): + session = cluster.connect() + return aiosession(session, loop=event_loop) + + +@pytest.fixture +def cqlengine_management(cassandra): + """Setup cqlengine management + """ + # create test keyspace + os.environ["CQLENG_ALLOW_SCHEMA_MANAGEMENT"] = "true" + test_keyspace = "test_async_cqlengine" + cqlengine_connection.register_connection("cqlengine", + session=cassandra, + default=True) + management.create_keyspace_simple(test_keyspace, replication_factor=1) + + # setup cqlengine session + cassandra.set_keyspace(test_keyspace) + cqlengine_connection.set_session(cassandra) + yield management + management.drop_keyspace(test_keyspace) + cqlengine_connection.unregister_connection("cqlengine") diff --git a/tests/test_aiocqlengine.py b/tests/test_aiocqlengine.py index d5e7289..052b4b8 100644 --- a/tests/test_aiocqlengine.py +++ b/tests/test_aiocqlengine.py @@ -1,5 +1,89 @@ -from aiocqlengine import __version__ +import uuid +import pytest +from cassandra.cqlengine import columns -def test_version(): - assert __version__ == '0.1.0' +from aiocqlengine.models import AioModel +from aiocqlengine.query import AioBatchQuery + + +class User(AioModel): + user_id = columns.UUID(primary_key=True) + username = columns.Text() + + +@pytest.mark.asyncio +async def test_queryset_async_functions(cqlengine_management): + """test cqlengine Model async functions: + Model.objects.async_get() + Model.objects.async_all() + Model.objects.async_create() + 
Model.objects(id=obj_id).async_update() + """ + cqlengine_management.sync_table(User) + + # test: Model.objects.async_create(), Model.objects.async_all(), Model.objects.async_get() + username1 = "test-username-1" + await User.objects.async_create(user_id=uuid.uuid4(), username=username1) + users = await User.objects.async_all() + user = users[0] + _user = await User.objects.async_get(user_id=user.user_id) + assert user.username == _user.username == username1 + + # test DML query: Model.objects(id=obj_id).async_update() + username2 = "test-username-2" + await User.objects(user_id=user.user_id).async_update(username=username2) + updated_user = await User.objects.async_get(user_id=user.user_id) + assert updated_user.username == username2 + + +@pytest.mark.asyncio +async def test_model_async_functions(cqlengine_management): + """test cqlengine Model async functions: + Model.async_get() + Model.async_all() + Model.async_create() + obj.async_update() + obj.async_save() + obj.async_delete() + """ + cqlengine_management.sync_table(User) + + # test: Model.async_create(), Model.async_all(), Model.async_get() + username1 = "test-username-1" + await User.async_create(user_id=uuid.uuid4(), username=username1) + users = await User.async_all() + user = users[0] + _user = await User.async_get(user_id=user.user_id) + assert user.username == _user.username == username1 + + # test: obj.async_save() + username2 = "test-username-2" + user.username = username2 + await user.async_save() + _user = await User.async_get(user_id=user.user_id) + assert user.username == _user.username == username2 + + # test: obj.async_update() + username3 = "test-username-3" + await user.async_update(username=username3) + _user = await User.async_get(user_id=user.user_id) + assert user.username == _user.username == username3 + + # test: obj.async_delete() + await user.async_delete() + assert len(await User.objects.async_all()) == 0 + + +@pytest.mark.asyncio +async def test_batch_query_async_execute(cqlengine_management): + cqlengine_management.sync_table(User) + batch_query = AioBatchQuery() + User.batch(batch_query).create(user_id=uuid.uuid4(), username="user-1") + User.batch(batch_query).create(user_id=uuid.uuid4(), username="user-2") + User.batch(batch_query).create(user_id=uuid.uuid4(), username="user-3") + await batch_query.async_execute() + + users = await User.async_all() + username_set = {user.username for user in users} + assert username_set == {"user-1", "user-2", "user-3"}