diff --git a/.github/workflows/minos-broker-kafka-tests.yml b/.github/workflows/minos-broker-kafka-tests.yml index 9fca0d856..0828e490c 100644 --- a/.github/workflows/minos-broker-kafka-tests.yml +++ b/.github/workflows/minos-broker-kafka-tests.yml @@ -20,16 +20,6 @@ jobs: working-directory: packages/plugins/minos-broker-kafka services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - zookeeper: image: wurstmeister/zookeeper:latest ports: @@ -44,10 +34,7 @@ jobs: KAFKA_ADVERTISED_HOST_NAME: kafka KAFKA_DELETE_TOPIC_ENABLE: "true" env: - MINOS_BROKER_QUEUE_HOST: postgres MINOS_BROKER_HOST: kafka - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres steps: - name: Check out repository code diff --git a/.github/workflows/minos-broker-rabbitmq-tests.yml b/.github/workflows/minos-broker-rabbitmq-tests.yml index 65db8140c..cf21e6258 100644 --- a/.github/workflows/minos-broker-rabbitmq-tests.yml +++ b/.github/workflows/minos-broker-rabbitmq-tests.yml @@ -18,28 +18,13 @@ jobs: defaults: run: working-directory: packages/plugins/minos-broker-rabbitmq - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - rabbitmq: image: rabbitmq:3 ports: - - "5672:5672" - + - 5672:5672 env: - MINOS_BROKER_QUEUE_HOST: postgres MINOS_BROKER_HOST: rabbitmq - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres steps: - name: Check out repository code diff --git a/.github/workflows/minos-database-aiopg-publish.yml b/.github/workflows/minos-database-aiopg-publish.yml new file mode 100644 index 000000000..95d9c4760 --- /dev/null +++ b/.github/workflows/minos-database-aiopg-publish.yml @@ -0,0 +1,33 @@ +name: 
"Publish: minos-database-aiopg" + +on: + push: + branches: + - '*.*.x' + paths: + - 'packages/plugins/minos-database-aiopg/**' + +jobs: + deploy: + runs-on: ubuntu-latest + container: python:3.9-buster + defaults: + run: + working-directory: packages/plugins/minos-database-aiopg + + steps: + + - name: Check out repository code + uses: actions/checkout@v2 + + - name: Install Poetry + uses: snok/install-poetry@v1 + + - name: Install dependencies + run: make install + + - name: Publish package + run: make release + env: + POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} + POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/minos-database-aiopg-tests.yml b/.github/workflows/minos-database-aiopg-tests.yml new file mode 100644 index 000000000..9dbc735cc --- /dev/null +++ b/.github/workflows/minos-database-aiopg-tests.yml @@ -0,0 +1,64 @@ +name: "Test: minos-database-aiopg" + +on: + push: + branches: + - main + - '*.*.x' + pull_request: + paths: + - 'packages/plugins/minos-database-aiopg/**' + - 'packages/core/minos-microservice-aggregate/**' + - 'packages/core/minos-microservice-networks/**' + - 'packages/core/minos-microservice-common/**' + +jobs: + build: + runs-on: ubuntu-latest + container: python:3.9-buster + defaults: + run: + working-directory: packages/plugins/minos-database-aiopg + + services: + postgres: + image: postgres + env: + POSTGRES_USER: minos + POSTGRES_PASSWORD: min0s + POSTGRES_DB: order_db + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + + env: + MINOS_DATABASES_DEFAULT_HOST: postgres + + steps: + - name: Check out repository code + uses: actions/checkout@v2 + + - name: Install Poetry + uses: snok/install-poetry@v1 + + - name: Install dependencies + run: make install + + - name: Lint package + run: make lint + + - name: Test package with coverage + run: make coverage + + - name: Publish coverage + uses: 
codecov/codecov-action@v2 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ./packages/plugins/minos-database-aiopg/coverage.xml + fail_ci_if_error: true + + - name: Generate documentation + run: make docs + + - name: Generate build + run: make dist diff --git a/.github/workflows/minos-http-aiohttp-tests.yml b/.github/workflows/minos-http-aiohttp-tests.yml index 3d83928ae..d9a164f91 100644 --- a/.github/workflows/minos-http-aiohttp-tests.yml +++ b/.github/workflows/minos-http-aiohttp-tests.yml @@ -19,23 +19,6 @@ jobs: run: working-directory: packages/plugins/minos-http-aiohttp - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - env: - MINOS_BROKER_QUEUE_HOST: postgres - MINOS_BROKER_HOST: kafka - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.github/workflows/minos-microservice-aggregate-tests.yml b/.github/workflows/minos-microservice-aggregate-tests.yml index 1c731a5dc..4b8604ea6 100644 --- a/.github/workflows/minos-microservice-aggregate-tests.yml +++ b/.github/workflows/minos-microservice-aggregate-tests.yml @@ -19,22 +19,6 @@ jobs: run: working-directory: packages/core/minos-microservice-aggregate - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - env: - MINOS_BROKER_QUEUE_HOST: postgres - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.github/workflows/minos-microservice-common-tests.yml b/.github/workflows/minos-microservice-common-tests.yml index 92e2af7f5..cc89ccefb 100644 
--- a/.github/workflows/minos-microservice-common-tests.yml +++ b/.github/workflows/minos-microservice-common-tests.yml @@ -17,21 +17,6 @@ jobs: run: working-directory: packages/core/minos-microservice-common - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - env: - MINOS_DATABASES_DEFAULT_HOST: postgres - MINOS_DATABASES_QUERY_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.github/workflows/minos-microservice-cqrs-tests.yml b/.github/workflows/minos-microservice-cqrs-tests.yml index dc2644ba4..1ab963a4a 100644 --- a/.github/workflows/minos-microservice-cqrs-tests.yml +++ b/.github/workflows/minos-microservice-cqrs-tests.yml @@ -20,22 +20,6 @@ jobs: run: working-directory: packages/core/minos-microservice-cqrs - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - env: - MINOS_BROKER_QUEUE_HOST: postgres - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.github/workflows/minos-microservice-networks-tests.yml b/.github/workflows/minos-microservice-networks-tests.yml index 49a642b3e..5cf23e011 100644 --- a/.github/workflows/minos-microservice-networks-tests.yml +++ b/.github/workflows/minos-microservice-networks-tests.yml @@ -18,23 +18,6 @@ jobs: run: working-directory: packages/core/minos-microservice-networks - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s 
--health-retries 5 - - env: - MINOS_BROKER_QUEUE_HOST: postgres - MINOS_BROKER_HOST: kafka - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.github/workflows/minos-router-graphql-tests.yml b/.github/workflows/minos-router-graphql-tests.yml index e830f4370..f36c3617b 100644 --- a/.github/workflows/minos-router-graphql-tests.yml +++ b/.github/workflows/minos-router-graphql-tests.yml @@ -19,23 +19,6 @@ jobs: run: working-directory: packages/plugins/minos-router-graphql - services: - postgres: - image: postgres - env: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - env: - MINOS_BROKER_QUEUE_HOST: postgres - MINOS_BROKER_HOST: kafka - MINOS_REPOSITORY_HOST: postgres - MINOS_SNAPSHOT_HOST: postgres - steps: - name: Check out repository code uses: actions/checkout@v2 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 500b9a919..84470da60 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,6 +44,13 @@ repos: files: ^packages/plugins/minos-broker-kafka/ language: system + - id: minos-database-aiopg-check + pass_filenames: false + entry: make --directory=packages/plugins/minos-database-aiopg check + name: Check minos-database-aiopg + files: ^packages/plugins/minos-database-aiopg/ + language: system + - id: minos-discovery-minos-check pass_filenames: false entry: make --directory=packages/plugins/minos-discovery-minos check diff --git a/.sonarcloud.properties b/.sonarcloud.properties index f65be1b2a..566fa3ed0 100644 --- a/.sonarcloud.properties +++ b/.sonarcloud.properties @@ -1,3 +1,3 @@ sonar.python.version=3.9 sonar.exclusions=tutorials/** -sonar.cpd.exclusions=**/tests/** \ No newline at end of file +sonar.cpd.exclusions=**/tests/**, **/testing/** \ No newline at end of file diff --git 
a/packages/core/minos-microservice-aggregate/minos/aggregate/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/__init__.py index eb1234240..3c99d6c3a 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/__init__.py @@ -29,7 +29,6 @@ RootEntity, ) from .events import ( - AiopgEventDatabaseOperationFactory, DatabaseEventRepository, Event, EventDatabaseOperationFactory, @@ -59,12 +58,7 @@ Ordering, ) from .snapshots import ( - AiopgSnapshotDatabaseOperationFactory, - AiopgSnapshotQueryDatabaseOperationBuilder, - DatabaseSnapshotReader, DatabaseSnapshotRepository, - DatabaseSnapshotSetup, - DatabaseSnapshotWriter, InMemorySnapshotRepository, SnapshotDatabaseOperationFactory, SnapshotEntry, @@ -73,7 +67,6 @@ ) from .transactions import ( TRANSACTION_CONTEXT_VAR, - AiopgTransactionDatabaseOperationFactory, DatabaseTransactionRepository, InMemoryTransactionRepository, TransactionDatabaseOperationFactory, diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/entities/collections.py b/packages/core/minos-microservice-aggregate/minos/aggregate/entities/collections.py index 3925a7784..62da179b2 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/entities/collections.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/entities/collections.py @@ -140,6 +140,8 @@ def decode_data(cls, decoder: DataDecoder, target: Any, type_: ModelType, **kwar :param type_: The data type. :return: A decoded instance. 
""" + data_cls = get_args(type_.type_hints["data"])[1] + target = {decoder.build(v, data_cls, **kwargs) for v in target} target = {str(v["uuid"]): v for v in target} decoded = decoder.build(target, type_.type_hints["data"], **kwargs) return cls(decoded, additional_type_hints=type_.type_hints) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/entities/refs/extractors.py b/packages/core/minos-microservice-aggregate/minos/aggregate/entities/refs/extractors.py index 298727725..96d22da41 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/entities/refs/extractors.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/entities/refs/extractors.py @@ -61,7 +61,11 @@ def _build(self, value: Any, type_: type, ans: dict[str, set[Ref]]) -> None: self._build_iterable(value.values(), get_args(type_)[1], ans) elif isinstance(value, Ref): - cls = value.data_cls or get_args(type_)[0] + cls = value.data_cls + if cls is None and len(args := get_args(type_)): + cls = args[0] + if cls is None and len(args := get_args(type_.type_hints["data"])): + cls = args[0] name = cls.__name__ ans[name].add(value) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/__init__.py index 30785fd95..08f4df4da 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/__init__.py @@ -10,7 +10,6 @@ Event, ) from .repositories import ( - AiopgEventDatabaseOperationFactory, DatabaseEventRepository, EventDatabaseOperationFactory, EventRepository, diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/__init__.py index 779f5963e..d4c3e79b1 100644 --- 
a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/__init__.py @@ -2,7 +2,6 @@ EventRepository, ) from .database import ( - AiopgEventDatabaseOperationFactory, DatabaseEventRepository, EventDatabaseOperationFactory, ) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/abc.py index f3fd7f593..de03ccfd5 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/abc.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/abc.py @@ -13,6 +13,7 @@ suppress, ) from typing import ( + TYPE_CHECKING, AsyncIterator, Awaitable, Optional, @@ -31,6 +32,7 @@ NotProvidedException, PoolFactory, SetupMixin, + classname, ) from minos.networks import ( BrokerMessageV1, @@ -65,6 +67,11 @@ Event, ) +if TYPE_CHECKING: + from ...entities import ( + RootEntity, + ) + @Injectable("event_repository") class EventRepository(ABC, SetupMixin): @@ -233,7 +240,7 @@ async def _send_events(self, event: Event): async def select( self, uuid: Optional[UUID] = None, - name: Optional[str] = None, + name: Optional[Union[str, type[RootEntity]]] = None, version: Optional[int] = None, version_lt: Optional[int] = None, version_gt: Optional[int] = None, @@ -268,6 +275,8 @@ async def select( :param transaction_uuid_in: The destination transaction identifier must be equal to one of the given values. :return: A list of entries. 
""" + if isinstance(name, type): + name = classname(name) generator = self._select( uuid=uuid, name=name, diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/__init__.py index a16590570..c37452705 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgEventDatabaseOperationFactory, EventDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/__init__.py index 1d69d3be1..c78f0abb4 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( EventDatabaseOperationFactory, ) -from .aiopg import ( - AiopgEventDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/abc.py index 10866aa5c..80068df56 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/abc.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/abc.py @@ -2,6 +2,9 @@ ABC, abstractmethod, ) +from collections.abc import ( + Iterable, +) from datetime import ( datetime, ) @@ -26,16 +29,16 @@ class 
EventDatabaseOperationFactory(DatabaseOperationFactory, ABC): """Event Database Operation Factory base class.""" @abstractmethod - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the database operation to create the event table. :return: A ``DatabaseOperation`` instance.s """ @abstractmethod - def build_submit_row( + def build_submit( self, - transaction_uuids: tuple[UUID], + transaction_uuids: Iterable[UUID], uuid: UUID, action: Action, name: str, @@ -63,7 +66,7 @@ def build_submit_row( # noinspection PyShadowingBuiltins @abstractmethod - def build_select_rows( + def build_query( self, uuid: Optional[UUID] = None, name: Optional[str] = None, @@ -104,7 +107,7 @@ def build_select_rows( """ @abstractmethod - def build_select_max_id(self) -> DatabaseOperation: + def build_query_offset(self) -> DatabaseOperation: """Build the database operation to get the maximum identifier. :return: A ``DatabaseOperation`` instance. diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/impl.py b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/impl.py index 3f45751f1..0f549e33e 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/impl.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/impl.py @@ -9,7 +9,6 @@ from minos.common import ( NULL_UUID, - Config, DatabaseMixin, DatabaseOperation, IntegrityException, @@ -32,23 +31,24 @@ class DatabaseEventRepository(DatabaseMixin[EventDatabaseOperationFactory], EventRepository): """Database-based implementation of the event repository class.""" - @classmethod - def _from_config(cls, config: Config, **kwargs) -> Optional[EventRepository]: - return super()._from_config(config, database_key=None, **kwargs) + def __init__(self, *args, database_key: Optional[tuple[str]] = None, **kwargs): + if database_key is 
None: + database_key = ("aggregate", "event") + super().__init__(*args, database_key=database_key, **kwargs) async def _setup(self): """Setup miscellaneous repository things. :return: This method does not return anything. """ - operation = self.operation_factory.build_create_table() - await self.submit_query(operation) + operation = self.database_operation_factory.build_create() + await self.execute_on_database(operation) async def _submit(self, entry: EventEntry, **kwargs) -> EventEntry: operation = await self._build_submit_operation(entry) try: - response = await self.submit_query_and_fetchone(operation) + response = await self.execute_on_database_and_fetch_one(operation) except IntegrityException: raise EventRepositoryConflictException( f"{entry!r} could not be submitted due to a key (uuid, version, transaction) collision", @@ -69,15 +69,17 @@ async def _build_submit_operation(self, entry: EventEntry) -> DatabaseOperation: else: transaction_uuids = (NULL_UUID,) - return self.operation_factory.build_submit_row(transaction_uuids=transaction_uuids, **entry.as_raw(), lock=lock) + return self.database_operation_factory.build_submit( + transaction_uuids=transaction_uuids, **entry.as_raw(), lock=lock + ) async def _select(self, streaming_mode: Optional[bool] = None, **kwargs) -> AsyncIterator[EventEntry]: - operation = self.operation_factory.build_select_rows(**kwargs) - async for row in self.submit_query_and_iter(operation, streaming_mode=streaming_mode): + operation = self.database_operation_factory.build_query(**kwargs) + async for row in self.execute_on_database_and_fetch_all(operation, streaming_mode=streaming_mode): yield EventEntry(*row) @property async def _offset(self) -> int: - operation = self.operation_factory.build_select_max_id() - row = await self.submit_query_and_fetchone(operation) + operation = self.database_operation_factory.build_query_offset() + row = await self.execute_on_database_and_fetch_one(operation) return row[0] or 0 diff --git 
a/packages/core/minos-microservice-aggregate/minos/aggregate/exceptions.py b/packages/core/minos-microservice-aggregate/minos/aggregate/exceptions.py index b0d8790ba..6958a2a48 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/exceptions.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/exceptions.py @@ -40,7 +40,7 @@ class TransactionRepositoryException(AggregateException): class TransactionRepositoryConflictException(TransactionRepositoryException): - """Exception to be raised when a transactions has invalid status.""" + """Exception to be raised when a transaction has invalid status.""" class TransactionNotFoundException(TransactionRepositoryException): diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/__init__.py index 2e568ba02..fa4e34c31 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/__init__.py @@ -1,20 +1,11 @@ -from .abc import ( - SnapshotRepository, -) -from .database import ( - AiopgSnapshotDatabaseOperationFactory, - AiopgSnapshotQueryDatabaseOperationBuilder, - DatabaseSnapshotReader, - DatabaseSnapshotRepository, - DatabaseSnapshotSetup, - DatabaseSnapshotWriter, - SnapshotDatabaseOperationFactory, -) from .entries import ( SnapshotEntry, ) -from .memory import ( +from .repositories import ( + DatabaseSnapshotRepository, InMemorySnapshotRepository, + SnapshotDatabaseOperationFactory, + SnapshotRepository, ) from .services import ( SnapshotService, diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/__init__.py deleted file mode 100644 index 038c6d32c..000000000 --- 
a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .abc import ( - DatabaseSnapshotSetup, -) -from .api import ( - DatabaseSnapshotRepository, -) -from .factories import ( - AiopgSnapshotDatabaseOperationFactory, - AiopgSnapshotQueryDatabaseOperationBuilder, - SnapshotDatabaseOperationFactory, -) -from .readers import ( - DatabaseSnapshotReader, -) -from .writers import ( - DatabaseSnapshotWriter, -) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/abc.py deleted file mode 100644 index 588ea7f6c..000000000 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/abc.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import ( - annotations, -) - -from typing import ( - Type, - TypeVar, -) - -from minos.common import ( - Config, - DatabaseMixin, -) - -from .factories import ( - SnapshotDatabaseOperationFactory, -) - - -class DatabaseSnapshotSetup(DatabaseMixin[SnapshotDatabaseOperationFactory]): - """Minos Snapshot Setup Class""" - - @classmethod - def _from_config(cls: Type[T], config: Config, **kwargs) -> T: - return cls(database_key=None, **kwargs) - - async def _setup(self) -> None: - operation = self.operation_factory.build_create_table() - await self.submit_query(operation) - - -T = TypeVar("T", bound=DatabaseSnapshotSetup) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/api.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/api.py deleted file mode 100644 index 6872540ce..000000000 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/api.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import ( - annotations, -) - -from typing import ( - TYPE_CHECKING, - AsyncIterator, - Awaitable, -) - -from minos.common import ( - Config, 
-) - -from ..abc import ( - SnapshotRepository, -) -from .readers import ( - DatabaseSnapshotReader, -) -from .writers import ( - DatabaseSnapshotWriter, -) - -if TYPE_CHECKING: - from ...entities import ( - RootEntity, - ) - - -class DatabaseSnapshotRepository(SnapshotRepository): - """Database Snapshot Repository class. - - The snapshot provides a direct accessor to the ``RootEntity`` instances stored as events by the event repository - class. - """ - - reader: DatabaseSnapshotReader - writer: DatabaseSnapshotWriter - - def __init__(self, *args, reader: DatabaseSnapshotReader, writer: DatabaseSnapshotWriter, **kwargs): - super().__init__(*args, **kwargs) - self.reader = reader - self.writer = writer - - @classmethod - def _from_config(cls, config: Config, **kwargs) -> DatabaseSnapshotRepository: - if "reader" not in kwargs: - kwargs["reader"] = DatabaseSnapshotReader.from_config(config, **kwargs) - - if "writer" not in kwargs: - kwargs["writer"] = DatabaseSnapshotWriter.from_config(config, **kwargs) - - return cls(database_key=None, **kwargs) - - async def _setup(self) -> None: - await self.writer.setup() - await self.reader.setup() - await super()._setup() - - async def _destroy(self) -> None: - await super()._destroy() - await self.reader.destroy() - await self.writer.destroy() - - def _get(self, *args, **kwargs) -> Awaitable[RootEntity]: - return self.reader.get(*args, **kwargs) - - def _find(self, *args, **kwargs) -> AsyncIterator[RootEntity]: - return self.reader.find(*args, **kwargs) - - def _synchronize(self, *args, **kwargs) -> Awaitable[None]: - return self.writer.dispatch(**kwargs) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/__init__.py deleted file mode 100644 index dd9c232cc..000000000 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/__init__.py +++ 
/dev/null @@ -1,7 +0,0 @@ -from .abc import ( - SnapshotDatabaseOperationFactory, -) -from .aiopg import ( - AiopgSnapshotDatabaseOperationFactory, - AiopgSnapshotQueryDatabaseOperationBuilder, -) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/readers.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/readers.py deleted file mode 100644 index 1dfca2862..000000000 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/readers.py +++ /dev/null @@ -1,131 +0,0 @@ -from __future__ import ( - annotations, -) - -import logging -from typing import ( - TYPE_CHECKING, - AsyncIterator, - Optional, -) -from uuid import ( - UUID, -) - -from minos.common import ( - NULL_UUID, -) - -from ...exceptions import ( - NotFoundException, -) -from ...queries import ( - _Condition, - _EqualCondition, - _Ordering, -) -from ...transactions import ( - TransactionEntry, -) -from ..entries import ( - SnapshotEntry, -) -from .abc import ( - DatabaseSnapshotSetup, -) - -if TYPE_CHECKING: - from ...entities import ( - RootEntity, - ) - -logger = logging.getLogger(__name__) - - -class DatabaseSnapshotReader(DatabaseSnapshotSetup): - """Database Snapshot Reader class. - - The snapshot provides a direct accessor to the ``RootEntity`` instances stored as events by the event repository - class. - """ - - async def get(self, name: str, uuid: UUID, **kwargs) -> RootEntity: - """Get a ``RootEntity`` instance from its identifier. - - :param name: Class name of the ``RootEntity``. - :param uuid: Identifier of the ``RootEntity``. - :param kwargs: Additional named arguments. - :return: The ``RootEntity`` instance. - """ - snapshot_entry = await self.get_entry(name, uuid, **kwargs) - instance = snapshot_entry.build(**kwargs) - return instance - - # noinspection PyUnusedLocal - async def get_entry(self, name: str, uuid: UUID, **kwargs) -> SnapshotEntry: - """Get a ``SnapshotEntry`` from its identifier. 
- - :param name: Class name of the ``RootEntity``. - :param uuid: Identifier of the ``RootEntity``. - :param kwargs: Additional named arguments. - :return: The ``SnapshotEntry`` instance. - """ - - try: - return await self.find_entries( - name, _EqualCondition("uuid", uuid), **kwargs | {"exclude_deleted": False} - ).__anext__() - except StopAsyncIteration: - raise NotFoundException(f"The instance could not be found: {uuid!s}") - - async def find(self, *args, **kwargs) -> AsyncIterator[RootEntity]: - """Find a collection of ``RootEntity`` instances based on a ``Condition``. - - :param args: Additional positional arguments. - :param kwargs: Additional named arguments. - :return: An asynchronous iterator that containing the ``RootEntity`` instances. - """ - async for snapshot_entry in self.find_entries(*args, **kwargs): - yield snapshot_entry.build(**kwargs) - - # noinspection PyUnusedLocal - async def find_entries( - self, - name: str, - condition: _Condition, - ordering: Optional[_Ordering] = None, - limit: Optional[int] = None, - streaming_mode: bool = False, - transaction: Optional[TransactionEntry] = None, - exclude_deleted: bool = True, - **kwargs, - ) -> AsyncIterator[SnapshotEntry]: - """Find a collection of ``SnapshotEntry`` instances based on a ``Condition``. - - :param name: Class name of the ``RootEntity``. - :param condition: The condition that must be satisfied by the ``RootEntity`` instances. - :param ordering: Optional argument to return the instance with specific ordering strategy. The default behaviour - is to retrieve them without any order pattern. - :param limit: Optional argument to return only a subset of instances. The default behaviour is to return all the - instances that meet the given condition. - :param streaming_mode: If ``True`` return the values in streaming directly from the database (keep an open - database connection), otherwise preloads the full set of values on memory and then retrieves them. 
- :param transaction: The transaction within the operation is performed. If not any value is provided, then the - transaction is extracted from the context var. If not any transaction is being scoped then the query is - performed to the global snapshot. - :param exclude_deleted: If ``True``, deleted ``RootEntity`` entries are included, otherwise deleted - ``RootEntity`` entries are filtered. - :param kwargs: Additional named arguments. - :return: An asynchronous iterator that containing the ``RootEntity`` instances. - """ - if transaction is None: - transaction_uuids = (NULL_UUID,) - else: - transaction_uuids = await transaction.uuids - - operation = self.operation_factory.build_query( - name, condition, ordering, limit, transaction_uuids, exclude_deleted - ) - - async for row in self.submit_query_and_iter(operation, streaming_mode=streaming_mode): - yield SnapshotEntry(*row) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/entries.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/entries.py index 73b8afab1..6a92f5078 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/entries.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/entries.py @@ -42,25 +42,14 @@ class SnapshotEntry: Is the python object representation of a row in the ``snapshot`` storage system. 
""" - __slots__ = ( - "uuid", - "name", - "version", - "schema", - "data", - "created_at", - "updated_at", - "transaction_uuid", - ) - # noinspection PyShadowingBuiltins def __init__( self, uuid: UUID, name: str, version: int, - schema: Optional[Union[list[dict[str, Any]], dict[str, Any]]] = None, - data: Optional[dict[str, Any]] = None, + schema: Optional[Union[list[dict[str, Any]], dict[str, Any]], bytes, memoryview] = None, + data: Optional[Union[dict[str, Any], str]] = None, created_at: Optional[datetime] = None, updated_at: Optional[datetime] = None, transaction_uuid: UUID = NULL_UUID, @@ -70,6 +59,9 @@ def __init__( if isinstance(schema, bytes): schema = MinosJsonBinaryProtocol.decode(schema) + if isinstance(data, str): + data = json.loads(data) + self.uuid = uuid self.name = name self.version = version diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/__init__.py new file mode 100644 index 000000000..172c54117 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/__init__.py @@ -0,0 +1,10 @@ +from .abc import ( + SnapshotRepository, +) +from .database import ( + DatabaseSnapshotRepository, + SnapshotDatabaseOperationFactory, +) +from .memory import ( + InMemorySnapshotRepository, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/abc.py similarity index 55% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/abc.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/abc.py index f1b0f26a9..a047d5f16 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/abc.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/abc.py @@ 
-11,6 +11,7 @@ AsyncIterator, Awaitable, Optional, + Union, ) from uuid import ( UUID, @@ -19,20 +20,28 @@ from minos.common import ( Injectable, SetupMixin, + classname, ) -from ..queries import ( +from ...exceptions import ( + NotFoundException, +) +from ...queries import ( _TRUE_CONDITION, _Condition, + _EqualCondition, _Ordering, ) -from ..transactions import ( +from ...transactions import ( TRANSACTION_CONTEXT_VAR, TransactionEntry, ) +from ..entries import ( + SnapshotEntry, +) if TYPE_CHECKING: - from ..entities import ( + from ...entities import ( RootEntity, ) @@ -45,7 +54,13 @@ class SnapshotRepository(ABC, SetupMixin): class. """ - async def get(self, name: str, uuid: UUID, transaction: Optional[TransactionEntry] = None, **kwargs) -> RootEntity: + async def get( + self, + name: Union[str, type[RootEntity]], + uuid: UUID, + transaction: Optional[TransactionEntry] = None, + **kwargs, + ) -> RootEntity: """Get a ``RootEntity`` instance from its identifier. :param name: Class name of the ``RootEntity``. @@ -56,16 +71,25 @@ async def get(self, name: str, uuid: UUID, transaction: Optional[TransactionEntr :param kwargs: Additional named arguments. :return: The ``RootEntity`` instance. """ - if transaction is None: - transaction = TRANSACTION_CONTEXT_VAR.get() + snapshot_entry = await self.get_entry(name, uuid, transaction=transaction, **kwargs) + instance = snapshot_entry.build(**kwargs) + return instance - await self.synchronize(**kwargs) + async def get_entry(self, name: str, uuid: UUID, **kwargs) -> SnapshotEntry: + """Get a ``SnapshotEntry`` from its identifier. - return await self._get(name=name, uuid=uuid, transaction=transaction, **kwargs) + :param name: Class name of the ``RootEntity``. + :param uuid: Identifier of the ``RootEntity``. + :param kwargs: Additional named arguments. + :return: The ``SnapshotEntry`` instance. 
+ """ - @abstractmethod - async def _get(self, *args, **kwargs) -> RootEntity: - raise NotImplementedError + try: + return await self.find_entries( + name, _EqualCondition("uuid", uuid), **kwargs | {"exclude_deleted": False} + ).__anext__() + except StopAsyncIteration: + raise NotFoundException(f"The instance could not be found: {uuid!s}") def get_all( self, @@ -103,7 +127,7 @@ def get_all( async def find( self, - name: str, + name: Union[str, type[RootEntity]], condition: _Condition, ordering: Optional[_Ordering] = None, limit: Optional[int] = None, @@ -127,26 +151,83 @@ async def find( :param kwargs: Additional named arguments. :return: An asynchronous iterator that containing the ``RootEntity`` instances. """ + iterable = self.find_entries( + name=name, + condition=condition, + ordering=ordering, + limit=limit, + streaming_mode=streaming_mode, + transaction=transaction, + **kwargs, + ) + async for snapshot_entry in iterable: + yield snapshot_entry.build(**kwargs) + + async def find_entries( + self, + name: str, + condition: _Condition, + ordering: Optional[_Ordering] = None, + limit: Optional[int] = None, + streaming_mode: bool = False, + transaction: Optional[TransactionEntry] = None, + exclude_deleted: bool = True, + synchronize: bool = True, + **kwargs, + ) -> AsyncIterator[SnapshotEntry]: + """Find a collection of ``SnapshotEntry`` instances based on a ``Condition``. + + :param name: Class name of the ``RootEntity``. + :param condition: The condition that must be satisfied by the ``RootEntity`` instances. + :param ordering: Optional argument to return the instance with specific ordering strategy. The default behaviour + is to retrieve them without any order pattern. + :param limit: Optional argument to return only a subset of instances. The default behaviour is to return all the + instances that meet the given condition. 
+ :param streaming_mode: If ``True`` return the values in streaming directly from the database (keep an open + database connection), otherwise preloads the full set of values on memory and then retrieves them. + :param transaction: The transaction within the operation is performed. If not any value is provided, then the + transaction is extracted from the context var. If not any transaction is being scoped then the query is + performed to the global snapshot. + :param exclude_deleted: If ``True``, deleted ``RootEntity`` entries are included, otherwise deleted + ``RootEntity`` entries are filtered. + :param synchronize: TODO + :param kwargs: Additional named arguments. + :return: An asynchronous iterator that containing the ``RootEntity`` instances. + """ + if isinstance(name, type): + name = classname(name) + if transaction is None: transaction = TRANSACTION_CONTEXT_VAR.get() - await self.synchronize(**kwargs) + if synchronize: + await self.synchronize(**kwargs) - iterable = self._find( + iterable = self._find_entries( name=name, condition=condition, ordering=ordering, limit=limit, streaming_mode=streaming_mode, transaction=transaction, + exclude_deleted=exclude_deleted, **kwargs, ) - - async for instance in iterable: - yield instance + async for entry in iterable: + yield entry @abstractmethod - def _find(self, *args, **kwargs) -> AsyncIterator[RootEntity]: + def _find_entries( + self, + name: str, + condition: _Condition, + ordering: Optional[_Ordering], + limit: Optional[int], + streaming_mode: bool, + transaction: Optional[TransactionEntry], + exclude_deleted: bool, + **kwargs, + ) -> AsyncIterator[SnapshotEntry]: raise NotImplementedError def synchronize(self, **kwargs) -> Awaitable[None]: @@ -155,7 +236,7 @@ def synchronize(self, **kwargs) -> Awaitable[None]: :param kwargs: Additional named arguments. :return: This method does not return anything. 
""" - return self._synchronize(**kwargs) + return self._synchronize(**kwargs, synchronize=False) @abstractmethod async def _synchronize(self, **kwargs) -> None: diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/__init__.py new file mode 100644 index 000000000..d45559535 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/__init__.py @@ -0,0 +1,6 @@ +from .factories import ( + SnapshotDatabaseOperationFactory, +) +from .impl import ( + DatabaseSnapshotRepository, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/__init__.py new file mode 100644 index 000000000..0ac79d093 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/__init__.py @@ -0,0 +1,3 @@ +from .abc import ( + SnapshotDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/abc.py similarity index 91% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/abc.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/abc.py index 78d895209..12b32dfdb 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/abc.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/factories/abc.py @@ -21,7 +21,7 @@ DatabaseOperationFactory, ) -from ....queries import ( +from .....queries import ( 
_Condition, _Ordering, ) @@ -31,14 +31,14 @@ class SnapshotDatabaseOperationFactory(DatabaseOperationFactory, ABC): """Snapshot Database Operation Factory class.""" @abstractmethod - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the database operation to create the snapshot table. :return: A ``DatabaseOperation`` instance. """ @abstractmethod - def build_delete_by_transactions(self, transaction_uuids: Iterable[UUID]) -> DatabaseOperation: + def build_delete(self, transaction_uuids: Iterable[UUID]) -> DatabaseOperation: """Build the database operation to delete rows by transaction identifiers. :param transaction_uuids: The transaction identifiers. @@ -46,7 +46,7 @@ def build_delete_by_transactions(self, transaction_uuids: Iterable[UUID]) -> Dat """ @abstractmethod - def build_insert( + def build_submit( self, uuid: UUID, name: str, @@ -97,7 +97,7 @@ def build_query( """ @abstractmethod - def build_store_offset(self, value: int) -> DatabaseOperation: + def build_submit_offset(self, value: int) -> DatabaseOperation: """Build the database operation to store the offset. :param value: The value to be stored as the new offset. @@ -105,7 +105,7 @@ def build_store_offset(self, value: int) -> DatabaseOperation: """ @abstractmethod - def build_get_offset(self) -> DatabaseOperation: + def build_query_offset(self) -> DatabaseOperation: """Build the database operation to get the current offset. :return: A ``DatabaseOperation`` instance. 
diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/writers.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/impl.py similarity index 63% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/writers.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/impl.py index 4a241902e..2f80ae02b 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/writers.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/database/impl.py @@ -2,63 +2,80 @@ annotations, ) +from collections.abc import ( + AsyncIterator, +) from typing import ( TYPE_CHECKING, - Type, + Optional, ) from uuid import ( UUID, ) from minos.common import ( + NULL_UUID, + DatabaseMixin, Inject, NotProvidedException, + ProgrammingException, import_module, ) -from ...events import ( +from ....events import ( Event, EventEntry, EventRepository, ) -from ...exceptions import ( +from ....exceptions import ( NotFoundException, SnapshotRepositoryConflictException, TransactionNotFoundException, ) -from ...transactions import ( +from ....queries import ( + _Condition, + _Ordering, +) +from ....transactions import ( + TransactionEntry, TransactionRepository, TransactionStatus, ) -from ..entries import ( +from ...entries import ( SnapshotEntry, ) -from .abc import ( - DatabaseSnapshotSetup, +from ..abc import ( + SnapshotRepository, ) -from .readers import ( - DatabaseSnapshotReader, +from .factories import ( + SnapshotDatabaseOperationFactory, ) if TYPE_CHECKING: - from ...entities import ( + from ....entities import ( RootEntity, ) -class DatabaseSnapshotWriter(DatabaseSnapshotSetup): - """Minos Snapshot Dispatcher class.""" +class DatabaseSnapshotRepository(SnapshotRepository, DatabaseMixin[SnapshotDatabaseOperationFactory]): + """Database Snapshot Repository class. 
+ + The snapshot provides a direct accessor to the ``RootEntity`` instances stored as events by the event repository + class. + """ @Inject() def __init__( self, *args, - reader: DatabaseSnapshotReader, event_repository: EventRepository, transaction_repository: TransactionRepository, + database_key: Optional[tuple[str]] = None, **kwargs, ): - super().__init__(*args, **kwargs) + if database_key is None: + database_key = ("aggregate", "snapshot") + super().__init__(*args, database_key=database_key, **kwargs) if event_repository is None: raise NotProvidedException("An event repository instance is required.") @@ -66,10 +83,40 @@ def __init__( if transaction_repository is None: raise NotProvidedException("A transaction repository instance is required.") - self._reader = reader self._event_repository = event_repository self._transaction_repository = transaction_repository + async def _setup(self) -> None: + operation = self.database_operation_factory.build_create() + await self.execute_on_database(operation) + + async def _destroy(self) -> None: + await super()._destroy() + + # noinspection PyUnusedLocal + async def _find_entries( + self, + name: str, + condition: _Condition, + ordering: Optional[_Ordering], + limit: Optional[int], + streaming_mode: bool, + transaction: Optional[TransactionEntry], + exclude_deleted: bool, + **kwargs, + ) -> AsyncIterator[SnapshotEntry]: + if transaction is None: + transaction_uuids = (NULL_UUID,) + else: + transaction_uuids = await transaction.uuids + + operation = self.database_operation_factory.build_query( + name, condition, ordering, limit, transaction_uuids, exclude_deleted + ) + + async for row in self.execute_on_database_and_fetch_all(operation, streaming_mode=streaming_mode): + yield SnapshotEntry(*row) + async def is_synced(self, name: str, **kwargs) -> bool: """Check if the snapshot has the latest version of a ``RootEntity`` instance. 
@@ -84,11 +131,7 @@ async def is_synced(self, name: str, **kwargs) -> bool: except StopAsyncIteration: return True - async def dispatch(self, **kwargs) -> None: - """Perform a dispatching step, based on the sequence of non already processed ``EventEntry`` objects. - - :return: This method does not return anything. - """ + async def _synchronize(self, **kwargs) -> None: initial_offset = await self._load_offset() offset = initial_offset @@ -105,17 +148,17 @@ async def dispatch(self, **kwargs) -> None: await self._store_offset(offset) async def _load_offset(self) -> int: - operation = self.operation_factory.build_get_offset() + operation = self.database_operation_factory.build_query_offset() # noinspection PyBroadException try: - raw = await self.submit_query_and_fetchone(operation) - return raw[0] - except Exception: + row = await self.execute_on_database_and_fetch_one(operation) + except ProgrammingException: return 0 + return row[0] async def _store_offset(self, offset: int) -> None: - operation = self.operation_factory.build_store_offset(offset) - await self.submit_query(operation) + operation = self.database_operation_factory.build_submit_offset(offset) + await self.execute_on_database(operation) async def _dispatch_one(self, event_entry: EventEntry, **kwargs) -> SnapshotEntry: if event_entry.action.is_delete: @@ -153,7 +196,7 @@ async def _update_instance_if_exists(self, event: Event, **kwargs) -> RootEntity previous = await self._select_one_instance(event.name, event.uuid, **kwargs) except NotFoundException: # noinspection PyTypeChecker - cls: Type[RootEntity] = import_module(event.name) + cls = import_module(event.name) return cls.from_diff(event, **kwargs) if previous.version >= event.version: @@ -163,12 +206,12 @@ async def _update_instance_if_exists(self, event: Event, **kwargs) -> RootEntity return previous async def _select_one_instance(self, name: str, uuid: UUID, **kwargs) -> RootEntity: - snapshot_entry = await self._reader.get_entry(name, uuid, 
**kwargs) + snapshot_entry = await self.get_entry(name, uuid, **kwargs) return snapshot_entry.build(**kwargs) async def _submit_entry(self, snapshot_entry: SnapshotEntry) -> SnapshotEntry: - operation = self.operation_factory.build_insert(**snapshot_entry.as_raw()) - response = await self.submit_query_and_fetchone(operation) + operation = self.database_operation_factory.build_submit(**snapshot_entry.as_raw()) + response = await self.execute_on_database_and_fetch_one(operation) snapshot_entry.created_at, snapshot_entry.updated_at = response @@ -180,5 +223,5 @@ async def _clean_transactions(self, offset: int, **kwargs) -> None: ) transaction_uuids = {transaction.uuid async for transaction in iterable} if len(transaction_uuids): - operation = self.operation_factory.build_delete_by_transactions(transaction_uuids) - await self.submit_query(operation) + operation = self.database_operation_factory.build_delete(transaction_uuids) + await self.execute_on_database(operation) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/memory.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/memory.py similarity index 62% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/memory.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/memory.py index c74df9a10..cc8ecfb69 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/memory.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/repositories/memory.py @@ -2,11 +2,16 @@ annotations, ) +from contextlib import ( + suppress, +) +from functools import ( + cmp_to_key, +) from operator import ( attrgetter, ) from typing import ( - TYPE_CHECKING, AsyncIterator, Optional, ) @@ -20,32 +25,29 @@ NotProvidedException, ) -from ..events import ( +from ...events import ( EventEntry, EventRepository, ) -from ..exceptions import ( +from ...exceptions import ( 
AlreadyDeletedException, - NotFoundException, ) -from ..queries import ( +from ...queries import ( _Condition, _Ordering, ) -from ..transactions import ( +from ...transactions import ( TransactionEntry, TransactionRepository, TransactionStatus, ) +from ..entries import ( + SnapshotEntry, +) from .abc import ( SnapshotRepository, ) -if TYPE_CHECKING: - from ..entities import ( - RootEntity, - ) - class InMemorySnapshotRepository(SnapshotRepository): """InMemory Snapshot class. @@ -73,46 +75,66 @@ def __init__( self._event_repository = event_repository self._transaction_repository = transaction_repository - async def _find( + async def _find_entries( self, name: str, condition: _Condition, - ordering: Optional[_Ordering] = None, - limit: Optional[int] = None, + ordering: Optional[_Ordering], + limit: Optional[int], + exclude_deleted: bool, **kwargs, - ) -> AsyncIterator[RootEntity]: + ) -> AsyncIterator[SnapshotEntry]: uuids = {v.uuid async for v in self._event_repository.select(name=name)} - instances = list() + entries = list() for uuid in uuids: + entry = await self._get(name, uuid, **kwargs) + try: - instance = await self.get(name, uuid, **kwargs) + instance = entry.build() + if condition.evaluate(instance): + entries.append(entry) except AlreadyDeletedException: - continue - - if condition.evaluate(instance): - instances.append(instance) + # noinspection PyTypeChecker + if not exclude_deleted and condition.evaluate(entry): + entries.append(entry) if ordering is not None: - instances.sort(key=attrgetter(ordering.by), reverse=ordering.reverse) + + def _cmp(a: SnapshotEntry, b: SnapshotEntry) -> int: + with suppress(AlreadyDeletedException): + with suppress(AlreadyDeletedException): + try: + aa = attrgetter(ordering.by)(a.build()) + except AlreadyDeletedException: + aa = attrgetter(ordering.by)(a) + with suppress(AlreadyDeletedException): + try: + bb = attrgetter(ordering.by)(b.build()) + except AlreadyDeletedException: + bb = attrgetter(ordering.by)(b) + + if aa > 
bb: + return 1 + elif aa < bb: + return -1 + + return 0 + + entries.sort(key=cmp_to_key(_cmp), reverse=ordering.reverse) if limit is not None: - instances = instances[:limit] + entries = entries[:limit] - for instance in instances: - yield instance + for entry in entries: + yield entry # noinspection PyMethodOverriding - async def _get(self, name: str, uuid: UUID, transaction: Optional[TransactionEntry] = None, **kwargs) -> RootEntity: + async def _get( + self, name: str, uuid: UUID, transaction: Optional[TransactionEntry] = None, **kwargs + ) -> SnapshotEntry: transaction_uuids = await self._get_transaction_uuids(transaction) entries = await self._get_event_entries(name, uuid, transaction_uuids) - - if not len(entries): - raise NotFoundException(f"Not found any entries for the {uuid!r} id.") - - if entries[-1].action.is_delete: - raise AlreadyDeletedException(f"The {uuid!r} identifier belongs to an already deleted instance.") - return self._build_instance(entries, **kwargs) async def _get_transaction_uuids(self, transaction: Optional[TransactionEntry]) -> tuple[UUID, ...]: @@ -147,12 +169,18 @@ async def _get_event_entries(self, name: str, uuid: UUID, transaction_uuids: tup return entries @staticmethod - def _build_instance(entries: list[EventEntry], **kwargs) -> RootEntity: + def _build_instance(entries: list[EventEntry], **kwargs) -> SnapshotEntry: + if entries[-1].action.is_delete: + return SnapshotEntry.from_event_entry(entries[-1]) + cls = entries[0].type_ instance = cls.from_diff(entries[0].event, **kwargs) for entry in entries[1:]: instance.apply_diff(entry.event) - return instance + + snapshot = SnapshotEntry.from_root_entity(instance) + + return snapshot async def _synchronize(self, **kwargs) -> None: pass diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/services.py b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/services.py index cd27c022e..b77ab3f4c 100644 --- 
a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/services.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/services.py @@ -30,7 +30,7 @@ enroute, ) -from .abc import ( +from .repositories import ( SnapshotRepository, ) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/__init__.py new file mode 100644 index 000000000..9d205beed --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/__init__.py @@ -0,0 +1,12 @@ +from .events import ( + EventRepositoryTestCase, + MockedEventDatabaseOperationFactory, +) +from .snapshots import ( + MockedSnapshotDatabaseOperationFactory, + SnapshotRepositoryTestCase, +) +from .transactions import ( + MockedTransactionDatabaseOperationFactory, + TransactionRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/__init__.py new file mode 100644 index 000000000..33ee0ba5f --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/__init__.py @@ -0,0 +1,4 @@ +from .repositories import ( + EventRepositoryTestCase, + MockedEventDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/__init__.py new file mode 100644 index 000000000..f8147fc40 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/__init__.py @@ -0,0 +1,6 @@ +from .factories import ( + MockedEventDatabaseOperationFactory, +) +from .testcases import ( + EventRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/factories.py 
b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/factories.py new file mode 100644 index 000000000..8bf90cf35 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/factories.py @@ -0,0 +1,77 @@ +from datetime import ( + datetime, +) +from typing import ( + Optional, +) +from uuid import ( + UUID, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseClient, + MockedDatabaseOperation, +) + +from ....actions import ( + Action, +) +from ....events import ( + EventDatabaseOperationFactory, +) + + +class MockedEventDatabaseOperationFactory(EventDatabaseOperationFactory): + """For testing purposes.""" + + def build_create(self) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("create") + + def build_submit( + self, + transaction_uuids: tuple[UUID], + uuid: UUID, + action: Action, + name: str, + version: int, + data: bytes, + created_at: datetime, + transaction_uuid: UUID, + lock: Optional[int], + **kwargs, + ) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("submit") + + def build_query( + self, + uuid: Optional[UUID] = None, + name: Optional[str] = None, + version: Optional[int] = None, + version_lt: Optional[int] = None, + version_gt: Optional[int] = None, + version_le: Optional[int] = None, + version_ge: Optional[int] = None, + id: Optional[int] = None, + id_lt: Optional[int] = None, + id_gt: Optional[int] = None, + id_le: Optional[int] = None, + id_ge: Optional[int] = None, + transaction_uuid: Optional[UUID] = None, + transaction_uuid_ne: Optional[UUID] = None, + transaction_uuid_in: Optional[tuple[UUID, ...]] = None, + **kwargs, + ) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("select_rows") + + def build_query_offset(self) -> DatabaseOperation: + """For testing purposes.""" + return 
MockedDatabaseOperation("select_max_id") + + +MockedDatabaseClient.set_factory(EventDatabaseOperationFactory, MockedEventDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-aggregate/tests/testcases/event_repository.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/testcases.py similarity index 82% rename from packages/core/minos-microservice-aggregate/tests/testcases/event_repository.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/testcases.py index 3ff98508a..e7d8ee72e 100644 --- a/packages/core/minos-microservice-aggregate/tests/testcases/event_repository.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/events/repositories/testcases.py @@ -28,22 +28,126 @@ NULL_UUID, current_datetime, ) -from tests.utils import ( - AggregateTestCase, +from minos.common.testing import ( + MinosTestCase, ) -class EventRepositoryTestCase(AggregateTestCase): +class EventRepositoryTestCase(MinosTestCase, ABC): + __test__ = False + def setUp(self) -> None: super().setUp() - self.uuid = uuid4() - self.event_repository = self.build_event_repository() self.field_diff_container_patcher = patch( "minos.aggregate.FieldDiffContainer.from_avro_bytes", return_value=FieldDiffContainer.empty() ) self.field_diff_container_patcher.start() + self.uuid = uuid4() + self.uuid_1 = uuid4() + self.uuid_2 = uuid4() + self.uuid_4 = uuid4() + + self.first_transaction = uuid4() + self.second_transaction = uuid4() + + self.entries = [ + EventEntry( + self.uuid_1, + "example.Car", + 1, + bytes("foo", "utf-8"), + 1, + Action.CREATE, + current_datetime(), + ), + EventEntry( + self.uuid_1, + "example.Car", + 2, + bytes("bar", "utf-8"), + 2, + Action.UPDATE, + current_datetime(), + ), + EventEntry( + self.uuid_2, + "example.Car", + 1, + bytes("hello", "utf-8"), + 3, + Action.CREATE, + current_datetime(), + ), + EventEntry( + self.uuid_1, + "example.Car", + 3, + 
bytes("foobar", "utf-8"), + 4, + Action.UPDATE, + current_datetime(), + ), + EventEntry( + self.uuid_1, + "example.Car", + 4, + bytes(), + 5, + Action.DELETE, + current_datetime(), + ), + EventEntry( + self.uuid_2, + "example.Car", + 2, + bytes("bye", "utf-8"), + 6, + Action.UPDATE, + current_datetime(), + ), + EventEntry( + self.uuid_4, + "example.MotorCycle", + 1, + bytes("one", "utf-8"), + 7, + Action.CREATE, + current_datetime(), + ), + EventEntry( + self.uuid_2, + "example.Car", + 3, + bytes("hola", "utf-8"), + 8, + Action.UPDATE, + current_datetime(), + transaction_uuid=self.first_transaction, + ), + EventEntry( + self.uuid_2, + "example.Car", + 3, + bytes("salut", "utf-8"), + 9, + Action.UPDATE, + current_datetime(), + transaction_uuid=self.second_transaction, + ), + EventEntry( + self.uuid_2, + "example.Car", + 4, + bytes("adios", "utf-8"), + 10, + Action.UPDATE, + current_datetime(), + transaction_uuid=self.first_transaction, + ), + ] + async def asyncSetUp(self): await super().asyncSetUp() await self.event_repository.setup() @@ -56,6 +160,27 @@ def tearDown(self): self.field_diff_container_patcher.stop() super().tearDown() + async def populate(self) -> None: + await self.transaction_repository.submit(TransactionEntry(self.first_transaction)) + await self.transaction_repository.submit(TransactionEntry(self.second_transaction)) + + await self.event_repository.create(EventEntry(self.uuid_1, "example.Car", 1, bytes("foo", "utf-8"))) + await self.event_repository.update(EventEntry(self.uuid_1, "example.Car", 2, bytes("bar", "utf-8"))) + await self.event_repository.create(EventEntry(self.uuid_2, "example.Car", 1, bytes("hello", "utf-8"))) + await self.event_repository.update(EventEntry(self.uuid_1, "example.Car", 3, bytes("foobar", "utf-8"))) + await self.event_repository.delete(EventEntry(self.uuid_1, "example.Car", 4)) + await self.event_repository.update(EventEntry(self.uuid_2, "example.Car", 2, bytes("bye", "utf-8"))) + await 
self.event_repository.create(EventEntry(self.uuid_4, "example.MotorCycle", 1, bytes("one", "utf-8"))) + await self.event_repository.update( + EventEntry(self.uuid_2, "example.Car", 3, bytes("hola", "utf-8"), transaction_uuid=self.first_transaction) + ) + await self.event_repository.update( + EventEntry(self.uuid_2, "example.Car", 3, bytes("salut", "utf-8"), transaction_uuid=self.second_transaction) + ) + await self.event_repository.update( + EventEntry(self.uuid_2, "example.Car", 4, bytes("adios", "utf-8"), transaction_uuid=self.first_transaction) + ) + @abstractmethod def build_event_repository(self) -> EventRepository: """For testing purposes.""" @@ -75,10 +200,6 @@ def assert_equal_repository_entries(self, expected: list[EventEntry], observed: self.assertEqual(e.action, o.action) self.assertAlmostEqual(e.created_at or current_datetime(), o.created_at, delta=timedelta(seconds=5)) - -class EventRepositorySubmitTestCase(EventRepositoryTestCase, ABC): - __test__ = False - async def test_generate_uuid(self): await self.event_repository.create(EventEntry(NULL_UUID, "example.Car", 1, bytes("foo", "utf-8"))) observed = [v async for v in self.event_repository.select()] @@ -124,133 +245,68 @@ async def test_offset(self): await self.event_repository.submit(EventEntry(self.uuid, "example.Car", version=3, action=Action.CREATE)) self.assertEqual(1, await self.event_repository.offset) - -class EventRepositorySelectTestCase(EventRepositoryTestCase, ABC): - __test__ = False - - def setUp(self) -> None: - super().setUp() - - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_4 = uuid4() - - self.first_transaction = uuid4() - self.second_transaction = uuid4() - - self.entries = [ - EventEntry(self.uuid_1, "example.Car", 1, bytes("foo", "utf-8"), 1, Action.CREATE), - EventEntry(self.uuid_1, "example.Car", 2, bytes("bar", "utf-8"), 2, Action.UPDATE), - EventEntry(self.uuid_2, "example.Car", 1, bytes("hello", "utf-8"), 3, Action.CREATE), - EventEntry(self.uuid_1, 
"example.Car", 3, bytes("foobar", "utf-8"), 4, Action.UPDATE), - EventEntry(self.uuid_1, "example.Car", 4, bytes(), 5, Action.DELETE), - EventEntry(self.uuid_2, "example.Car", 2, bytes("bye", "utf-8"), 6, Action.UPDATE), - EventEntry(self.uuid_4, "example.MotorCycle", 1, bytes("one", "utf-8"), 7, Action.CREATE), - EventEntry( - self.uuid_2, - "example.Car", - 3, - bytes("hola", "utf-8"), - 8, - Action.UPDATE, - transaction_uuid=self.first_transaction, - ), - EventEntry( - self.uuid_2, - "example.Car", - 3, - bytes("salut", "utf-8"), - 9, - Action.UPDATE, - transaction_uuid=self.second_transaction, - ), - EventEntry( - self.uuid_2, - "example.Car", - 4, - bytes("adios", "utf-8"), - 10, - Action.UPDATE, - transaction_uuid=self.first_transaction, - ), - ] - - async def asyncSetUp(self): - await super().asyncSetUp() - await self._populate() - - async def _populate(self): - await self.transaction_repository.submit(TransactionEntry(self.first_transaction)) - await self.transaction_repository.submit(TransactionEntry(self.second_transaction)) - - await self.event_repository.create(EventEntry(self.uuid_1, "example.Car", 1, bytes("foo", "utf-8"))) - await self.event_repository.update(EventEntry(self.uuid_1, "example.Car", 2, bytes("bar", "utf-8"))) - await self.event_repository.create(EventEntry(self.uuid_2, "example.Car", 1, bytes("hello", "utf-8"))) - await self.event_repository.update(EventEntry(self.uuid_1, "example.Car", 3, bytes("foobar", "utf-8"))) - await self.event_repository.delete(EventEntry(self.uuid_1, "example.Car", 4)) - await self.event_repository.update(EventEntry(self.uuid_2, "example.Car", 2, bytes("bye", "utf-8"))) - await self.event_repository.create(EventEntry(self.uuid_4, "example.MotorCycle", 1, bytes("one", "utf-8"))) - await self.event_repository.update( - EventEntry(self.uuid_2, "example.Car", 3, bytes("hola", "utf-8"), transaction_uuid=self.first_transaction) - ) - await self.event_repository.update( - EventEntry(self.uuid_2, "example.Car", 3, 
bytes("salut", "utf-8"), transaction_uuid=self.second_transaction) - ) - await self.event_repository.update( - EventEntry(self.uuid_2, "example.Car", 4, bytes("adios", "utf-8"), transaction_uuid=self.first_transaction) - ) - async def test_select(self): + await self.populate() expected = self.entries observed = [v async for v in self.event_repository.select()] self.assert_equal_repository_entries(expected, observed) async def test_select_id(self): + await self.populate() expected = [self.entries[1]] observed = [v async for v in self.event_repository.select(id=2)] self.assert_equal_repository_entries(expected, observed) async def test_select_id_lt(self): + await self.populate() expected = self.entries[:4] observed = [v async for v in self.event_repository.select(id_lt=5)] self.assert_equal_repository_entries(expected, observed) async def test_select_id_gt(self): + await self.populate() expected = self.entries[4:] observed = [v async for v in self.event_repository.select(id_gt=4)] self.assert_equal_repository_entries(expected, observed) async def test_select_id_le(self): + await self.populate() expected = self.entries[:4] observed = [v async for v in self.event_repository.select(id_le=4)] self.assert_equal_repository_entries(expected, observed) async def test_select_id_ge(self): + await self.populate() expected = self.entries[4:] observed = [v async for v in self.event_repository.select(id_ge=5)] self.assert_equal_repository_entries(expected, observed) async def test_select_uuid(self): + await self.populate() expected = [self.entries[2], self.entries[5], self.entries[7], self.entries[8], self.entries[9]] observed = [v async for v in self.event_repository.select(uuid=self.uuid_2)] self.assert_equal_repository_entries(expected, observed) async def test_select_name(self): + await self.populate() expected = [self.entries[6]] observed = [v async for v in self.event_repository.select(name="example.MotorCycle")] self.assert_equal_repository_entries(expected, observed) async 
def test_select_version(self): + await self.populate() expected = [self.entries[4], self.entries[9]] observed = [v async for v in self.event_repository.select(version=4)] self.assert_equal_repository_entries(expected, observed) async def test_select_version_lt(self): + await self.populate() expected = [self.entries[0], self.entries[2], self.entries[6]] observed = [v async for v in self.event_repository.select(version_lt=2)] self.assert_equal_repository_entries(expected, observed) async def test_select_version_gt(self): + await self.populate() expected = [ self.entries[1], self.entries[3], @@ -264,11 +320,13 @@ async def test_select_version_gt(self): self.assert_equal_repository_entries(expected, observed) async def test_select_version_le(self): + await self.populate() expected = [self.entries[0], self.entries[2], self.entries[6]] observed = [v async for v in self.event_repository.select(version_le=1)] self.assert_equal_repository_entries(expected, observed) async def test_select_version_ge(self): + await self.populate() expected = [ self.entries[1], self.entries[3], @@ -282,21 +340,25 @@ async def test_select_version_ge(self): self.assert_equal_repository_entries(expected, observed) async def test_select_transaction_uuid_null(self): + await self.populate() expected = self.entries[:7] observed = [v async for v in self.event_repository.select(transaction_uuid=NULL_UUID)] self.assert_equal_repository_entries(expected, observed) async def test_select_transaction_uuid(self): + await self.populate() expected = [self.entries[7], self.entries[9]] observed = [v async for v in self.event_repository.select(transaction_uuid=self.first_transaction)] self.assert_equal_repository_entries(expected, observed) async def test_select_transaction_uuid_ne(self): + await self.populate() expected = [self.entries[7], self.entries[8], self.entries[9]] observed = [v async for v in self.event_repository.select(transaction_uuid_ne=NULL_UUID)] self.assert_equal_repository_entries(expected, 
observed) async def test_select_transaction_uuid_in(self): + await self.populate() expected = [self.entries[7], self.entries[8], self.entries[9]] observed = [ v @@ -307,6 +369,7 @@ async def test_select_transaction_uuid_in(self): self.assert_equal_repository_entries(expected, observed) async def test_select_combined(self): + await self.populate() expected = [self.entries[2], self.entries[5], self.entries[7], self.entries[8], self.entries[9]] observed = [v async for v in self.event_repository.select(name="example.Car", uuid=self.uuid_2)] self.assert_equal_repository_entries(expected, observed) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/__init__.py new file mode 100644 index 000000000..6395db04a --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/__init__.py @@ -0,0 +1,4 @@ +from .repositories import ( + MockedSnapshotDatabaseOperationFactory, + SnapshotRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/__init__.py new file mode 100644 index 000000000..46b611965 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/__init__.py @@ -0,0 +1,6 @@ +from .factories import ( + MockedSnapshotDatabaseOperationFactory, +) +from .testcases import ( + SnapshotRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/factories.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/factories.py new file mode 100644 index 000000000..183092f8b --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/factories.py @@ 
-0,0 +1,78 @@ +from collections.abc import ( + Iterable, +) +from datetime import ( + datetime, +) +from typing import ( + Any, + Optional, +) +from uuid import ( + UUID, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseClient, + MockedDatabaseOperation, +) + +from ....queries import ( + _Condition, + _Ordering, +) +from ....snapshots import ( + SnapshotDatabaseOperationFactory, +) + + +class MockedSnapshotDatabaseOperationFactory(SnapshotDatabaseOperationFactory): + """For testing purposes.""" + + def build_create(self) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("create") + + def build_delete(self, transaction_uuids: Iterable[UUID]) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("delete") + + def build_submit( + self, + uuid: UUID, + name: str, + version: int, + schema: bytes, + data: dict[str, Any], + created_at: datetime, + updated_at: datetime, + transaction_uuid: UUID, + ) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("insert") + + def build_query( + self, + name: str, + condition: _Condition, + ordering: Optional[_Ordering], + limit: Optional[int], + transaction_uuids: tuple[UUID, ...], + exclude_deleted: bool, + ) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("query") + + def build_submit_offset(self, value: int) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("store_offset") + + def build_query_offset(self) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("get_offset") + + +MockedDatabaseClient.set_factory(SnapshotDatabaseOperationFactory, MockedSnapshotDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/testcases.py 
b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/testcases.py new file mode 100644 index 000000000..edf0dc10b --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/snapshots/repositories/testcases.py @@ -0,0 +1,623 @@ +from __future__ import ( + annotations, +) + +from abc import ( + ABC, + abstractmethod, +) +from datetime import ( + timedelta, +) +from typing import ( + Optional, +) +from unittest.mock import ( + MagicMock, + call, +) +from uuid import ( + uuid4, +) + +from minos.aggregate import ( + Action, + AlreadyDeletedException, + Condition, + EventEntry, + FieldDiff, + FieldDiffContainer, + NotFoundException, + Ordering, + Ref, + RootEntity, + SnapshotEntry, + SnapshotRepository, + TransactionEntry, + TransactionStatus, +) +from minos.common import ( + classname, + current_datetime, +) +from minos.common.testing import ( + MinosTestCase, +) + + +class SnapshotRepositoryTestCase(MinosTestCase, ABC): + __test__ = False + + snapshot_repository: SnapshotRepository + + class Owner(RootEntity): + """For testing purposes""" + + name: str + surname: str + age: Optional[int] + + class Car(RootEntity): + """For testing purposes""" + + doors: int + color: str + owner: Optional[Ref[SnapshotRepositoryTestCase.Owner]] + + def setUp(self) -> None: + super().setUp() + self.snapshot_repository = self.build_snapshot_repository() + + self.uuid_1 = uuid4() + self.uuid_2 = uuid4() + self.uuid_3 = uuid4() + + self.transaction_1 = uuid4() + self.transaction_2 = uuid4() + self.transaction_3 = uuid4() + self.transaction_4 = uuid4() + + @abstractmethod + def build_snapshot_repository(self) -> SnapshotRepository: + pass + + async def populate(self) -> None: + diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) + name: str = classname(self.Car) + + await self.event_repository.create(EventEntry(self.uuid_1, name, 1, diff.avro_bytes)) + await 
self.event_repository.update(EventEntry(self.uuid_1, name, 2, diff.avro_bytes)) + await self.event_repository.create(EventEntry(self.uuid_2, name, 1, diff.avro_bytes)) + await self.event_repository.update(EventEntry(self.uuid_1, name, 3, diff.avro_bytes)) + await self.event_repository.delete(EventEntry(self.uuid_1, name, 4)) + await self.event_repository.update(EventEntry(self.uuid_2, name, 2, diff.avro_bytes)) + await self.event_repository.update( + EventEntry(self.uuid_2, name, 3, diff.avro_bytes, transaction_uuid=self.transaction_1) + ) + await self.event_repository.delete( + EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_2) + ) + await self.event_repository.update( + EventEntry(self.uuid_2, name, 4, diff.avro_bytes, transaction_uuid=self.transaction_1) + ) + await self.event_repository.create(EventEntry(self.uuid_3, name, 1, diff.avro_bytes)) + await self.event_repository.delete( + EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_3) + ) + await self.transaction_repository.submit( + TransactionEntry(self.transaction_1, TransactionStatus.PENDING, await self.event_repository.offset) + ) + await self.transaction_repository.submit( + TransactionEntry(self.transaction_2, TransactionStatus.PENDING, await self.event_repository.offset) + ) + await self.transaction_repository.submit( + TransactionEntry(self.transaction_3, TransactionStatus.REJECTED, await self.event_repository.offset) + ) + await self.transaction_repository.submit( + TransactionEntry( + self.transaction_4, TransactionStatus.REJECTED, await self.event_repository.offset, self.transaction_3 + ) + ) + + async def populate_and_synchronize(self): + await self.populate() + await self.synchronize() + + async def synchronize(self): + await self.snapshot_repository.synchronize() + + async def asyncSetUp(self): + await super().asyncSetUp() + await self.snapshot_repository.setup() + + async def asyncTearDown(self): + await self.snapshot_repository.destroy() + 
await super().asyncTearDown() + + def assert_equal_snapshot_entries(self, expected: list[SnapshotEntry], observed: list[SnapshotEntry]): + self.assertEqual(len(expected), len(observed)) + for exp, obs in zip(expected, observed): + if exp.data is None: + with self.assertRaises(AlreadyDeletedException): + # noinspection PyStatementEffect + obs.build() + else: + self.assertEqual(exp.build(), obs.build()) + self.assertAlmostEqual(exp.created_at or current_datetime(), obs.created_at, delta=timedelta(seconds=5)) + self.assertAlmostEqual(exp.updated_at or current_datetime(), obs.updated_at, delta=timedelta(seconds=5)) + + def test_type(self): + self.assertTrue(isinstance(self.snapshot_repository, SnapshotRepository)) + + async def test_dispatch(self): + await self.populate_and_synchronize() + + # noinspection PyTypeChecker + iterable = self.snapshot_repository.find_entries( + self.Car.classname, Condition.TRUE, Ordering.ASC("updated_at"), exclude_deleted=False + ) + observed = [v async for v in iterable] + + # noinspection PyTypeChecker + expected = [ + SnapshotEntry(self.uuid_1, self.Car.classname, 4), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ) + ), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[2].created_at, + updated_at=observed[2].updated_at, + ) + ), + ] + self.assert_equal_snapshot_entries(expected, observed) + + async def test_dispatch_first_transaction(self): + await self.populate_and_synchronize() + + # noinspection PyTypeChecker + iterable = self.snapshot_repository.find_entries( + self.Car.classname, + Condition.TRUE, + Ordering.ASC("updated_at"), + exclude_deleted=False, + transaction=TransactionEntry(self.transaction_1), + ) + observed = [v async for v in iterable] + + # noinspection PyTypeChecker + expected = [ + SnapshotEntry(self.uuid_1, self.Car.classname, 4), 
+ SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=4, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ) + ), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[2].created_at, + updated_at=observed[2].updated_at, + ) + ), + ] + self.assert_equal_snapshot_entries(expected, observed) + + async def test_dispatch_second_transaction(self): + await self.populate_and_synchronize() + + # noinspection PyTypeChecker + iterable = self.snapshot_repository.find_entries( + self.Car.classname, + Condition.TRUE, + Ordering.ASC("updated_at"), + exclude_deleted=False, + transaction=TransactionEntry(self.transaction_2), + ) + observed = [v async for v in iterable] + + # noinspection PyTypeChecker + expected = [ + SnapshotEntry(self.uuid_1, self.Car.classname, 4), + SnapshotEntry(self.uuid_2, self.Car.classname, 4), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[2].created_at, + updated_at=observed[2].updated_at, + ) + ), + ] + self.assert_equal_snapshot_entries(expected, observed) + + async def test_dispatch_third_transaction(self): + await self.populate_and_synchronize() + + # noinspection PyTypeChecker + iterable = self.snapshot_repository.find_entries( + self.Car.classname, + Condition.TRUE, + Ordering.ASC("updated_at"), + exclude_deleted=False, + transaction_uuid=self.transaction_3, + ) + observed = [v async for v in iterable] + + # noinspection PyTypeChecker + expected = [ + SnapshotEntry(self.uuid_1, self.Car.classname, 4), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ) + ), + SnapshotEntry.from_root_entity( + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[2].created_at, + updated_at=observed[2].updated_at, + ) + ), + ] + 
self.assert_equal_snapshot_entries(expected, observed) + + async def test_dispatch_ignore_previous_version(self): + await self.populate() + diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) + # noinspection PyTypeChecker + name: str = self.Car.classname + condition = Condition.EQUAL("uuid", self.uuid_1) + + async def _fn(*args, id_gt: Optional[int] = None, **kwargs): + if id_gt is not None and id_gt > 0: + return + yield EventEntry(self.uuid_1, name, 1, diff.avro_bytes, 1, Action.CREATE, current_datetime()) + yield EventEntry(self.uuid_1, name, 3, diff.avro_bytes, 2, Action.CREATE, current_datetime()) + yield EventEntry(self.uuid_1, name, 2, diff.avro_bytes, 3, Action.CREATE, current_datetime()) + + self.event_repository.select = MagicMock(side_effect=_fn) + await self.snapshot_repository.synchronize() + + observed = [v async for v in self.snapshot_repository.find_entries(name, condition)] + + # noinspection PyTypeChecker + expected = [ + SnapshotEntry( + uuid=self.uuid_1, + name=name, + version=3, + schema=self.Car.avro_schema, + data=self.Car(3, "blue", uuid=self.uuid_1, version=1).avro_data, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ) + ] + self.assert_equal_snapshot_entries(expected, observed) + + async def test_dispatch_with_offset(self): + await self.populate() + + mock = MagicMock(side_effect=self.event_repository.select) + self.event_repository.select = mock + + await self.snapshot_repository.synchronize() + self.assertEqual(1, mock.call_count) + self.assertEqual(call(id_gt=0, synchronize=False), mock.call_args) + mock.reset_mock() + + # noinspection PyTypeChecker + entry = EventEntry( + uuid=self.uuid_3, + name=self.Car.classname, + data=FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]).avro_bytes, + ) + await self.event_repository.create(entry) + + await self.snapshot_repository.synchronize() + self.assertEqual(1, mock.call_count) + 
self.assertEqual(call(id_gt=11, synchronize=False), mock.call_args) + mock.reset_mock() + + await self.snapshot_repository.synchronize() + self.assertEqual(1, mock.call_count) + self.assertEqual(call(id_gt=12, synchronize=False), mock.call_args) + mock.reset_mock() + + await self.snapshot_repository.synchronize() + self.assertEqual(1, mock.call_count) + self.assertEqual(call(id_gt=12, synchronize=False), mock.call_args) + mock.reset_mock() + + async def test_find_by_uuid(self): + await self.populate_and_synchronize() + condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) + + iterable = self.snapshot_repository.find(self.Car, condition, ordering=Ordering.ASC("updated_at")) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_with_transaction(self): + await self.populate_and_synchronize() + condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) + + iterable = self.snapshot_repository.find( + self.Car, + condition, + ordering=Ordering.ASC("updated_at"), + transaction=TransactionEntry(self.transaction_1), + ) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=4, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_with_transaction_delete(self): + await self.populate_and_synchronize() + condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) + + iterable = self.snapshot_repository.find( + self.Car, + condition, + 
ordering=Ordering.ASC("updated_at"), + transaction=TransactionEntry(self.transaction_2), + ) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_with_transaction_reverted(self): + await self.populate_and_synchronize() + condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) + + iterable = self.snapshot_repository.find( + self.Car, + condition, + ordering=Ordering.ASC("updated_at"), + transaction=TransactionEntry(self.transaction_4), + ) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_streaming_true(self): + await self.populate_and_synchronize() + condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) + + iterable = self.snapshot_repository.find( + self.Car, condition, streaming_mode=True, ordering=Ordering.ASC("updated_at") + ) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_with_duplicates(self): + await self.populate_and_synchronize() + uuids = [self.uuid_2, self.uuid_2, self.uuid_3] + condition = Condition.IN("uuid", uuids) + + iterable = self.snapshot_repository.find(self.Car, condition, ordering=Ordering.ASC("updated_at")) + observed = [v async 
for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_empty(self): + await self.populate_and_synchronize() + observed = {v async for v in self.snapshot_repository.find(self.Car, Condition.FALSE)} + + expected = set() + self.assertEqual(expected, observed) + + async def test_get(self): + await self.populate_and_synchronize() + observed = await self.snapshot_repository.get(self.Car, self.uuid_2) + + expected = self.Car( + 3, "blue", uuid=self.uuid_2, version=2, created_at=observed.created_at, updated_at=observed.updated_at + ) + self.assertEqual(expected, observed) + + async def test_get_with_transaction(self): + await self.populate_and_synchronize() + + observed = await self.snapshot_repository.get( + self.Car, self.uuid_2, transaction=TransactionEntry(self.transaction_1) + ) + + expected = self.Car( + 3, "blue", uuid=self.uuid_2, version=4, created_at=observed.created_at, updated_at=observed.updated_at + ) + self.assertEqual(expected, observed) + + async def test_get_raises(self): + await self.populate_and_synchronize() + with self.assertRaises(AlreadyDeletedException): + await self.snapshot_repository.get(self.Car, self.uuid_1) + with self.assertRaises(NotFoundException): + await self.snapshot_repository.get(self.Car, uuid4()) + + async def test_get_with_transaction_raises(self): + await self.populate_and_synchronize() + with self.assertRaises(AlreadyDeletedException): + await self.snapshot_repository.get(self.Car, self.uuid_2, transaction=TransactionEntry(self.transaction_2)) + + async def test_find(self): + await self.populate_and_synchronize() + condition = Condition.EQUAL("color", "blue") + iterable = 
self.snapshot_repository.find(self.Car, condition, ordering=Ordering.ASC("updated_at")) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) + + async def test_find_all(self): + await self.populate_and_synchronize() + iterable = self.snapshot_repository.find(self.Car, Condition.TRUE, Ordering.ASC("updated_at")) + observed = [v async for v in iterable] + + expected = [ + self.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=observed[0].created_at, + updated_at=observed[0].updated_at, + ), + self.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=observed[1].created_at, + updated_at=observed[1].updated_at, + ), + ] + self.assertEqual(expected, observed) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/__init__.py new file mode 100644 index 000000000..08cde5249 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/__init__.py @@ -0,0 +1,4 @@ +from .repositories import ( + MockedTransactionDatabaseOperationFactory, + TransactionRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/__init__.py new file mode 100644 index 000000000..d1a78141e --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/__init__.py @@ -0,0 +1,6 @@ +from .factories import ( + MockedTransactionDatabaseOperationFactory, +) +from 
.testcases import ( + TransactionRepositoryTestCase, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/factories.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/factories.py new file mode 100644 index 000000000..5be8648d4 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/factories.py @@ -0,0 +1,62 @@ +from datetime import ( + datetime, +) +from typing import ( + Optional, +) +from uuid import ( + UUID, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseClient, + MockedDatabaseOperation, +) + +from ....transactions import ( + TransactionDatabaseOperationFactory, + TransactionStatus, +) + + +class MockedTransactionDatabaseOperationFactory(TransactionDatabaseOperationFactory): + """For testing purposes.""" + + def build_create(self) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("create") + + def build_submit( + self, uuid: UUID, destination_uuid: UUID, status: TransactionStatus, event_offset: int, **kwargs + ) -> DatabaseOperation: + """For testing purposes.""" + return MockedDatabaseOperation("submit") + + def build_query( + self, + uuid: Optional[UUID] = None, + uuid_ne: Optional[UUID] = None, + uuid_in: Optional[tuple[UUID]] = None, + destination_uuid: Optional[UUID] = None, + status: Optional[str] = None, + status_in: Optional[tuple[str]] = None, + event_offset: Optional[int] = None, + event_offset_lt: Optional[int] = None, + event_offset_gt: Optional[int] = None, + event_offset_le: Optional[int] = None, + event_offset_ge: Optional[int] = None, + updated_at: Optional[datetime] = None, + updated_at_lt: Optional[datetime] = None, + updated_at_gt: Optional[datetime] = None, + updated_at_le: Optional[datetime] = None, + updated_at_ge: Optional[datetime] = None, + **kwargs, + ) -> DatabaseOperation: + 
"""For testing purposes.""" + return MockedDatabaseOperation("select") + + +MockedDatabaseClient.set_factory(TransactionDatabaseOperationFactory, MockedTransactionDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_pg.py b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/testcases.py similarity index 86% rename from packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_pg.py rename to packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/testcases.py index c24350f48..8276bbf85 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_pg.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/testing/transactions/repositories/testcases.py @@ -1,66 +1,71 @@ -import unittest +from abc import ( + ABC, + abstractmethod, +) from uuid import ( uuid4, ) from minos.aggregate import ( - DatabaseTransactionRepository, TransactionEntry, TransactionRepository, TransactionRepositoryConflictException, TransactionStatus, ) -from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, - DatabaseClientPool, -) from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - AggregateTestCase, + MinosTestCase, ) -# noinspection SqlNoDataSourceInspection -class TestDatabaseTransactionRepository(AggregateTestCase, DatabaseMinosTestCase): +class TransactionRepositoryTestCase(MinosTestCase, ABC): + __test__ = False + def setUp(self) -> None: super().setUp() - self.transaction_repository = DatabaseTransactionRepository() + self.transaction_repository = self.build_transaction_repository() self.uuid = uuid4() + self.uuid_1 = uuid4() + self.uuid_2 = uuid4() + self.uuid_3 = uuid4() + self.uuid_4 = uuid4() + self.uuid_5 = uuid4() - async def asyncSetUp(self) 
-> None: + self.entries = [ + TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12), + TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15), + TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16), + TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20), + TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1), + ] + + async def populate(self) -> None: + await self.transaction_repository.submit(TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12)) + await self.transaction_repository.submit(TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15)) + await self.transaction_repository.submit(TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16)) + await self.transaction_repository.submit(TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20)) + await self.transaction_repository.submit( + TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1) + ) + + async def asyncSetUp(self): await super().asyncSetUp() await self.transaction_repository.setup() - async def asyncTearDown(self) -> None: + async def asyncTearDown(self): await self.transaction_repository.destroy() await super().asyncTearDown() + def tearDown(self): + super().tearDown() + + @abstractmethod + def build_transaction_repository(self) -> TransactionRepository: + """For testing purposes.""" + async def test_subclass(self) -> None: - self.assertTrue(issubclass(DatabaseTransactionRepository, TransactionRepository)) - - def test_constructor(self): - pool = DatabaseClientPool.from_config(self.config) - repository = DatabaseTransactionRepository(pool) - self.assertIsInstance(repository, DatabaseTransactionRepository) - self.assertEqual(pool, repository.database_pool) - - def test_from_config(self): - repository = DatabaseTransactionRepository.from_config(self.config) - self.assertIsInstance(repository.database_pool, DatabaseClientPool) - - async def test_setup(self): - async with 
AiopgDatabaseClient(**self.config.get_default_database()) as client: - operation = AiopgDatabaseOperation( - "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'aggregate_transaction');" - ) - await client.execute(operation) - response = (await client.fetch_one())[0] - self.assertTrue(response) + self.assertTrue(isinstance(self.transaction_repository, TransactionRepository)) async def test_submit(self): await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) @@ -68,11 +73,6 @@ async def test_submit(self): observed = [v async for v in self.transaction_repository.select()] self.assertEqual(expected, observed) - async def test_select_empty(self): - expected = [] - observed = [v async for v in self.transaction_repository.select()] - self.assertEqual(expected, observed) - async def test_submit_pending_raises(self): await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) with self.assertRaises(TransactionRepositoryConflictException): @@ -145,75 +145,49 @@ async def test_submit_rejected_raises(self): with self.assertRaises(TransactionRepositoryConflictException): await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.REJECTED, 34)) - -class TestDatabaseTransactionRepositorySelect(AggregateTestCase, DatabaseMinosTestCase): - def setUp(self) -> None: - super().setUp() - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_3 = uuid4() - self.uuid_4 = uuid4() - self.uuid_5 = uuid4() - - self.transaction_repository = DatabaseTransactionRepository() - - self.entries = [ - TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12), - TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15), - TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16), - TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20), - TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1), - ] - - async def 
asyncSetUp(self): - await super().asyncSetUp() - await self.transaction_repository.setup() - await self._populate() - - async def _populate(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20)) - await self.transaction_repository.submit( - TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1) - ) - - async def asyncTearDown(self): - await self.transaction_repository.destroy() - await super().asyncTearDown() + async def test_select_empty(self): + expected = [] + observed = [v async for v in self.transaction_repository.select()] + self.assertEqual(expected, observed) async def test_select(self): + await self.populate() expected = self.entries observed = [v async for v in self.transaction_repository.select()] self.assertEqual(expected, observed) async def test_select_uuid(self): + await self.populate() expected = [self.entries[1]] observed = [v async for v in self.transaction_repository.select(uuid=self.uuid_2)] self.assertEqual(expected, observed) async def test_select_uuid_ne(self): + await self.populate() expected = [self.entries[0], self.entries[2], self.entries[3], self.entries[4]] observed = [v async for v in self.transaction_repository.select(uuid_ne=self.uuid_2)] self.assertEqual(expected, observed) async def test_select_uuid_in(self): + await self.populate() expected = [self.entries[1], self.entries[2]] observed = [v async for v in self.transaction_repository.select(uuid_in=(self.uuid_2, self.uuid_3))] self.assertEqual(expected, observed) async def test_select_destination_uuid(self): + await self.populate() expected = [self.entries[4]] observed = [v async for v in 
self.transaction_repository.select(destination_uuid=self.uuid_1)] self.assertEqual(expected, observed) async def test_select_status(self): + await self.populate() expected = [self.entries[0], self.entries[1], self.entries[4]] observed = [v async for v in self.transaction_repository.select(status=TransactionStatus.PENDING)] self.assertEqual(expected, observed) async def test_select_status_in(self): + await self.populate() expected = [self.entries[2], self.entries[3]] observed = [ v @@ -224,31 +198,37 @@ async def test_select_status_in(self): self.assertEqual(expected, observed) async def test_select_event_offset(self): + await self.populate() expected = [self.entries[1]] observed = [v async for v in self.transaction_repository.select(event_offset=15)] self.assertEqual(expected, observed) async def test_select_event_offset_lt(self): + await self.populate() expected = [self.entries[0]] observed = [v async for v in self.transaction_repository.select(event_offset_lt=15)] self.assertEqual(expected, observed) async def test_select_event_offset_gt(self): + await self.populate() expected = [self.entries[2], self.entries[3], self.entries[4]] observed = [v async for v in self.transaction_repository.select(event_offset_gt=15)] self.assertEqual(expected, observed) async def test_select_event_offset_le(self): + await self.populate() expected = [self.entries[0], self.entries[1]] observed = [v async for v in self.transaction_repository.select(event_offset_le=15)] self.assertEqual(expected, observed) async def test_select_event_offset_ge(self): + await self.populate() expected = [self.entries[1], self.entries[2], self.entries[3], self.entries[4]] observed = [v async for v in self.transaction_repository.select(event_offset_ge=15)] self.assertEqual(expected, observed) async def test_select_updated_at(self): + await self.populate() updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at expected = [self.entries[2]] @@ -256,6 +236,7 @@ async def 
test_select_updated_at(self): self.assertEqual(expected, observed) async def test_select_updated_at_lt(self): + await self.populate() updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at expected = [self.entries[0], self.entries[1]] @@ -263,6 +244,7 @@ async def test_select_updated_at_lt(self): self.assertEqual(expected, observed) async def test_select_updated_at_gt(self): + await self.populate() updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at expected = [self.entries[3], self.entries[4]] @@ -270,6 +252,7 @@ async def test_select_updated_at_gt(self): self.assertEqual(expected, observed) async def test_select_updated_at_le(self): + await self.populate() updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at expected = [self.entries[0], self.entries[1], self.entries[2]] @@ -277,12 +260,9 @@ async def test_select_updated_at_le(self): self.assertEqual(expected, observed) async def test_select_updated_at_ge(self): + await self.populate() updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at expected = [self.entries[2], self.entries[3], self.entries[4]] observed = [v async for v in self.transaction_repository.select(updated_at_ge=updated_at)] self.assertEqual(expected, observed) - - -if __name__ == "__main__": - unittest.main() diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/__init__.py index 2a2e506dd..387f13660 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/__init__.py @@ -6,7 +6,6 @@ TransactionStatus, ) from .repositories import ( - AiopgTransactionDatabaseOperationFactory, DatabaseTransactionRepository, InMemoryTransactionRepository, TransactionDatabaseOperationFactory, diff --git 
a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/entries.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/entries.py index 2d7dd7f1f..d883c2410 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/entries.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/entries.py @@ -17,6 +17,7 @@ ) from typing import ( TYPE_CHECKING, + Any, Iterable, Optional, Union, @@ -316,6 +317,19 @@ def __repr__(self): f"destination_uuid={self.destination_uuid!r}, updated_at={self.updated_at!r})" ) + def as_raw(self) -> dict[str, Any]: + """Get a raw representation of the instance. + + :return: A dictionary in which the keys are attribute names and values the attribute contents. + """ + return { + "uuid": self.uuid, + "status": self.status, + "event_offset": self.event_offset, + "destination_uuid": self.destination_uuid, + "updated_at": self.updated_at, + } + class TransactionStatus(str, Enum): """Transaction Status Enum.""" diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/__init__.py index 33907a58d..efc6c6599 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/__init__.py @@ -2,7 +2,6 @@ TransactionRepository, ) from .database import ( - AiopgTransactionDatabaseOperationFactory, DatabaseTransactionRepository, TransactionDatabaseOperationFactory, ) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/__init__.py index 8c6f72e99..a418d1f39 100644 --- 
a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgTransactionDatabaseOperationFactory, TransactionDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/__init__.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/__init__.py index a7bfcb28c..c9fea61b2 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/__init__.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( TransactionDatabaseOperationFactory, ) -from .aiopg import ( - AiopgTransactionDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/abc.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/abc.py index 07c09914d..5171be2b6 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/abc.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/abc.py @@ -32,19 +32,15 @@ class TransactionDatabaseOperationFactory(DatabaseOperationFactory, ABC): """Transaction Database Operation Factory base class.""" @abstractmethod - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the database operation to create the snapshot table. :return: A ``DatabaseOperation`` instance. 
""" @abstractmethod - def build_submit_row( - self, - uuid: UUID, - destination_uuid: UUID, - status: TransactionStatus, - event_offset: int, + def build_submit( + self, uuid: UUID, destination_uuid: UUID, status: TransactionStatus, event_offset: int, **kwargs ) -> DatabaseOperation: """Build the database operation to submit a row. @@ -52,11 +48,12 @@ def build_submit_row( :param destination_uuid: The identifier of the destination transaction. :param status: The status of the transaction. :param event_offset: The event offset of the transaction. + :param kwargs: Additional named arguments. :return: A ``DatabaseOperation`` instance. """ @abstractmethod - def build_select_rows( + def build_query( self, uuid: Optional[UUID] = None, uuid_ne: Optional[UUID] = None, diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/impl.py b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/impl.py index 202bfd326..2b0ac0026 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/impl.py +++ b/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/impl.py @@ -8,8 +8,8 @@ ) from minos.common import ( - Config, DatabaseMixin, + ProgrammingException, ) from ....exceptions import ( @@ -29,25 +29,23 @@ class DatabaseTransactionRepository(DatabaseMixin[TransactionDatabaseOperationFactory], TransactionRepository): """Database Transaction Repository class.""" - @classmethod - def _from_config(cls, config: Config, **kwargs) -> DatabaseTransactionRepository: - return super()._from_config(config, database_key=None, **kwargs) + def __init__(self, *args, database_key: Optional[tuple[str]] = None, **kwargs): + if database_key is None: + database_key = ("aggregate", "transaction") + super().__init__(*args, database_key=database_key, **kwargs) async def _setup(self): - operation = 
self.operation_factory.build_create_table() - await self.submit_query(operation) + operation = self.database_operation_factory.build_create() + await self.execute_on_database(operation) async def _submit(self, transaction: TransactionEntry) -> TransactionEntry: - operation = self.operation_factory.build_submit_row( - uuid=transaction.uuid, - destination_uuid=transaction.destination_uuid, - status=transaction.status, - event_offset=transaction.event_offset, + operation = self.database_operation_factory.build_submit( + **transaction.as_raw(), ) try: - updated_at = await self.submit_query_and_fetchone(operation) - except StopAsyncIteration: + updated_at = await self.execute_on_database_and_fetch_one(operation) + except ProgrammingException: raise TransactionRepositoryConflictException( f"{transaction!r} status is invalid respect to the previous one." ) @@ -55,6 +53,6 @@ async def _submit(self, transaction: TransactionEntry) -> TransactionEntry: return transaction async def _select(self, streaming_mode: Optional[bool] = None, **kwargs) -> AsyncIterator[TransactionEntry]: - operation = self.operation_factory.build_select_rows(**kwargs) - async for row in self.submit_query_and_iter(operation, streaming_mode=streaming_mode): + operation = self.database_operation_factory.build_query(**kwargs) + async for row in self.execute_on_database_and_fetch_all(operation, streaming_mode=streaming_mode): yield TransactionEntry(*row, transaction_repository=self) diff --git a/packages/core/minos-microservice-aggregate/poetry.lock b/packages/core/minos-microservice-aggregate/poetry.lock index 852cdb5f5..6a47dc8e4 100644 --- a/packages/core/minos-microservice-aggregate/poetry.lock +++ b/packages/core/minos-microservice-aggregate/poetry.lock @@ -19,29 +19,6 @@ develop = ["aiocontextvars (==0.2.2)", "aiohttp-asgi", "aiohttp (<4)", "async-ti raven = ["raven-aiohttp"] uvloop = ["uvloop (>=0.14,<1)"] -[[package]] -name = "aiopg" -version = "1.3.3" -description = "Postgres integration with 
asyncio." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -async-timeout = ">=3.0,<5.0" -psycopg2-binary = ">=2.8.4" - -[package.extras] -sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"] - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -239,7 +216,6 @@ develop = true [package.dependencies] aiomisc = ">=14.0.3,<15.8.0" -aiopg = "^1.2.1" cached-property = "^1.5.2" dependency-injector = "^4.32.2" fastavro = "^1.4.0" @@ -262,10 +238,8 @@ python-versions = "^3.9" develop = true [package.dependencies] -aiopg = "^1.2.1" crontab = "^0.23.0" minos-microservice-common = "^0.7.0*" -psycopg2-binary = "^2.9.3" [package.source] type = "directory" @@ -330,14 +304,6 @@ python-versions = ">=3.6" dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "psycopg2-binary" -version = "2.9.3" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "py" version = "1.11.0" @@ -442,21 +408,13 @@ test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,< [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "6396dea0a2b7eaacdc945a96189dc6569ed60a0646c252be31e273aad6629985" +content-hash = "9c9ca4862b7363ae69ed21727f7a65a29a33e373df45250938f7c75f398b8da5" [metadata.files] aiomisc = [ {file = "aiomisc-15.7.3-py3-none-any.whl", hash = "sha256:0403e83268e98d0f2a125a70d13303fe1a2358e36db3daf02df032c7fa4f1525"}, {file = "aiomisc-15.7.3.tar.gz", hash = "sha256:ba250a34bd4609ced36111cb50580f57c3d52f3955f953a53ecb2986988baedc"}, ] -aiopg = [ - {file = "aiopg-1.3.3-py3-none-any.whl", hash = "sha256:2842dd8741460eeef940032dcb577bfba4d4115205dd82a73ce13b3271f5bf0a"}, - {file = "aiopg-1.3.3.tar.gz", 
hash = "sha256:547c6ba4ea0d73c2a11a2f44387d7133cc01d3c6f3b8ed976c0ac1eff4f595d7"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -709,64 +667,6 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -psycopg2-binary = [ - {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, - {file = 
"psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, - {file = 
"psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, - {file = 
"psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash 
= "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, - {file 
= "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, -] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, diff --git a/packages/core/minos-microservice-aggregate/pyproject.toml b/packages/core/minos-microservice-aggregate/pyproject.toml index 68dd85bf2..d8b52373b 100644 --- a/packages/core/minos-microservice-aggregate/pyproject.toml +++ b/packages/core/minos-microservice-aggregate/pyproject.toml @@ -34,7 +34,6 @@ python = "^3.9" minos-microservice-common = { version ="^0.7.0*", allow-prereleases = true } minos-microservice-networks = { version ="^0.7.0*", allow-prereleases = true } cached-property = "^1.5.2" -psycopg2-binary = "^2.9.3" [tool.poetry.dev-dependencies] minos-microservice-common = { path = "../minos-microservice-common", develop = true } diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_entries.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_entries.py index a2941bdfc..cf57c6055 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_entries.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_entries.py @@ -39,6 +39,11 @@ def test_constructor(self): self.assertEqual(None, entry.created_at) self.assertEqual(NULL_UUID, entry.transaction_uuid) + # noinspection SpellCheckingInspection + def test_constructor_with_memoryview_data(self): + entry = EventEntry(self.uuid, "example.Car", 0, memoryview(bytes("car", "utf-8"))) + self.assertEqual(bytes("car", "utf-8"), entry.data) + def test_constructor_extended(self): entry = EventEntry( uuid=self.uuid, diff --git 
a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_abc.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_abc.py index 19e0d76af..90a09c8ee 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_abc.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_abc.py @@ -35,6 +35,7 @@ NULL_UUID, NotProvidedException, SetupMixin, + classname, current_datetime, ) from minos.networks import ( @@ -42,6 +43,7 @@ ) from tests.utils import ( AggregateTestCase, + Car, FakeAsyncIterator, FakeLock, ) @@ -465,17 +467,16 @@ async def test_select(self): self.event_repository._select = mock uuid = uuid4() - name = "path.to.Product" transaction_uuid = uuid4() - iterable = self.event_repository.select(uuid=uuid, name=name, id_gt=56, transaction_uuid=transaction_uuid) + iterable = self.event_repository.select(uuid=uuid, name=Car, id_gt=56, transaction_uuid=transaction_uuid) observed = [a async for a in iterable] self.assertEqual(list(range(5)), observed) self.assertEqual(1, mock.call_count) args = call( uuid=uuid, - name=name, + name=classname(Car), version=None, version_lt=None, version_gt=None, diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_database.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_database.py new file mode 100644 index 000000000..fef33992f --- /dev/null +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_database.py @@ -0,0 +1,392 @@ +import unittest +from datetime import ( + datetime, + timezone, +) +from unittest.mock import ( + patch, +) +from uuid import ( + uuid4, +) + +from minos.aggregate import ( + Action, + DatabaseEventRepository, + EventRepository, +) +from minos.aggregate.testing 
import ( + EventRepositoryTestCase, +) +from minos.common import ( + DatabaseClient, + IntegrityException, + current_datetime, +) +from tests.utils import ( + AggregateTestCase, + FakeAsyncIterator, +) + + +class TestDatabaseEventRepositorySubmit(AggregateTestCase, EventRepositoryTestCase): + __test__ = True + + def build_event_repository(self) -> EventRepository: + """For testing purposes.""" + return DatabaseEventRepository.from_config(self.config) + + async def test_generate_uuid(self): + fetch_one = [ + (1, self.uuid, 1, current_datetime()), + ] + fetch_all = [(self.uuid, "example.Car", 1, bytes(), 1, Action.CREATE, current_datetime())] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + with patch.object(DatabaseClient, "fetch_all", return_value=FakeAsyncIterator(fetch_all)): + await super().test_generate_uuid() + + async def test_submit(self): + fetch_one = [ + (1, self.uuid, 1, current_datetime()), + ] + fetch_all = [(self.uuid, "example.Car", 1, bytes(), 1, Action.CREATE, current_datetime())] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + with patch.object(DatabaseClient, "fetch_all", return_value=FakeAsyncIterator(fetch_all)): + await super().test_submit() + + async def test_submit_with_version(self): + fetch_one = [ + (1, self.uuid, 3, current_datetime()), + ] + fetch_all = [(self.uuid, "example.Car", 3, bytes(), 1, Action.CREATE, current_datetime())] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + with patch.object(DatabaseClient, "fetch_all", return_value=FakeAsyncIterator(fetch_all)): + await super().test_submit_with_version() + + async def test_submit_with_created_at(self): + created_at = datetime(2021, 10, 25, 8, 30, tzinfo=timezone.utc) + fetch_one = [ + (1, self.uuid, 1, created_at), + ] + fetch_all = [(self.uuid, "example.Car", 1, bytes(), 1, Action.CREATE, created_at)] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + with 
patch.object(DatabaseClient, "fetch_all", return_value=FakeAsyncIterator(fetch_all)): + await super().test_submit_with_created_at() + + async def test_submit_raises_duplicate(self): + fetch_one = [ + (1, uuid4(), 1, current_datetime()), + IntegrityException(""), + (1,), + ] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + await super().test_submit_raises_duplicate() + + async def test_offset(self): + fetch_one = [ + (0,), + (1, uuid4(), 1, current_datetime()), + (1,), + ] + with patch.object(DatabaseClient, "fetch_one", side_effect=fetch_one): + await super().test_offset() + + async def populate(self) -> None: + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (1, uuid4(), 1, current_datetime()), + (2, uuid4(), 2, current_datetime()), + (3, uuid4(), 1, current_datetime()), + (4, uuid4(), 3, current_datetime()), + (5, uuid4(), 4, current_datetime()), + (6, uuid4(), 2, current_datetime()), + (7, uuid4(), 1, current_datetime()), + (8, uuid4(), 3, current_datetime()), + (9, uuid4(), 3, current_datetime()), + (10, uuid4(), 4, current_datetime()), + ], + ): + await super().populate() + + async def test_select(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select() + + async def test_select_id(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[1].as_raw().values()), + ] + ), + ): + await super().test_select_id() + + async def 
test_select_id_lt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + ] + ), + ): + await super().test_select_id_lt() + + async def test_select_id_gt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_id_gt() + + async def test_select_id_le(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + ] + ), + ): + await super().test_select_id_le() + + async def test_select_id_ge(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_id_ge() + + async def test_select_uuid(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_uuid() + + async 
def test_select_name(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[6].as_raw().values()), + ] + ), + ): + await super().test_select_name() + + async def test_select_version(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_version() + + async def test_select_version_lt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + ] + ), + ): + await super().test_select_version_lt() + + async def test_select_version_gt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_version_gt() + + async def test_select_version_le(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + ] + ), + ): + await super().test_select_version_le() + + async def test_select_version_ge(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + 
tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_version_ge() + + async def test_select_transaction_uuid_null(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[0].as_raw().values()), + tuple(self.entries[1].as_raw().values()), + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[3].as_raw().values()), + tuple(self.entries[4].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[6].as_raw().values()), + ] + ), + ): + await super().test_select_transaction_uuid_null() + + async def test_select_transaction_uuid(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_transaction_uuid() + + async def test_select_transaction_uuid_ne(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_transaction_uuid_ne() + + async def test_select_transaction_uuid_in(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + ): + await super().test_select_transaction_uuid_in() + + async def test_select_combined(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(self.entries[2].as_raw().values()), + tuple(self.entries[5].as_raw().values()), + tuple(self.entries[7].as_raw().values()), + tuple(self.entries[8].as_raw().values()), + tuple(self.entries[9].as_raw().values()), + ] + ), + 
): + await super().test_select_combined() + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_memory.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_memory.py index 02d1a69e3..c1efcdce1 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_memory.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_memory.py @@ -4,21 +4,15 @@ EventRepository, InMemoryEventRepository, ) -from tests.testcases import ( - EventRepositorySelectTestCase, - EventRepositorySubmitTestCase, +from minos.aggregate.testing import ( + EventRepositoryTestCase, +) +from tests.utils import ( + AggregateTestCase, ) -class TestInMemoryEventRepositorySubmit(EventRepositorySubmitTestCase): - __test__ = True - - def build_event_repository(self) -> EventRepository: - """For testing purposes.""" - return InMemoryEventRepository() - - -class TestInMemoryEventRepositorySelect(EventRepositorySelectTestCase): +class TestInMemoryEventRepositorySubmit(AggregateTestCase, EventRepositoryTestCase): __test__ = True def build_event_repository(self) -> EventRepository: diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_exceptions.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_exceptions.py index d41042910..6f1ca4a4a 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_exceptions.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_exceptions.py @@ -3,31 +3,68 @@ from minos.aggregate import ( AggregateException, AlreadyDeletedException, + Event, + EventRepositoryConflictException, EventRepositoryException, NotFoundException, + SnapshotRepositoryConflictException, SnapshotRepositoryException, + TransactionRepositoryConflictException, + 
TransactionRepositoryException, ) from minos.common import ( MinosException, ) +from tests.utils import ( + AggregateTestCase, + Car, +) -class TestExceptions(unittest.TestCase): +class TestExceptions(AggregateTestCase): def test_base(self): self.assertTrue(issubclass(AggregateException, MinosException)) - def test_repository(self): + def test_event(self): self.assertTrue(issubclass(EventRepositoryException, AggregateException)) + def test_event_conflict(self): + message = "There was a conflict" + offset = 56 + exception = EventRepositoryConflictException(message, offset) + + self.assertIsInstance(exception, EventRepositoryException) + self.assertEqual(message, str(exception)) + self.assertEqual(offset, exception.offset) + def test_snapshot(self): self.assertTrue(issubclass(SnapshotRepositoryException, AggregateException)) + def test_snapshot_conflict(self): + entity = Car(3, "red") + event = Event.from_root_entity(entity) + exception = SnapshotRepositoryConflictException(entity, event) + + self.assertIsInstance(exception, SnapshotRepositoryException) + self.assertEqual(entity, exception.previous) + self.assertEqual(event, exception.event) + def test_snapshot_not_found(self): self.assertTrue(issubclass(NotFoundException, SnapshotRepositoryException)) def test_snapshot_already_deleted(self): self.assertTrue(issubclass(AlreadyDeletedException, SnapshotRepositoryException)) + def test_transaction(self): + self.assertTrue(issubclass(TransactionRepositoryException, AggregateException)) + + def test_transaction_conflict(self): + message = "There was a conflict" + exception = TransactionRepositoryConflictException(message) + + self.assertIsInstance(exception, TransactionRepositoryException) + self.assertEqual(message, str(exception)) + if __name__ == "__main__": unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_entries.py 
b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_entries.py index a3408b759..6d597c56d 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_entries.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_entries.py @@ -1,3 +1,4 @@ +import json import unittest from datetime import ( datetime, @@ -10,8 +11,14 @@ ) from minos.aggregate import ( + AlreadyDeletedException, + Event, + EventEntry, SnapshotEntry, ) +from minos.common import ( + MinosJsonBinaryProtocol, +) from tests.utils import ( AggregateTestCase, Car, @@ -41,6 +48,17 @@ def test_constructor(self): self.assertEqual(None, entry.created_at) self.assertEqual(None, entry.updated_at) + def test_constructor_with_bytes_schema(self): + raw = MinosJsonBinaryProtocol.encode(self.schema) + entry = SnapshotEntry(self.uuid, "example.Car", 0, raw, self.data) + self.assertEqual(self.schema, entry.schema) + + # noinspection SpellCheckingInspection + def test_constructor_with_memoryview_schema(self): + raw = memoryview(MinosJsonBinaryProtocol.encode(self.schema)) + entry = SnapshotEntry(self.uuid, "example.Car", 0, raw, self.data) + self.assertEqual(self.schema, entry.schema) + def test_constructor_extended(self): entry = SnapshotEntry( self.uuid, @@ -71,6 +89,18 @@ def test_from_root_entity(self): self.assertEqual(car.created_at, entry.created_at) self.assertEqual(car.updated_at, entry.updated_at) + def test_from_event_entry(self): + car = Car(3, "blue", uuid=self.uuid, version=1) + event_entry = EventEntry.from_event(Event.from_root_entity(car), version=1) + with patch("minos.common.AvroSchemaEncoder.generate_random_str", return_value="hello"): + snapshot_entry = SnapshotEntry.from_event_entry(event_entry) + self.assertEqual(event_entry.uuid, snapshot_entry.uuid) + self.assertEqual(event_entry.name, snapshot_entry.name) + self.assertEqual(event_entry.version, snapshot_entry.version) + 
self.assertEqual(event_entry.created_at, snapshot_entry.created_at) + self.assertEqual(event_entry.created_at, snapshot_entry.updated_at) + self.assertEqual(event_entry.transaction_uuid, snapshot_entry.transaction_uuid) + def test_equals(self): a = SnapshotEntry(self.uuid, "example.Car", 0, self.schema, self.data) b = SnapshotEntry(self.uuid, "example.Car", 0, self.schema, self.data) @@ -86,6 +116,12 @@ def test_build(self): entry = SnapshotEntry.from_root_entity(car) self.assertEqual(car, entry.build()) + def test_build_raises(self): + entry = SnapshotEntry(uuid=self.uuid, name="example.Car", version=0, schema=self.schema) + + with self.assertRaises(AlreadyDeletedException): + entry.build() + def test_repr(self): name = "example.Car" version = 0 @@ -112,6 +148,55 @@ def test_repr(self): self.assertEqual(expected, repr(entry)) + def test_as_raw(self): + name = "example.Car" + version = 0 + created_at = datetime(2020, 1, 10, 4, 23) + updated_at = datetime(2020, 1, 10, 4, 25) + transaction_uuid = uuid4() + + entry = SnapshotEntry( + uuid=self.uuid, + name=name, + version=version, + schema=self.schema, + data=self.data, + created_at=created_at, + updated_at=updated_at, + transaction_uuid=transaction_uuid, + ) + + expected = { + "created_at": created_at, + "data": json.dumps(self.data), + "name": name, + "schema": MinosJsonBinaryProtocol.encode(self.schema), + "transaction_uuid": transaction_uuid, + "updated_at": updated_at, + "uuid": self.uuid, + "version": version, + } + + self.assertEqual(expected, entry.as_raw()) + + def test_encoded_schema(self): + entry = SnapshotEntry(uuid=self.uuid, name="example.Car", version=0, schema=self.schema) + expected = MinosJsonBinaryProtocol.encode(self.schema) + self.assertEqual(expected, entry.encoded_schema) + + def test_encoded_schema_none(self): + entry = SnapshotEntry(uuid=self.uuid, name="example.Car", version=0) + self.assertEqual(None, entry.encoded_schema) + + def test_encoded_data(self): + entry = 
SnapshotEntry(uuid=self.uuid, name="example.Car", version=0, schema=self.schema, data=self.data) + expected = json.dumps(self.data) + self.assertEqual(expected, entry.encoded_data) + + def test_encoded_none(self): + entry = SnapshotEntry(uuid=self.uuid, name="example.Car", version=0, schema=self.schema) + self.assertEqual(None, entry.encoded_data) + if __name__ == "__main__": unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_memory.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_memory.py deleted file mode 100644 index d0a5887a0..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_memory.py +++ /dev/null @@ -1,359 +0,0 @@ -import unittest -from datetime import ( - datetime, -) -from uuid import ( - uuid4, -) - -from minos.aggregate import ( - AlreadyDeletedException, - Condition, - EventEntry, - FieldDiff, - FieldDiffContainer, - InMemorySnapshotRepository, - NotFoundException, - Ordering, - SnapshotEntry, - SnapshotRepository, - TransactionEntry, - TransactionStatus, -) -from minos.common import ( - NotProvidedException, -) -from tests.utils import ( - AggregateTestCase, - Car, -) - - -class TestInMemorySnapshotRepository(AggregateTestCase): - def setUp(self) -> None: - super().setUp() - - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_3 = uuid4() - - self.transaction_1 = uuid4() - self.transaction_2 = uuid4() - self.transaction_3 = uuid4() - self.transaction_4 = uuid4() - - async def asyncSetUp(self): - await super().asyncSetUp() - await self._populate() - - async def _populate(self): - diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) - # noinspection PyTypeChecker - name: str = Car.classname - await self.event_repository.create(EventEntry(self.uuid_1, name, 1, diff.avro_bytes)) - await self.event_repository.update(EventEntry(self.uuid_1, name, 2, 
diff.avro_bytes)) - await self.event_repository.create(EventEntry(self.uuid_2, name, 1, diff.avro_bytes)) - await self.event_repository.update(EventEntry(self.uuid_1, name, 3, diff.avro_bytes)) - await self.event_repository.delete(EventEntry(self.uuid_1, name, 4)) - await self.event_repository.update(EventEntry(self.uuid_2, name, 2, diff.avro_bytes)) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 3, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_2) - ) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 4, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.create(EventEntry(self.uuid_3, name, 1, diff.avro_bytes)) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_3) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_1, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_2, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_3, TransactionStatus.REJECTED, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry( - self.transaction_4, TransactionStatus.REJECTED, await self.event_repository.offset, self.transaction_3 - ) - ) - - def test_type(self): - self.assertTrue(issubclass(InMemorySnapshotRepository, SnapshotRepository)) - - def test_constructor_raises(self): - with self.assertRaises(NotProvidedException): - # noinspection PyTypeChecker - InMemorySnapshotRepository(event_repository=None) - - with self.assertRaises(NotProvidedException): - # noinspection PyTypeChecker - InMemorySnapshotRepository(transaction_repository=None) - - async 
def test_find_by_uuid(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - iterable = self.snapshot_repository.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - iterable = self.snapshot_repository.find( - "tests.utils.Car", - condition, - ordering=Ordering.ASC("updated_at"), - transaction=TransactionEntry(self.transaction_1), - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=4, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction_delete(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - iterable = self.snapshot_repository.find( - "tests.utils.Car", - condition, - ordering=Ordering.ASC("updated_at"), - transaction=TransactionEntry(self.transaction_2), - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction_reverted(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - iterable = self.snapshot_repository.find( - "tests.utils.Car", - condition, - ordering=Ordering.ASC("updated_at"), - 
transaction=TransactionEntry(self.transaction_4), - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_streaming_true(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.snapshot_repository.find( - "tests.utils.Car", condition, streaming_mode=True, ordering=Ordering.ASC("updated_at") - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_duplicates(self): - uuids = [self.uuid_2, self.uuid_2, self.uuid_3] - condition = Condition.IN("uuid", uuids) - iterable = self.snapshot_repository.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_empty(self): - observed = {v async for v in self.snapshot_repository.find("tests.utils.Car", Condition.FALSE)} - - expected = set() - self.assertEqual(expected, observed) - - async def test_get(self): - observed = await self.snapshot_repository.get("tests.utils.Car", self.uuid_2) - - expected = Car( - 3, "blue", 
uuid=self.uuid_2, version=2, created_at=observed.created_at, updated_at=observed.updated_at - ) - self.assertEqual(expected, observed) - - async def test_get_with_transaction(self): - observed = await self.snapshot_repository.get( - "tests.utils.Car", self.uuid_2, transaction=TransactionEntry(self.transaction_1) - ) - - expected = Car( - 3, "blue", uuid=self.uuid_2, version=4, created_at=observed.created_at, updated_at=observed.updated_at - ) - self.assertEqual(expected, observed) - - async def test_get_raises(self): - with self.assertRaises(AlreadyDeletedException): - await self.snapshot_repository.get("tests.utils.Car", self.uuid_1) - with self.assertRaises(NotFoundException): - await self.snapshot_repository.get("tests.utils.Car", uuid4()) - - async def test_get_with_transaction_raises(self): - with self.assertRaises(AlreadyDeletedException): - await self.snapshot_repository.get( - "tests.utils.Car", self.uuid_2, transaction=TransactionEntry(self.transaction_2) - ) - - async def test_find(self): - condition = Condition.EQUAL("color", "blue") - iterable = self.snapshot_repository.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_all(self): - iterable = self.snapshot_repository.find("tests.utils.Car", Condition.TRUE, Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - 
updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - def _assert_equal_snapshot_entries(self, expected: list[SnapshotEntry], observed: list[SnapshotEntry]): - self.assertEqual(len(expected), len(observed)) - for exp, obs in zip(expected, observed): - if exp.data is None: - with self.assertRaises(AlreadyDeletedException): - # noinspection PyStatementEffect - obs.build() - else: - self.assertEqual(exp.build(), obs.build()) - self.assertIsInstance(obs.created_at, datetime) - self.assertIsInstance(obs.updated_at, datetime) - - -if __name__ == "__main__": - unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/__init__.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_abc.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_abc.py deleted file mode 100644 index 688d092ef..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_abc.py +++ /dev/null @@ -1,43 +0,0 @@ -import unittest - -from minos.aggregate import ( - DatabaseSnapshotSetup, -) -from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, -) -from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - AggregateTestCase, -) - - -# noinspection SqlNoDataSourceInspection -class TestDatabaseSnapshotSetup(AggregateTestCase, DatabaseMinosTestCase): - async def test_setup_snapshot_table(self): - async with DatabaseSnapshotSetup.from_config(self.config): - async with AiopgDatabaseClient(**self.config.get_default_database()) as client: - 
operation = AiopgDatabaseOperation( - "SELECT EXISTS (SELECT FROM pg_tables WHERE schemaname = 'public' AND tablename = 'snapshot');" - ) - await client.execute(operation) - observed = (await client.fetch_one())[0] - self.assertEqual(True, observed) - - async def test_setup_snapshot_aux_offset_table(self): - async with DatabaseSnapshotSetup.from_config(self.config): - async with AiopgDatabaseClient(**self.config.get_default_database()) as client: - operation = AiopgDatabaseOperation( - "SELECT EXISTS (SELECT FROM pg_tables WHERE " - "schemaname = 'public' AND tablename = 'snapshot_aux_offset');" - ) - await client.execute(operation) - observed = (await client.fetch_one())[0] - self.assertEqual(True, observed) - - -if __name__ == "__main__": - unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_api.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_api.py deleted file mode 100644 index 45c387dc9..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_api.py +++ /dev/null @@ -1,98 +0,0 @@ -import unittest -from unittest.mock import ( - AsyncMock, - MagicMock, - call, -) -from uuid import ( - uuid4, -) - -from minos.aggregate import ( - Condition, - DatabaseSnapshotReader, - DatabaseSnapshotRepository, - DatabaseSnapshotWriter, - Ordering, - TransactionEntry, -) -from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - AggregateTestCase, - FakeAsyncIterator, -) - - -class TestDatabaseSnapshotRepository(AggregateTestCase, DatabaseMinosTestCase): - def setUp(self) -> None: - super().setUp() - - self.snapshot_repository = DatabaseSnapshotRepository.from_config(self.config) - - self.dispatch_mock = AsyncMock() - self.get_mock = AsyncMock(return_value=1) - self.find_mock = MagicMock(return_value=FakeAsyncIterator(range(5))) - self.snapshot_repository.reader.get = 
self.get_mock - self.snapshot_repository.reader.find = self.find_mock - self.snapshot_repository.writer.dispatch = self.dispatch_mock - - self.classname = "path.to.Product" - - async def asyncSetUp(self): - await super().asyncSetUp() - await self.snapshot_repository.setup() - - async def asyncTearDown(self): - await self.snapshot_repository.destroy() - await super().asyncTearDown() - - def test_from_config(self): - self.assertIsInstance(self.snapshot_repository.reader, DatabaseSnapshotReader) - self.assertIsInstance(self.snapshot_repository.writer, DatabaseSnapshotWriter) - - async def test_get(self): - transaction = TransactionEntry() - uuid = uuid4() - observed = await self.snapshot_repository.get(self.classname, uuid, transaction) - self.assertEqual(1, observed) - - self.assertEqual(1, self.dispatch_mock.call_count) - self.assertEqual(call(), self.dispatch_mock.call_args) - - self.assertEqual(1, self.get_mock.call_count) - args = call(name=self.classname, uuid=uuid, transaction=transaction) - self.assertEqual(args, self.get_mock.call_args) - - async def test_find(self): - transaction = TransactionEntry() - iterable = self.snapshot_repository.find( - self.classname, Condition.TRUE, Ordering.ASC("name"), 10, True, transaction - ) - observed = [a async for a in iterable] - self.assertEqual(list(range(5)), observed) - - self.assertEqual(1, self.dispatch_mock.call_count) - self.assertEqual(call(), self.dispatch_mock.call_args) - - self.assertEqual(1, self.find_mock.call_count) - args = call( - name=self.classname, - condition=Condition.TRUE, - ordering=Ordering.ASC("name"), - limit=10, - streaming_mode=True, - transaction=transaction, - ) - self.assertEqual(args, self.find_mock.call_args) - - async def test_synchronize(self): - await self.snapshot_repository.synchronize() - - self.assertEqual(1, self.dispatch_mock.call_count) - self.assertEqual(call(), self.dispatch_mock.call_args) - - -if __name__ == "__main__": - unittest.main() diff --git 
a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_readers.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_readers.py deleted file mode 100644 index 092aa086d..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_readers.py +++ /dev/null @@ -1,368 +0,0 @@ -import unittest -from datetime import ( - datetime, -) -from uuid import ( - uuid4, -) - -from minos.aggregate import ( - AlreadyDeletedException, - Condition, - DatabaseSnapshotReader, - DatabaseSnapshotSetup, - DatabaseSnapshotWriter, - EventEntry, - FieldDiff, - FieldDiffContainer, - NotFoundException, - Ordering, - SnapshotEntry, - TransactionEntry, - TransactionStatus, -) -from minos.common import ( - DatabaseClientPool, -) -from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - AggregateTestCase, - Car, -) - - -class TestDatabaseSnapshotReader(AggregateTestCase, DatabaseMinosTestCase): - def setUp(self) -> None: - super().setUp() - - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_3 = uuid4() - - self.transaction_1 = uuid4() - self.transaction_2 = uuid4() - self.transaction_3 = uuid4() - - self.reader = DatabaseSnapshotReader.from_config(self.config) - - async def asyncSetUp(self): - await super().asyncSetUp() - await self.reader.setup() - await self._populate() - - async def asyncTearDown(self): - await self.reader.destroy() - await super().asyncTearDown() - - async def _populate(self): - diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) - # noinspection PyTypeChecker - name: str = Car.classname - - await self.event_repository.create(EventEntry(self.uuid_1, name, 1, diff.avro_bytes)) - await self.event_repository.update(EventEntry(self.uuid_1, name, 2, diff.avro_bytes)) - await self.event_repository.create(EventEntry(self.uuid_2, name, 1, diff.avro_bytes)) - await 
self.event_repository.update(EventEntry(self.uuid_1, name, 3, diff.avro_bytes)) - await self.event_repository.delete(EventEntry(self.uuid_1, name, 4)) - await self.event_repository.update(EventEntry(self.uuid_2, name, 2, diff.avro_bytes)) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 3, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_2) - ) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 4, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.create(EventEntry(self.uuid_3, name, 1, diff.avro_bytes)) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_3) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_1, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_2, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_3, TransactionStatus.REJECTED, await self.event_repository.offset) - ) - async with DatabaseSnapshotWriter.from_config(self.config, reader=self.reader) as writer: - await writer.dispatch() - - def test_type(self): - self.assertTrue(issubclass(DatabaseSnapshotReader, DatabaseSnapshotSetup)) - - def test_from_config(self): - reader = DatabaseSnapshotReader.from_config(self.config) - self.assertIsInstance(reader.database_pool, DatabaseClientPool) - - async def test_find_by_uuid(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.reader.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - 
created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.reader.find( - "tests.utils.Car", - condition, - ordering=Ordering.ASC("updated_at"), - transaction=TransactionEntry(self.transaction_1), - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=4, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction_delete(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.reader.find( - "tests.utils.Car", - condition, - ordering=Ordering.ASC("updated_at"), - transaction=TransactionEntry(self.transaction_2), - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_transaction_reverted(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.reader.find( - "tests.utils.Car", condition, ordering=Ordering.ASC("updated_at"), transaction_uuid=self.transaction_3 - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), 
- ] - self.assertEqual(expected, observed) - - async def test_find_streaming_true(self): - condition = Condition.IN("uuid", [self.uuid_2, self.uuid_3]) - - iterable = self.reader.find( - "tests.utils.Car", condition, streaming_mode=True, ordering=Ordering.ASC("updated_at") - ) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_with_duplicates(self): - uuids = [self.uuid_2, self.uuid_2, self.uuid_3] - condition = Condition.IN("uuid", uuids) - - iterable = self.reader.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_empty(self): - - observed = {v async for v in self.reader.find("tests.utils.Car", Condition.FALSE)} - - expected = set() - self.assertEqual(expected, observed) - - async def test_get(self): - - observed = await self.reader.get("tests.utils.Car", self.uuid_2) - - expected = Car( - 3, "blue", uuid=self.uuid_2, version=2, created_at=observed.created_at, updated_at=observed.updated_at - ) - self.assertEqual(expected, observed) - - async def test_get_with_transaction(self): - - observed = await self.reader.get( - "tests.utils.Car", self.uuid_2, transaction=TransactionEntry(self.transaction_1) - ) - - expected = Car( - 3, "blue", uuid=self.uuid_2, version=4, created_at=observed.created_at, 
updated_at=observed.updated_at - ) - self.assertEqual(expected, observed) - - async def test_get_raises(self): - - with self.assertRaises(AlreadyDeletedException): - await self.reader.get("tests.utils.Car", self.uuid_1) - with self.assertRaises(NotFoundException): - await self.reader.get("tests.utils.Car", uuid4()) - - async def test_get_with_transaction_raises(self): - - with self.assertRaises(AlreadyDeletedException): - await self.reader.get("tests.utils.Car", self.uuid_2, transaction=TransactionEntry(self.transaction_2)) - - async def test_find(self): - condition = Condition.EQUAL("color", "blue") - iterable = self.reader.find("tests.utils.Car", condition, ordering=Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - async def test_find_all(self): - - iterable = self.reader.find("tests.utils.Car", Condition.TRUE, Ordering.ASC("updated_at")) - observed = [v async for v in iterable] - - expected = [ - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ), - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ), - ] - self.assertEqual(expected, observed) - - def _assert_equal_snapshot_entries(self, expected: list[SnapshotEntry], observed: list[SnapshotEntry]): - self.assertEqual(len(expected), len(observed)) - for exp, obs in zip(expected, observed): - if exp.data is None: - with self.assertRaises(AlreadyDeletedException): - # noinspection PyStatementEffect - obs.build() - else: - self.assertEqual(exp.build(), obs.build()) - self.assertIsInstance(obs.created_at, datetime) 
- self.assertIsInstance(obs.updated_at, datetime) - - -if __name__ == "__main__": - unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_writers.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_writers.py deleted file mode 100644 index dd639b53e..000000000 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_writers.py +++ /dev/null @@ -1,338 +0,0 @@ -import unittest -from datetime import ( - datetime, -) -from unittest.mock import ( - MagicMock, - call, -) -from uuid import ( - uuid4, -) - -from minos.aggregate import ( - Action, - AlreadyDeletedException, - Condition, - DatabaseSnapshotReader, - DatabaseSnapshotSetup, - DatabaseSnapshotWriter, - EventEntry, - FieldDiff, - FieldDiffContainer, - Ordering, - SnapshotEntry, - TransactionEntry, - TransactionStatus, -) -from minos.common import ( - DatabaseClientPool, - NotProvidedException, - current_datetime, -) -from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - AggregateTestCase, - Car, -) - - -class TestDatabaseSnapshotWriter(AggregateTestCase, DatabaseMinosTestCase): - def setUp(self) -> None: - super().setUp() - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_3 = uuid4() - - self.transaction_1 = uuid4() - self.transaction_2 = uuid4() - self.transaction_3 = uuid4() - - self.reader = DatabaseSnapshotReader.from_config(self.config) - self.writer = DatabaseSnapshotWriter.from_config(self.config, reader=self.reader) - - async def asyncSetUp(self): - await super().asyncSetUp() - await self.writer.setup() - await self.reader.setup() - await self._populate() - - async def asyncTearDown(self): - await self.reader.destroy() - await self.writer.destroy() - await super().asyncTearDown() - - async def _populate(self): - diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) - # 
noinspection PyTypeChecker - name: str = Car.classname - - await self.event_repository.create(EventEntry(self.uuid_1, name, 1, diff.avro_bytes)) - await self.event_repository.update(EventEntry(self.uuid_1, name, 2, diff.avro_bytes)) - await self.event_repository.create(EventEntry(self.uuid_2, name, 1, diff.avro_bytes)) - await self.event_repository.update(EventEntry(self.uuid_1, name, 3, diff.avro_bytes)) - await self.event_repository.delete(EventEntry(self.uuid_1, name, 4)) - await self.event_repository.update(EventEntry(self.uuid_2, name, 2, diff.avro_bytes)) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 3, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_2) - ) - await self.event_repository.update( - EventEntry(self.uuid_2, name, 4, diff.avro_bytes, transaction_uuid=self.transaction_1) - ) - await self.event_repository.create(EventEntry(self.uuid_3, name, 1, diff.avro_bytes)) - await self.event_repository.delete( - EventEntry(self.uuid_2, name, 3, bytes(), transaction_uuid=self.transaction_3) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_1, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_2, TransactionStatus.PENDING, await self.event_repository.offset) - ) - await self.transaction_repository.submit( - TransactionEntry(self.transaction_3, TransactionStatus.REJECTED, await self.event_repository.offset) - ) - - def test_type(self): - self.assertTrue(issubclass(DatabaseSnapshotWriter, DatabaseSnapshotSetup)) - - def test_from_config(self): - self.assertIsInstance(self.writer.database_pool, DatabaseClientPool) - - def test_from_config_raises(self): - with self.assertRaises(NotProvidedException): - DatabaseSnapshotWriter.from_config(self.config, reader=self.reader, event_repository=None) 
- - with self.assertRaises(NotProvidedException): - DatabaseSnapshotWriter.from_config(self.config, reader=self.reader, transaction_repository=None) - - async def test_dispatch(self): - await self.writer.dispatch() - - # noinspection PyTypeChecker - iterable = self.reader.find_entries( - Car.classname, Condition.TRUE, Ordering.ASC("updated_at"), exclude_deleted=False - ) - observed = [v async for v in iterable] - - # noinspection PyTypeChecker - expected = [ - SnapshotEntry(self.uuid_1, Car.classname, 4), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ) - ), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[2].created_at, - updated_at=observed[2].updated_at, - ) - ), - ] - self._assert_equal_snapshot_entries(expected, observed) - - async def test_dispatch_first_transaction(self): - await self.writer.dispatch() - - # noinspection PyTypeChecker - iterable = self.reader.find_entries( - Car.classname, - Condition.TRUE, - Ordering.ASC("updated_at"), - exclude_deleted=False, - transaction=TransactionEntry(self.transaction_1), - ) - observed = [v async for v in iterable] - - # noinspection PyTypeChecker - expected = [ - SnapshotEntry(self.uuid_1, Car.classname, 4), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_2, - version=4, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ) - ), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[2].created_at, - updated_at=observed[2].updated_at, - ) - ), - ] - self._assert_equal_snapshot_entries(expected, observed) - - async def test_dispatch_second_transaction(self): - await self.writer.dispatch() - - # noinspection PyTypeChecker - iterable = self.reader.find_entries( - Car.classname, - Condition.TRUE, - Ordering.ASC("updated_at"), - 
exclude_deleted=False, - transaction=TransactionEntry(self.transaction_2), - ) - observed = [v async for v in iterable] - - # noinspection PyTypeChecker - expected = [ - SnapshotEntry(self.uuid_1, Car.classname, 4), - SnapshotEntry(self.uuid_2, Car.classname, 4), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[2].created_at, - updated_at=observed[2].updated_at, - ) - ), - ] - self._assert_equal_snapshot_entries(expected, observed) - - async def test_dispatch_third_transaction(self): - await self.writer.dispatch() - - # noinspection PyTypeChecker - iterable = self.reader.find_entries( - Car.classname, - Condition.TRUE, - Ordering.ASC("updated_at"), - exclude_deleted=False, - transaction_uuid=self.transaction_3, - ) - observed = [v async for v in iterable] - - # noinspection PyTypeChecker - expected = [ - SnapshotEntry(self.uuid_1, Car.classname, 4), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_2, - version=2, - created_at=observed[1].created_at, - updated_at=observed[1].updated_at, - ) - ), - SnapshotEntry.from_root_entity( - Car( - 3, - "blue", - uuid=self.uuid_3, - version=1, - created_at=observed[2].created_at, - updated_at=observed[2].updated_at, - ) - ), - ] - self._assert_equal_snapshot_entries(expected, observed) - - async def test_is_synced(self): - self.assertFalse(await self.writer.is_synced("tests.utils.Car")) - await self.writer.dispatch() - self.assertTrue(await self.writer.is_synced("tests.utils.Car")) - - async def test_dispatch_ignore_previous_version(self): - diff = FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]) - # noinspection PyTypeChecker - name: str = Car.classname - condition = Condition.EQUAL("uuid", self.uuid_1) - - async def _fn(*args, **kwargs): - yield EventEntry(self.uuid_1, name, 1, diff.avro_bytes, 1, Action.CREATE, current_datetime()) - yield EventEntry(self.uuid_1, name, 3, diff.avro_bytes, 2, Action.CREATE, 
current_datetime()) - yield EventEntry(self.uuid_1, name, 2, diff.avro_bytes, 3, Action.CREATE, current_datetime()) - - self.event_repository.select = MagicMock(side_effect=_fn) - await self.writer.dispatch() - - observed = [v async for v in self.reader.find_entries(name, condition)] - - # noinspection PyTypeChecker - expected = [ - SnapshotEntry( - uuid=self.uuid_1, - name=name, - version=3, - schema=Car.avro_schema, - data=Car(3, "blue", uuid=self.uuid_1, version=1).avro_data, - created_at=observed[0].created_at, - updated_at=observed[0].updated_at, - ) - ] - self._assert_equal_snapshot_entries(expected, observed) - - def _assert_equal_snapshot_entries(self, expected: list[SnapshotEntry], observed: list[SnapshotEntry]): - self.assertEqual(len(expected), len(observed)) - for exp, obs in zip(expected, observed): - if exp.data is None: - with self.assertRaises(AlreadyDeletedException): - # noinspection PyStatementEffect - obs.build() - else: - self.assertEqual(exp.build(), obs.build()) - self.assertIsInstance(obs.created_at, datetime) - self.assertIsInstance(obs.updated_at, datetime) - - async def test_dispatch_with_offset(self): - mock = MagicMock(side_effect=self.writer._event_repository.select) - self.writer._event_repository.select = mock - - await self.writer.dispatch() - self.assertEqual(1, mock.call_count) - self.assertEqual(call(id_gt=0), mock.call_args) - mock.reset_mock() - - # noinspection PyTypeChecker - entry = EventEntry( - uuid=self.uuid_3, - name=Car.classname, - data=FieldDiffContainer([FieldDiff("doors", int, 3), FieldDiff("color", str, "blue")]).avro_bytes, - ) - await self.event_repository.create(entry) - - await self.writer.dispatch() - self.assertEqual(1, mock.call_count) - self.assertEqual(call(id_gt=11), mock.call_args) - mock.reset_mock() - - await self.writer.dispatch() - self.assertEqual(1, mock.call_count) - self.assertEqual(call(id_gt=12), mock.call_args) - mock.reset_mock() - - await self.writer.dispatch() - self.assertEqual(1, 
mock.call_count) - self.assertEqual(call(id_gt=12), mock.call_args) - mock.reset_mock() - - -if __name__ == "__main__": - unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/__init__.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_abc.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_abc.py similarity index 70% rename from packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_abc.py rename to packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_abc.py index 1f0c979bb..4413415b2 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_abc.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_abc.py @@ -18,14 +18,19 @@ TRANSACTION_CONTEXT_VAR, Condition, Ordering, - RootEntity, + SnapshotEntry, SnapshotRepository, TransactionEntry, ) +from minos.aggregate.queries import ( + _EqualCondition, +) from minos.common import ( SetupMixin, ) from tests.utils import ( + AggregateTestCase, + Car, FakeAsyncIterator, ) @@ -33,28 +38,23 @@ class _SnapshotRepository(SnapshotRepository): """For testing purposes.""" - async def _get(self, *args, **kwargs) -> RootEntity: - """For testing purposes.""" - - def _find(self, *args, **kwargs) -> AsyncIterator[RootEntity]: + def _find_entries(self, *args, **kwargs) -> AsyncIterator[SnapshotEntry]: """For testing purposes.""" async def _synchronize(self, **kwargs) -> None: """For testing purposes.""" -class TestSnapshotRepository(unittest.IsolatedAsyncioTestCase): +class TestSnapshotRepository(AggregateTestCase): def setUp(self) -> 
None: super().setUp() self.snapshot_repository = _SnapshotRepository() - + self.entries = [SnapshotEntry.from_root_entity(Car(3, "red"))] * 5 self.synchronize_mock = AsyncMock() - self.get_mock = AsyncMock(return_value=1) - self.find_mock = MagicMock(return_value=FakeAsyncIterator(range(5))) + self.find_mock = MagicMock(return_value=FakeAsyncIterator(self.entries)) - self.snapshot_repository._get = self.get_mock - self.snapshot_repository._find = self.find_mock + self.snapshot_repository._find_entries = self.find_mock self.snapshot_repository._synchronize = self.synchronize_mock self.classname = "path.to.Product" @@ -64,41 +64,49 @@ def test_subclass(self): def test_abstract(self): # noinspection PyUnresolvedReferences - self.assertEqual({"_get", "_find", "_synchronize"}, SnapshotRepository.__abstractmethods__) + self.assertEqual({"_find_entries", "_synchronize"}, SnapshotRepository.__abstractmethods__) async def test_get(self): transaction = TransactionEntry() uuid = uuid4() observed = await self.snapshot_repository.get(self.classname, uuid, transaction) - self.assertEqual(1, observed) + self.assertEqual(self.entries[0].build(), observed) self.assertEqual(1, self.synchronize_mock.call_count) - self.assertEqual(call(), self.synchronize_mock.call_args) + self.assertEqual(call(synchronize=False), self.synchronize_mock.call_args) - self.assertEqual(1, self.get_mock.call_count) - args = call(name=self.classname, uuid=uuid, transaction=transaction) - self.assertEqual(args, self.get_mock.call_args) + self.assertEqual(1, self.find_mock.call_count) + args = call( + name=self.classname, + condition=_EqualCondition("uuid", uuid), + ordering=None, + limit=None, + streaming_mode=False, + transaction=transaction, + exclude_deleted=False, + ) + self.assertEqual(args, self.find_mock.call_args) async def test_get_transaction_null(self): await self.snapshot_repository.get(self.classname, uuid4()) - self.assertEqual(1, self.get_mock.call_count) - self.assertEqual(None, 
self.get_mock.call_args.kwargs["transaction"]) + self.assertEqual(1, self.find_mock.call_count) + self.assertEqual(None, self.find_mock.call_args.kwargs["transaction"]) async def test_get_transaction_context(self): transaction = TransactionEntry() TRANSACTION_CONTEXT_VAR.set(transaction) await self.snapshot_repository.get(self.classname, uuid4()) - self.assertEqual(1, self.get_mock.call_count) - self.assertEqual(transaction, self.get_mock.call_args.kwargs["transaction"]) + self.assertEqual(1, self.find_mock.call_count) + self.assertEqual(transaction, self.find_mock.call_args.kwargs["transaction"]) async def test_get_all(self): transaction = TransactionEntry() iterable = self.snapshot_repository.get_all(self.classname, Ordering.ASC("name"), 10, True, transaction) observed = [a async for a in iterable] - self.assertEqual(list(range(5)), observed) + self.assertEqual([e.build() for e in self.entries], observed) self.assertEqual( [ @@ -109,6 +117,7 @@ async def test_get_all(self): limit=10, streaming_mode=True, transaction=transaction, + exclude_deleted=True, ) ], self.find_mock.call_args_list, @@ -120,10 +129,10 @@ async def test_find(self): self.classname, Condition.TRUE, Ordering.ASC("name"), 10, True, transaction ) observed = [a async for a in iterable] - self.assertEqual(list(range(5)), observed) + self.assertEqual([e.build() for e in self.entries], observed) self.assertEqual(1, self.synchronize_mock.call_count) - self.assertEqual(call(), self.synchronize_mock.call_args) + self.assertEqual(call(synchronize=False), self.synchronize_mock.call_args) self.assertEqual(1, self.find_mock.call_count) args = call( @@ -133,6 +142,7 @@ async def test_find(self): limit=10, streaming_mode=True, transaction=transaction, + exclude_deleted=True, ) self.assertEqual(args, self.find_mock.call_args) @@ -154,7 +164,7 @@ async def test_synchronize(self): await self.snapshot_repository.synchronize() self.assertEqual(1, self.synchronize_mock.call_count) - self.assertEqual(call(), 
self.synchronize_mock.call_args) + self.assertEqual(call(synchronize=False), self.synchronize_mock.call_args) if __name__ == "__main__": diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_database.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_database.py new file mode 100644 index 000000000..76eadca7f --- /dev/null +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_database.py @@ -0,0 +1,506 @@ +import unittest +from itertools import ( + chain, + cycle, +) +from unittest.mock import ( + MagicMock, + patch, +) + +from minos.aggregate import ( + DatabaseSnapshotRepository, + SnapshotEntry, + SnapshotRepository, +) +from minos.aggregate.testing import ( + SnapshotRepositoryTestCase, +) +from minos.common import ( + DatabaseClient, + NotProvidedException, + ProgrammingException, + classname, + current_datetime, +) +from tests.utils import ( + AggregateTestCase, + FakeAsyncIterator, +) + + +class TestDatabaseSnapshotRepository(AggregateTestCase, SnapshotRepositoryTestCase): + __test__ = True + + def test_constructor_raises(self): + with self.assertRaises(NotProvidedException): + # noinspection PyTypeChecker + DatabaseSnapshotRepository(event_repository=None) + + with self.assertRaises(NotProvidedException): + # noinspection PyTypeChecker + DatabaseSnapshotRepository(transaction_repository=None) + + async def test_is_synced(self): + self.event_repository.select = MagicMock(side_effect=[FakeAsyncIterator([1]), FakeAsyncIterator([])]) + + with patch.object(DatabaseClient, "fetch_one", return_value=(0,)): + self.assertFalse(await self.snapshot_repository.is_synced(SnapshotRepositoryTestCase.Car)) + self.assertTrue(await self.snapshot_repository.is_synced(SnapshotRepositoryTestCase.Car)) + + def build_snapshot_repository(self) -> SnapshotRepository: + return 
DatabaseSnapshotRepository.from_config(self.config) + + async def synchronize(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + ProgrammingException(""), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + ], + ): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=chain( + [ + FakeAsyncIterator([]), + FakeAsyncIterator( + [ + tuple( + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_1, version=1) + ) + .as_raw() + .values() + ) + ] + ), + ], + cycle( + [ + FakeAsyncIterator([]), + ] + ), + ), + ): + await super().synchronize() + + async def test_dispatch(self): + entries = [ + SnapshotEntry( + self.uuid_1, + classname(SnapshotRepositoryTestCase.Car), + 4, + created_at=current_datetime(), + updated_at=current_datetime(), + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in entries]), + ): + await super().test_dispatch() + + async def test_dispatch_first_transaction(self): + entries = [ + SnapshotEntry( + self.uuid_1, + 
classname(SnapshotRepositoryTestCase.Car), + 4, + created_at=current_datetime(), + updated_at=current_datetime(), + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_2, + version=4, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in entries]), + ): + await super().test_dispatch_first_transaction() + + async def test_dispatch_second_transaction(self): + entries = [ + SnapshotEntry( + self.uuid_1, + classname(SnapshotRepositoryTestCase.Car), + 4, + created_at=current_datetime(), + updated_at=current_datetime(), + ), + SnapshotEntry( + self.uuid_2, + classname(SnapshotRepositoryTestCase.Car), + 4, + created_at=current_datetime(), + updated_at=current_datetime(), + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in entries]), + ): + await super().test_dispatch_second_transaction() + + async def test_dispatch_third_transaction(self): + entries = [ + SnapshotEntry( + self.uuid_1, + classname(SnapshotRepositoryTestCase.Car), + 4, + created_at=current_datetime(), + updated_at=current_datetime(), + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_2, + version=2, + created_at=current_datetime(), + 
updated_at=current_datetime(), + ) + ), + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car( + 3, + "blue", + uuid=self.uuid_3, + version=1, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in entries]), + ): + await super().test_dispatch_third_transaction() + + async def test_dispatch_ignore_previous_version(self): + entries = [ + SnapshotEntry( + uuid=self.uuid_1, + name=classname(SnapshotRepositoryTestCase.Car), + version=3, + schema=SnapshotRepositoryTestCase.Car.avro_schema, + data=SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_1, version=1).avro_data, + created_at=current_datetime(), + updated_at=current_datetime(), + ) + ] + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + ProgrammingException(""), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (9999,), + ], + ): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([]), + FakeAsyncIterator( + [ + tuple( + SnapshotEntry.from_root_entity( + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_1, version=5) + ) + .as_raw() + .values() + ) + ] + ), + FakeAsyncIterator([]), + FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in entries]), + ], + ): + await super().test_dispatch_ignore_previous_version() + + async def test_dispatch_with_offset(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (0,), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + 
(current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (current_datetime(), current_datetime()), + (11,), + (current_datetime(), current_datetime()), + (12,), + (12,), + ], + ): + await super().test_dispatch_with_offset() + + async def test_find_by_uuid(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_by_uuid() + + async def test_find_with_transaction(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=4), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_with_transaction() + + async def test_find_with_transaction_delete(self): + entities = [SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1)] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_with_transaction_delete() + + async def test_find_with_transaction_reverted(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + 
] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_with_transaction_reverted() + + async def test_find_streaming_true(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_streaming_true() + + async def test_find_with_duplicates(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_with_duplicates() + + async def test_find_empty(self): + entities = [] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_empty() + + async def test_get(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + 
[tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_get() + + async def test_get_with_transaction(self): + entities = [SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=4)] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_get_with_transaction() + + async def test_get_raises(self): + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator( + [ + tuple( + SnapshotEntry(self.uuid_1, classname(SnapshotRepositoryTestCase.Car), 1) + .as_raw() + .values() + ) + ] + ), + FakeAsyncIterator([]), + ], + ): + await super().test_get_raises() + + async def test_get_with_transaction_raises(self): + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry(self.uuid_1, classname(SnapshotRepositoryTestCase.Car), 1).as_raw().values())] + ), + ): + await super().test_get_with_transaction_raises() + + async def test_find(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find() + + async def test_find_all(self): + entities = [ + SnapshotRepositoryTestCase.Car(3, "blue", uuid=self.uuid_2, version=2), + SnapshotRepositoryTestCase.Car(3, "blue", 
uuid=self.uuid_3, version=1), + ] + with patch.object(DatabaseClient, "fetch_one", return_value=(9999,)): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(SnapshotEntry.from_root_entity(entity).as_raw().values()) for entity in entities] + ), + ): + await super().test_find_all() + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_memory.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_memory.py new file mode 100644 index 000000000..0fbc1b325 --- /dev/null +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_repositories/test_memory.py @@ -0,0 +1,38 @@ +import unittest + +from minos.aggregate import ( + InMemorySnapshotRepository, + SnapshotRepository, +) +from minos.aggregate.testing import ( + SnapshotRepositoryTestCase, +) +from minos.common import ( + NotProvidedException, +) +from tests.utils import ( + AggregateTestCase, +) + + +class TestInMemorySnapshotRepository(AggregateTestCase, SnapshotRepositoryTestCase): + __test__ = True + + def build_snapshot_repository(self) -> SnapshotRepository: + return InMemorySnapshotRepository() + + def test_constructor_raises(self): + with self.assertRaises(NotProvidedException): + # noinspection PyTypeChecker + InMemorySnapshotRepository(event_repository=None) + + with self.assertRaises(NotProvidedException): + # noinspection PyTypeChecker + InMemorySnapshotRepository(transaction_repository=None) + + async def test_dispatch_with_offset(self): + pass + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_entries.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_entries.py index 32eb6d7fd..ef8f2d75b 100644 --- 
a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_entries.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_entries.py @@ -1,4 +1,7 @@ import unittest +from datetime import ( + datetime, +) from unittest.mock import ( AsyncMock, MagicMock, @@ -523,6 +526,24 @@ def test_repr(self): ) self.assertEqual(expected, repr(transaction)) + def test_as_raw(self): + uuid = uuid4() + status = TransactionStatus.PENDING + event_offset = 56 + updated_at = datetime(2020, 10, 13, 8, 45, 32) + destination_uuid = uuid4() + + entry = TransactionEntry(uuid, status, event_offset, destination_uuid, updated_at) + expected = { + "uuid": uuid, + "status": status, + "event_offset": event_offset, + "destination_uuid": destination_uuid, + "updated_at": updated_at, + } + + self.assertEqual(expected, entry.as_raw()) + class TestTransactionStatus(unittest.TestCase): def test_value_of_created(self): diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_database.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_database.py new file mode 100644 index 000000000..f7900acff --- /dev/null +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_database.py @@ -0,0 +1,326 @@ +import unittest +from unittest.mock import ( + patch, +) + +from minos.aggregate import ( + DatabaseTransactionRepository, + TransactionRepository, + TransactionStatus, +) +from minos.aggregate.testing import ( + TransactionRepositoryTestCase, +) +from minos.common import ( + DatabaseClient, + ProgrammingException, + current_datetime, +) +from tests.utils import ( + AggregateTestCase, + FakeAsyncIterator, +) + + +# noinspection SqlNoDataSourceInspection +class TestDatabaseTransactionRepository(AggregateTestCase, TransactionRepositoryTestCase): + __test__ = True + + def 
build_transaction_repository(self) -> TransactionRepository: + return DatabaseTransactionRepository.from_config(self.config) + + async def test_submit(self): + with patch.object( + DatabaseClient, + "fetch_one", + return_value=[current_datetime()], + ): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([(self.uuid, TransactionStatus.PENDING, 34)]), + ): + await super().test_submit() + + async def test_submit_pending_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ], + ): + await super().test_submit_pending_raises() + + async def test_submit_reserving_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ], + ): + await super().test_submit_reserving_raises() + + async def test_submit_reserved_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ], + ): + await super().test_submit_reserved_raises() + + async def test_submit_committing_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ], + ): + await super().test_submit_committing_raises() + + async def test_submit_committed_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + 
ProgrammingException(""), + ], + ): + await super().test_submit_committed_raises() + + async def test_submit_rejected_raises(self): + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ProgrammingException(""), + ], + ): + await super().test_submit_rejected_raises() + + async def populate(self) -> None: + with patch.object( + DatabaseClient, + "fetch_one", + side_effect=[ + (current_datetime(),), + (current_datetime(),), + (current_datetime(),), + (current_datetime(),), + (current_datetime(),), + (current_datetime(),), + ], + ): + await super().populate() + + async def test_select(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in self.entries]), + ): + await super().test_select() + + async def test_select_uuid(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[1]]]), + ): + await super().test_select_uuid() + + async def test_select_uuid_ne(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(entry.as_raw().values()) + for entry in [self.entries[0], self.entries[2], self.entries[3], self.entries[4]] + ] + ), + ): + await super().test_select_uuid_ne() + + async def test_select_uuid_in(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[1], self.entries[2]]] + ), + ): + await super().test_select_uuid_in() + + async def test_select_destination_uuid(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[4]]]), + ): + await 
super().test_select_destination_uuid() + + async def test_select_status(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[0], self.entries[1], self.entries[4]]] + ), + ): + await super().test_select_status() + + async def test_select_status_in(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[2], self.entries[3]]] + ), + ): + await super().test_select_status_in() + + async def test_select_event_offset(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[1]]]), + ): + await super().test_select_event_offset() + + async def test_select_event_offset_lt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[0]]]), + ): + await super().test_select_event_offset_lt() + + async def test_select_event_offset_gt(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[2], self.entries[3], self.entries[4]]] + ), + ): + await super().test_select_event_offset_gt() + + async def test_select_event_offset_le(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[0], self.entries[1]]] + ), + ): + await super().test_select_event_offset_le() + + async def test_select_event_offset_ge(self): + with patch.object( + DatabaseClient, + "fetch_all", + return_value=FakeAsyncIterator( + [ + tuple(entry.as_raw().values()) + for entry in [self.entries[1], self.entries[2], self.entries[3], self.entries[4]] + ] + ), + ): + await super().test_select_event_offset_ge() + + async def 
test_select_updated_at(self): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([tuple(self.entries[2].as_raw().values())]), + FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[2]]]), + ], + ): + await super().test_select_updated_at() + + async def test_select_updated_at_lt(self): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([tuple(self.entries[2].as_raw().values())]), + FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[0], self.entries[1]]]), + ], + ): + await super().test_select_updated_at_lt() + + async def test_select_updated_at_gt(self): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([tuple(self.entries[2].as_raw().values())]), + FakeAsyncIterator([tuple(entry.as_raw().values()) for entry in [self.entries[3], self.entries[4]]]), + ], + ): + await super().test_select_updated_at_gt() + + async def test_select_updated_at_le(self): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([tuple(self.entries[2].as_raw().values())]), + FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[0], self.entries[1], self.entries[2]]] + ), + ], + ): + await super().test_select_updated_at_le() + + async def test_select_updated_at_ge(self): + with patch.object( + DatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([tuple(self.entries[2].as_raw().values())]), + FakeAsyncIterator( + [tuple(entry.as_raw().values()) for entry in [self.entries[2], self.entries[3], self.entries[4]]] + ), + ], + ): + await super().test_select_updated_at_ge() + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_memory.py b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_memory.py 
index 394666700..95ba70979 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_memory.py +++ b/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_transactions/test_repositories/test_memory.py @@ -1,257 +1,22 @@ import unittest -from uuid import ( - uuid4, -) from minos.aggregate import ( InMemoryTransactionRepository, - TransactionEntry, TransactionRepository, - TransactionRepositoryConflictException, - TransactionStatus, +) +from minos.aggregate.testing import ( + TransactionRepositoryTestCase, ) from tests.utils import ( AggregateTestCase, ) -class TestInMemoryTransactionRepository(AggregateTestCase): - def setUp(self) -> None: - super().setUp() - self.uuid = uuid4() - - async def asyncSetUp(self) -> None: - await super().asyncSetUp() - self.transaction_repository = InMemoryTransactionRepository() - await self.transaction_repository.setup() - - async def asyncTearDown(self) -> None: - await self.transaction_repository.destroy() - await super().asyncTearDown() - - async def test_subclass(self) -> None: - self.assertTrue(issubclass(InMemoryTransactionRepository, TransactionRepository)) - - async def test_submit(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - expected = [TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)] - observed = [v async for v in self.transaction_repository.select()] - self.assertEqual(expected, observed) - - async def test_submit_pending_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTING, 34)) - 
with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - - async def test_submit_reserving_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - - async def test_submit_reserved_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - - async def test_submit_committing_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - 
with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.REJECTED, 34)) - - async def test_submit_committed_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.REJECTED, 34)) - - async def test_submit_rejected_raises(self): - await self.transaction_repository.submit(TransactionEntry(self.uuid, 
TransactionStatus.REJECTED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.PENDING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.RESERVED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTING, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.COMMITTED, 34)) - with self.assertRaises(TransactionRepositoryConflictException): - await self.transaction_repository.submit(TransactionEntry(self.uuid, TransactionStatus.REJECTED, 34)) - - async def test_select_empty(self): - expected = [] - observed = [v async for v in self.transaction_repository.select()] - self.assertEqual(expected, observed) - - -class TestInMemoryTransactionRepositorySelect(AggregateTestCase): - def setUp(self) -> None: - super().setUp() - self.uuid_1 = uuid4() - self.uuid_2 = uuid4() - self.uuid_3 = uuid4() - self.uuid_4 = uuid4() - self.uuid_5 = uuid4() - - self.transaction_repository = InMemoryTransactionRepository() - - self.entries = [ - TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12), - TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15), - TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16), - TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20), - TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1), - ] - - async def asyncSetUp(self): - await super().asyncSetUp() - await self._populate() - - async def _populate(self): 
- await self.transaction_repository.setup() - await self.transaction_repository.submit(TransactionEntry(self.uuid_1, TransactionStatus.PENDING, 12)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_2, TransactionStatus.PENDING, 15)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_3, TransactionStatus.REJECTED, 16)) - await self.transaction_repository.submit(TransactionEntry(self.uuid_4, TransactionStatus.COMMITTED, 20)) - await self.transaction_repository.submit( - TransactionEntry(self.uuid_5, TransactionStatus.PENDING, 20, self.uuid_1) - ) - - async def asyncTearDown(self): - await self.transaction_repository.destroy() - await super().asyncTearDown() - - async def test_select(self): - expected = self.entries - observed = [v async for v in self.transaction_repository.select()] - self.assertEqual(expected, observed) - - async def test_select_uuid(self): - expected = [self.entries[1]] - observed = [v async for v in self.transaction_repository.select(uuid=self.uuid_2)] - self.assertEqual(expected, observed) - - async def test_select_uuid_ne(self): - expected = [self.entries[0], self.entries[2], self.entries[3], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(uuid_ne=self.uuid_2)] - self.assertEqual(expected, observed) - - async def test_select_uuid_in(self): - expected = [self.entries[1], self.entries[2]] - observed = [v async for v in self.transaction_repository.select(uuid_in=(self.uuid_2, self.uuid_3))] - self.assertEqual(expected, observed) - - async def test_select_destination_uuid(self): - expected = [self.entries[4]] - observed = [v async for v in self.transaction_repository.select(destination_uuid=self.uuid_1)] - self.assertEqual(expected, observed) - - async def test_select_status(self): - expected = [self.entries[0], self.entries[1], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(status=TransactionStatus.PENDING)] - self.assertEqual(expected, 
observed) - - async def test_select_status_in(self): - expected = [self.entries[2], self.entries[3]] - observed = [ - v - async for v in self.transaction_repository.select( - status_in=(TransactionStatus.COMMITTED, TransactionStatus.REJECTED) - ) - ] - self.assertEqual(expected, observed) - - async def test_select_event_offset(self): - expected = [self.entries[1]] - observed = [v async for v in self.transaction_repository.select(event_offset=15)] - self.assertEqual(expected, observed) - - async def test_select_event_offset_lt(self): - expected = [self.entries[0]] - observed = [v async for v in self.transaction_repository.select(event_offset_lt=15)] - self.assertEqual(expected, observed) - - async def test_select_event_offset_gt(self): - expected = [self.entries[2], self.entries[3], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(event_offset_gt=15)] - self.assertEqual(expected, observed) - - async def test_select_event_offset_le(self): - expected = [self.entries[0], self.entries[1]] - observed = [v async for v in self.transaction_repository.select(event_offset_le=15)] - self.assertEqual(expected, observed) - - async def test_select_event_offset_ge(self): - expected = [self.entries[1], self.entries[2], self.entries[3], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(event_offset_ge=15)] - self.assertEqual(expected, observed) - - async def test_select_updated_at(self): - updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at - - expected = [self.entries[2]] - observed = [v async for v in self.transaction_repository.select(updated_at=updated_at)] - self.assertEqual(expected, observed) - - async def test_select_updated_at_lt(self): - updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at - - expected = [self.entries[0], self.entries[1]] - observed = [v async for v in self.transaction_repository.select(updated_at_lt=updated_at)] - self.assertEqual(expected, 
observed) - - async def test_select_updated_at_gt(self): - updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at - - expected = [self.entries[3], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(updated_at_gt=updated_at)] - self.assertEqual(expected, observed) - - async def test_select_updated_at_le(self): - updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at - - expected = [self.entries[0], self.entries[1], self.entries[2]] - observed = [v async for v in self.transaction_repository.select(updated_at_le=updated_at)] - self.assertEqual(expected, observed) - - async def test_select_updated_at_ge(self): - updated_at = (await self.transaction_repository.get(self.uuid_3)).updated_at +class TestInMemoryTransactionRepository(AggregateTestCase, TransactionRepositoryTestCase): + __test__ = True - expected = [self.entries[2], self.entries[3], self.entries[4]] - observed = [v async for v in self.transaction_repository.select(updated_at_ge=updated_at)] - self.assertEqual(expected, observed) + def build_transaction_repository(self) -> TransactionRepository: + return InMemoryTransactionRepository() if __name__ == "__main__": diff --git a/packages/core/minos-microservice-aggregate/tests/test_config.yml b/packages/core/minos-microservice-aggregate/tests/test_config.yml index 17bdcc6f8..ed9905abd 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_config.yml +++ b/packages/core/minos-microservice-aggregate/tests/test_config.yml @@ -2,14 +2,14 @@ service: name: Order aggregate: tests.utils.Order repository: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s host: localhost port: 5432 snapshot: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s @@ -22,7 +22,7 @@ broker: host: localhost port: 9092 queue: - 
client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s diff --git a/packages/core/minos-microservice-aggregate/tests/testcases/__init__.py b/packages/core/minos-microservice-aggregate/tests/testcases/__init__.py deleted file mode 100644 index ed0100614..000000000 --- a/packages/core/minos-microservice-aggregate/tests/testcases/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .event_repository import ( - EventRepositorySelectTestCase, - EventRepositorySubmitTestCase, - EventRepositoryTestCase, -) diff --git a/packages/core/minos-microservice-aggregate/tests/utils.py b/packages/core/minos-microservice-aggregate/tests/utils.py index 908121875..aa9333287 100644 --- a/packages/core/minos-microservice-aggregate/tests/utils.py +++ b/packages/core/minos-microservice-aggregate/tests/utils.py @@ -27,6 +27,7 @@ RootEntity, ValueObject, ValueObjectSet, + testing, ) from minos.common import ( DatabaseClientPool, @@ -48,6 +49,8 @@ class AggregateTestCase(MinosTestCase, ABC): + testing_module = testing + def get_config_file_path(self): return CONFIG_FILE_PATH diff --git a/packages/core/minos-microservice-common/minos/common/__init__.py b/packages/core/minos-microservice-common/minos/common/__init__.py index 048001ea1..968bd051a 100644 --- a/packages/core/minos-microservice-common/minos/common/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/__init__.py @@ -14,11 +14,8 @@ MinosConfig, ) from .database import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, - AiopgLockDatabaseOperationFactory, - AiopgManageDatabaseOperationFactory, ComposedDatabaseOperation, + ConnectionException, DatabaseClient, DatabaseClientBuilder, DatabaseClientException, @@ -30,8 +27,8 @@ DatabaseOperationFactory, IntegrityException, LockDatabaseOperationFactory, - ManageDatabaseOperationFactory, - UnableToConnectException, + ManagementDatabaseOperationFactory, + ProgrammingException, ) from .datetime 
import ( NULL_DATETIME, @@ -107,6 +104,7 @@ SchemaEncoder, TypeHintBuilder, TypeHintComparator, + TypeHintParser, is_model_type, ) from .object import ( @@ -115,6 +113,7 @@ from .pools import ( MinosPool, Pool, + PoolException, PoolFactory, ) from .ports import ( diff --git a/packages/core/minos-microservice-common/minos/common/config/abc.py b/packages/core/minos-microservice-common/minos/common/config/abc.py index 9c4714094..60e38c1d2 100644 --- a/packages/core/minos-microservice-common/minos/common/config/abc.py +++ b/packages/core/minos-microservice-common/minos/common/config/abc.py @@ -145,18 +145,21 @@ def get_default_database(self): :return: A ``dict`` containing the database's config values. """ - return self.get_database_by_name("default") + return self.get_database_by_name(None) - def get_database_by_name(self, name: str) -> dict[str, Any]: + def get_database_by_name(self, name: Optional[str]) -> dict[str, Any]: """Get the database value by name. :param name: The name of the database. If ``None`` is provided then the default database will be used. :return: A ``dict`` containing the database's config values. 
""" + if name is None: + name = "default" + databases = self.get_databases() if name not in databases: - name = "default" + raise MinosConfigException(f"{name!r} database is not configured") return databases[name] diff --git a/packages/core/minos-microservice-common/minos/common/database/__init__.py b/packages/core/minos-microservice-common/minos/common/database/__init__.py index 7f3e5676c..30f604571 100644 --- a/packages/core/minos-microservice-common/minos/common/database/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/database/__init__.py @@ -1,25 +1,22 @@ from .clients import ( - AiopgDatabaseClient, + ConnectionException, DatabaseClient, DatabaseClientBuilder, DatabaseClientException, IntegrityException, - UnableToConnectException, + ProgrammingException, ) from .locks import ( - AiopgLockDatabaseOperationFactory, DatabaseLock, LockDatabaseOperationFactory, ) -from .manage import ( - AiopgManageDatabaseOperationFactory, - ManageDatabaseOperationFactory, +from .managements import ( + ManagementDatabaseOperationFactory, ) from .mixins import ( DatabaseMixin, ) from .operations import ( - AiopgDatabaseOperation, ComposedDatabaseOperation, DatabaseOperation, DatabaseOperationFactory, diff --git a/packages/core/minos-microservice-common/minos/common/database/clients/__init__.py b/packages/core/minos-microservice-common/minos/common/database/clients/__init__.py index a2884b3c4..928f11271 100644 --- a/packages/core/minos-microservice-common/minos/common/database/clients/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/database/clients/__init__.py @@ -2,11 +2,9 @@ DatabaseClient, DatabaseClientBuilder, ) -from .aiopg import ( - AiopgDatabaseClient, -) from .exceptions import ( + ConnectionException, DatabaseClientException, IntegrityException, - UnableToConnectException, + ProgrammingException, ) diff --git a/packages/core/minos-microservice-common/minos/common/database/clients/abc.py 
b/packages/core/minos-microservice-common/minos/common/database/clients/abc.py index 37c707434..dc1f9e010 100644 --- a/packages/core/minos-microservice-common/minos/common/database/clients/abc.py +++ b/packages/core/minos-microservice-common/minos/common/database/clients/abc.py @@ -32,6 +32,9 @@ DatabaseOperation, DatabaseOperationFactory, ) +from .exceptions import ( + ProgrammingException, +) if TYPE_CHECKING: from ..locks import ( @@ -67,6 +70,10 @@ async def is_valid(self, **kwargs) -> bool: async def _is_valid(self, **kwargs) -> bool: raise NotImplementedError + async def _destroy(self) -> None: + await self.reset() + await super()._destroy() + async def reset(self, **kwargs) -> None: """Reset the current instance status. @@ -133,7 +140,10 @@ async def fetch_one(self) -> Any: :return: This method does not return anything. """ - return await self.fetch_all().__anext__() + try: + return await self.fetch_all().__anext__() + except StopAsyncIteration: + raise ProgrammingException("There are not any value to be fetched.") def fetch_all(self) -> AsyncIterator[Any]: """Fetch all values with an asynchronous iterator. @@ -147,7 +157,7 @@ def _fetch_all(self, *args, **kwargs) -> AsyncIterator[Any]: raise NotImplementedError @classmethod - def register_factory(cls, base: type[DatabaseOperationFactory], impl: type[DatabaseOperationFactory]) -> None: + def set_factory(cls, base: type[DatabaseOperationFactory], impl: type[DatabaseOperationFactory]) -> None: """Register an operation factory implementation for an operation factory interface. :param base: The operation factory interface. 
diff --git a/packages/core/minos-microservice-common/minos/common/database/clients/exceptions.py b/packages/core/minos-microservice-common/minos/common/database/clients/exceptions.py index d638a738c..f135f74bf 100644 --- a/packages/core/minos-microservice-common/minos/common/database/clients/exceptions.py +++ b/packages/core/minos-microservice-common/minos/common/database/clients/exceptions.py @@ -7,9 +7,13 @@ class DatabaseClientException(MinosException): """Base exception for database client.""" -class UnableToConnectException(DatabaseClientException): +class ConnectionException(DatabaseClientException): """Exception to be raised when database client is not able to connect to the database.""" class IntegrityException(DatabaseClientException): """Exception to be raised when an integrity check is not satisfied.""" + + +class ProgrammingException(DatabaseClientException): + """Exception to be raised when a programming error is detected, such as fetching a value when none is available.""" diff --git a/packages/core/minos-microservice-common/minos/common/database/locks/__init__.py b/packages/core/minos-microservice-common/minos/common/database/locks/__init__.py index 4f02878f1..ddf3ddc07 100644 --- a/packages/core/minos-microservice-common/minos/common/database/locks/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/database/locks/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgLockDatabaseOperationFactory, LockDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-common/minos/common/database/locks/factories/__init__.py b/packages/core/minos-microservice-common/minos/common/database/locks/factories/__init__.py index e2f0933f4..47511313b 100644 --- a/packages/core/minos-microservice-common/minos/common/database/locks/factories/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/database/locks/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( LockDatabaseOperationFactory, ) -from .aiopg import ( -
AiopgLockDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-common/minos/common/database/manage/__init__.py b/packages/core/minos-microservice-common/minos/common/database/manage/__init__.py deleted file mode 100644 index 53dca9546..000000000 --- a/packages/core/minos-microservice-common/minos/common/database/manage/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .factories import ( - AiopgManageDatabaseOperationFactory, - ManageDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-common/minos/common/database/manage/factories.py b/packages/core/minos-microservice-common/minos/common/database/manage/factories.py deleted file mode 100644 index 7ded96715..000000000 --- a/packages/core/minos-microservice-common/minos/common/database/manage/factories.py +++ /dev/null @@ -1,57 +0,0 @@ -from abc import ( - ABC, - abstractmethod, -) - -from ..clients import ( - AiopgDatabaseClient, -) -from ..operations import ( - AiopgDatabaseOperation, - DatabaseOperation, - DatabaseOperationFactory, -) - - -class ManageDatabaseOperationFactory(DatabaseOperationFactory, ABC): - """Manage Database Operation Factory base class.""" - - @abstractmethod - def build_create(self, database: str) -> DatabaseOperation: - """Build the database operation to create a database. - - :param database: The new database's name. - :return: A ``DatabaseOperation``. - """ - - @abstractmethod - def build_delete(self, database: str) -> DatabaseOperation: - """Build the database operation to create a database. - - :param database: The name of the database to be deleted. - :return: A ``DatabaseOperation``. - """ - - -# noinspection SqlNoDataSourceInspection -class AiopgManageDatabaseOperationFactory(ManageDatabaseOperationFactory): - """Aiopg Manage Database Operation Factory class.""" - - def build_create(self, database: str) -> DatabaseOperation: - """Build the database operation to create a database. - - :param database: The new database's name. 
- :return: A ``DatabaseOperation``. - """ - return AiopgDatabaseOperation(f"CREATE DATABASE {database};") - - def build_delete(self, database: str) -> DatabaseOperation: - """Build the database operation to create a database. - - :param database: The name of the database to be deleted. - :return: A ``DatabaseOperation``. - """ - return AiopgDatabaseOperation(f"DROP DATABASE IF EXISTS {database};") - - -AiopgDatabaseClient.register_factory(ManageDatabaseOperationFactory, AiopgManageDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-common/minos/common/database/managements/__init__.py b/packages/core/minos-microservice-common/minos/common/database/managements/__init__.py new file mode 100644 index 000000000..a00b0ae0d --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/database/managements/__init__.py @@ -0,0 +1,3 @@ +from .factories import ( + ManagementDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-common/minos/common/database/managements/factories.py b/packages/core/minos-microservice-common/minos/common/database/managements/factories.py new file mode 100644 index 000000000..d84c5ebbe --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/database/managements/factories.py @@ -0,0 +1,29 @@ +from abc import ( + ABC, + abstractmethod, +) + +from ..operations import ( + DatabaseOperation, + DatabaseOperationFactory, +) + + +class ManagementDatabaseOperationFactory(DatabaseOperationFactory, ABC): + """Management Database Operation Factory base class.""" + + @abstractmethod + def build_create(self, database: str) -> DatabaseOperation: + """Build the database operation to create a database. + + :param database: The new database's name. + :return: A ``DatabaseOperation``. + """ + + @abstractmethod + def build_delete(self, database: str) -> DatabaseOperation: + """Build the database operation to delete a database. + + :param database: The name of the database to be deleted. 
+ :return: A ``DatabaseOperation``. + """ diff --git a/packages/core/minos-microservice-common/minos/common/database/mixins.py b/packages/core/minos-microservice-common/minos/common/database/mixins.py index 13602518c..48c64a06e 100644 --- a/packages/core/minos-microservice-common/minos/common/database/mixins.py +++ b/packages/core/minos-microservice-common/minos/common/database/mixins.py @@ -1,3 +1,6 @@ +from contextlib import ( + suppress, +) from typing import ( AsyncIterator, Generic, @@ -14,6 +17,7 @@ Inject, ) from ..pools import ( + PoolException, PoolFactory, ) from ..setup import ( @@ -39,15 +43,16 @@ def __init__( self, database_pool: Optional[DatabaseClientPool] = None, pool_factory: Optional[PoolFactory] = None, - database_key: Optional[str] = None, + database_key: Optional[tuple[str]] = None, operation_factory: Optional[GenericDatabaseOperationFactory] = None, operation_factory_cls: Optional[type[GenericDatabaseOperationFactory]] = None, *args, **kwargs, ): super().__init__(*args, **kwargs, pool_factory=pool_factory) + if database_pool is None and pool_factory is not None: - database_pool = pool_factory.get_pool(type_="database", identifier=database_key) + database_pool = self._get_pool_from_factory(pool_factory, database_key) if not isinstance(database_pool, DatabaseClientPool): raise NotProvidedException(f"A {DatabaseClientPool!r} instance is required. 
Obtained: {database_pool}") @@ -62,8 +67,19 @@ def __init__( self._operation_factory = operation_factory + @staticmethod + def _get_pool_from_factory(pool_factory: PoolFactory, database_key: Optional[tuple[str]]): + if database_key is None: + database_key = tuple() + + for identifier in database_key: + with suppress(PoolException): + return pool_factory.get_pool(type_="database", identifier=identifier) + + return pool_factory.get_pool(type_="database") + @property - def operation_factory(self) -> Optional[GenericDatabaseOperationFactory]: + def database_operation_factory(self) -> Optional[GenericDatabaseOperationFactory]: """Get the operation factory if any. :return: A ``OperationFactory`` if it has been set or ``None`` otherwise. @@ -85,8 +101,8 @@ def _get_generic_operation_factory(self) -> Optional[type[GenericDatabaseOperati raise TypeError(f"{type(self)!r} must contain a {DatabaseOperationFactory!r} as generic value.") return operation_factory_cls - async def submit_query_and_fetchone(self, operation: DatabaseOperation) -> tuple: - """Submit a SQL query and gets the first response. + async def execute_on_database_and_fetch_one(self, operation: DatabaseOperation) -> tuple: + """Submit an Operation and get the first response. :param operation: The operation to be executed. :return: This method does not return anything. @@ -96,10 +112,10 @@ async def submit_query_and_fetchone(self, operation: DatabaseOperation) -> tuple return await client.fetch_one() # noinspection PyUnusedLocal - async def submit_query_and_iter( + async def execute_on_database_and_fetch_all( self, operation: DatabaseOperation, streaming_mode: Optional[bool] = None ) -> AsyncIterator[tuple]: - """Submit a SQL query and return an asynchronous iterator. + """Submit an Operation and return an asynchronous iterator. :param operation: The operation to be executed. 
:param streaming_mode: If ``True`` return the values in streaming directly from the database (keep an open @@ -123,8 +139,8 @@ async def submit_query_and_iter( yield value # noinspection PyUnusedLocal - async def submit_query(self, operation: DatabaseOperation) -> None: - """Submit a SQL query. + async def execute_on_database(self, operation: DatabaseOperation) -> None: + """Submit an Operation. :param operation: The operation to be executed. :return: This method does not return anything. diff --git a/packages/core/minos-microservice-common/minos/common/database/operations/__init__.py b/packages/core/minos-microservice-common/minos/common/database/operations/__init__.py index 08f7b389c..96a05c57c 100644 --- a/packages/core/minos-microservice-common/minos/common/database/operations/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/database/operations/__init__.py @@ -3,6 +3,3 @@ DatabaseOperation, DatabaseOperationFactory, ) -from .aiopg import ( - AiopgDatabaseOperation, -) diff --git a/packages/core/minos-microservice-common/minos/common/database/pools.py b/packages/core/minos-microservice-common/minos/common/database/pools.py index 16aec0c9b..f12ec25c8 100644 --- a/packages/core/minos-microservice-common/minos/common/database/pools.py +++ b/packages/core/minos-microservice-common/minos/common/database/pools.py @@ -26,9 +26,9 @@ Pool, ) from .clients import ( + ConnectionException, DatabaseClient, DatabaseClientBuilder, - UnableToConnectException, ) from .locks import ( DatabaseLock, @@ -66,7 +66,7 @@ async def _create_instance(self) -> Optional[DatabaseClient]: try: await instance.setup() - except UnableToConnectException: + except ConnectionException: await sleep(0.1) return None diff --git a/packages/core/minos-microservice-common/minos/common/importlib.py b/packages/core/minos-microservice-common/minos/common/importlib.py index f917f2a21..b27aa8908 100644 --- a/packages/core/minos-microservice-common/minos/common/importlib.py +++ 
b/packages/core/minos-microservice-common/minos/common/importlib.py @@ -1,5 +1,8 @@ import importlib import pkgutil +from contextlib import ( + suppress, +) from functools import ( lru_cache, ) @@ -22,7 +25,7 @@ def import_module(module_name: str) -> Union[type, Callable, ModuleType]: parts = module_name.rsplit(".", 1) try: - kallable = importlib.import_module(parts[0]) + kallable = _import_module(parts[0]) except ImportError: raise MinosImportException(f"Error importing {module_name!r}: the module does not exist") @@ -35,6 +38,16 @@ def import_module(module_name: str) -> Union[type, Callable, ModuleType]: return kallable +def _import_module(module_name: str) -> Union[type, Callable, ModuleType]: + try: + return importlib.import_module(module_name) + except ImportError as exc: + if "." in module_name: + with suppress(MinosImportException): + return import_module(module_name) + raise exc + + def classname(cls: Union[type, Callable]) -> str: """Compute the given class full name. diff --git a/packages/core/minos-microservice-common/minos/common/model/__init__.py b/packages/core/minos-microservice-common/minos/common/model/__init__.py index 6cba174aa..04b4019e5 100644 --- a/packages/core/minos-microservice-common/minos/common/model/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/model/__init__.py @@ -31,5 +31,6 @@ NoneType, TypeHintBuilder, TypeHintComparator, + TypeHintParser, is_model_type, ) diff --git a/packages/core/minos-microservice-common/minos/common/model/fields.py b/packages/core/minos-microservice-common/minos/common/model/fields.py index 512cc8041..942e7cce4 100644 --- a/packages/core/minos-microservice-common/minos/common/model/fields.py +++ b/packages/core/minos-microservice-common/minos/common/model/fields.py @@ -31,6 +31,7 @@ MissingSentinel, TypeHintBuilder, TypeHintComparator, + TypeHintParser, ) logger = logging.getLogger(__name__) @@ -64,7 +65,7 @@ def name(self) -> str: @property def type(self) -> type: """Type getter.""" - 
return self._type + return TypeHintParser(self._type).build() @property def real_type(self) -> type: diff --git a/packages/core/minos-microservice-common/minos/common/model/types/__init__.py b/packages/core/minos-microservice-common/minos/common/model/types/__init__.py index 26649a7ca..2a6a407e7 100644 --- a/packages/core/minos-microservice-common/minos/common/model/types/__init__.py +++ b/packages/core/minos-microservice-common/minos/common/model/types/__init__.py @@ -1,5 +1,6 @@ from .builders import ( TypeHintBuilder, + TypeHintParser, build_union, ) from .comparators import ( diff --git a/packages/core/minos-microservice-common/minos/common/model/types/builders.py b/packages/core/minos-microservice-common/minos/common/model/types/builders.py index 9a25d8290..dc84b4a74 100644 --- a/packages/core/minos-microservice-common/minos/common/model/types/builders.py +++ b/packages/core/minos-microservice-common/minos/common/model/types/builders.py @@ -6,6 +6,9 @@ from collections.abc import ( Iterable, ) +from functools import ( + lru_cache, +) from typing import ( Any, Optional, @@ -16,6 +19,7 @@ from .comparators import ( TypeHintComparator, + is_model_subclass, is_model_type, ) from .model_types import ( @@ -87,3 +91,30 @@ def _build_from_iterable(self, values: Iterable, type_: Optional[type]) -> type: @staticmethod def _build_from_dynamic(dynamic: type, static: Optional[type]) -> type: return dynamic if not len(get_args(static)) and TypeHintComparator(dynamic, static).match() else static + + +class TypeHintParser: + """Type Hint Parser class.""" + + def __init__(self, type_: Optional[type] = None): + self.type_ = type_ + + def build(self) -> type: + """Parse type hint. + + :return: A type. 
+ """ + return self._build(self.type_) + + @classmethod + @lru_cache() + def _build(cls, type_: Optional[type]) -> type: + if is_model_subclass(type_): + # noinspection PyTypeChecker + return ModelType.from_model(type_) + + origin = get_origin(type_) + if origin is None: + return type_ + args = get_args(type_) + return cls._build(origin)[tuple(cls._build(arg) for arg in args)] diff --git a/packages/core/minos-microservice-common/minos/common/model/types/model_types.py b/packages/core/minos-microservice-common/minos/common/model/types/model_types.py index 027c89d34..e13812a83 100644 --- a/packages/core/minos-microservice-common/minos/common/model/types/model_types.py +++ b/packages/core/minos-microservice-common/minos/common/model/types/model_types.py @@ -87,8 +87,15 @@ def from_model(model: Union[Model, type[Model]]) -> ModelType: :param model: The model class. :return: A new ``ModelType`` instance. """ + from .builders import ( + TypeHintParser, + ) + + type_hints = GenericTypeProjector.from_model(model).build() + type_hints = {k: TypeHintParser(v).build() for k, v in type_hints.items()} + # noinspection PyTypeChecker - return ModelType.build(name_=model.classname, type_hints_=GenericTypeProjector.from_model(model).build()) + return ModelType.build(name_=model.classname, type_hints_=type_hints) def __call__(cls, *args, **kwargs) -> Model: return cls.model_cls.from_model_type(cls, *args, **kwargs) diff --git a/packages/core/minos-microservice-common/minos/common/pools.py b/packages/core/minos-microservice-common/minos/common/pools.py index c78ee6eed..c16c57176 100644 --- a/packages/core/minos-microservice-common/minos/common/pools.py +++ b/packages/core/minos-microservice-common/minos/common/pools.py @@ -29,6 +29,10 @@ from .config import ( Config, ) +from .exceptions import ( + MinosConfigException, + MinosException, +) from .injections import ( Injectable, ) @@ -87,7 +91,10 @@ def get_pool(self, type_: str, identifier: Optional[str] = None, **kwargs) -> Po def 
_create_pool(self, type_: str, **kwargs) -> Pool: # noinspection PyTypeChecker pool_cls = self._get_pool_cls(type_) - pool = pool_cls.from_config(self._config, **kwargs) + try: + pool = pool_cls.from_config(self._config, **kwargs) + except MinosConfigException: + raise PoolException("The pool could not be built.") return pool def _get_pool_cls(self, type_: str) -> type[Pool]: @@ -97,19 +104,23 @@ def _get_pool_cls(self, type_: str) -> type[Pool]: pool_cls = self._config.get_pools().get("types", dict()).get(type_) if pool_cls is None: - raise ValueError( + raise PoolException( f"There is not any provided {type!r} to build pools that matches the given type: {type_!r}" ) return pool_cls -class Pool(SetupMixin, PoolBase, Generic[P], ABC): +class _PoolBase(PoolBase, ABC): + def __init__(self, *args, maxsize: int = 10, recycle: Optional[int] = 300, **kwargs): + super().__init__(maxsize=maxsize, recycle=recycle) + + +class Pool(SetupMixin, _PoolBase, Generic[P], ABC): """Base class for Pool implementations in minos""" def __init__(self, *args, maxsize: int = 10, recycle: Optional[int] = 300, already_setup: bool = True, **kwargs): - SetupMixin.__init__(self, *args, already_setup=already_setup, **kwargs) - PoolBase.__init__(self, maxsize=maxsize, recycle=recycle) + super().__init__(*args, maxsize=maxsize, recycle=recycle, already_setup=already_setup, **kwargs) # noinspection PyUnresolvedReferences async def __acquire(self) -> Any: # pragma: no cover @@ -171,3 +182,7 @@ class MinosPool(Pool, Generic[P], ABC): def __init__(self, *args, **kwargs): warnings.warn(f"{MinosPool!r} has been deprecated. 
Use {Pool} instead.", DeprecationWarning) super().__init__(*args, **kwargs) + + +class PoolException(MinosException): + """Exception to be raised when some problem related with a pool happens.""" diff --git a/packages/core/minos-microservice-common/minos/common/testing/__init__.py b/packages/core/minos-microservice-common/minos/common/testing/__init__.py new file mode 100644 index 000000000..01fd17cc3 --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/__init__.py @@ -0,0 +1,10 @@ +from .database import ( + MockedDatabaseClient, + MockedDatabaseOperation, + MockedLockDatabaseOperationFactory, + MockedManagementDatabaseOperationFactory, +) +from .testcases import ( + DatabaseMinosTestCase, + MinosTestCase, +) diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/__init__.py b/packages/core/minos-microservice-common/minos/common/testing/database/__init__.py new file mode 100644 index 000000000..03b3ef9a7 --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/__init__.py @@ -0,0 +1,10 @@ +from .clients import ( + MockedDatabaseClient, +) +from .factories import ( + MockedLockDatabaseOperationFactory, + MockedManagementDatabaseOperationFactory, +) +from .operations import ( + MockedDatabaseOperation, +) diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/clients.py b/packages/core/minos-microservice-common/minos/common/testing/database/clients.py new file mode 100644 index 000000000..cc6ec8aae --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/clients.py @@ -0,0 +1,39 @@ +from collections.abc import ( + AsyncIterator, +) +from typing import ( + Any, +) + +from ...database import ( + DatabaseClient, +) +from .operations import ( + MockedDatabaseOperation, +) + + +class MockedDatabaseClient(DatabaseClient): + """For testing purposes""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) 
+ self.kwargs = kwargs + self._response = tuple() + + async def _is_valid(self, **kwargs) -> bool: + """For testing purposes""" + return True + + async def _reset(self, **kwargs) -> None: + """For testing purposes""" + self._response = tuple() + + async def _execute(self, operation: MockedDatabaseOperation) -> None: + """For testing purposes""" + self._response = operation.response + + async def _fetch_all(self, *args, **kwargs) -> AsyncIterator[Any]: + """For testing purposes""" + for value in self._response: + yield value diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/factories/__init__.py b/packages/core/minos-microservice-common/minos/common/testing/database/factories/__init__.py new file mode 100644 index 000000000..9aadffc99 --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/factories/__init__.py @@ -0,0 +1,6 @@ +from .locks import ( + MockedLockDatabaseOperationFactory, +) +from .managements import ( + MockedManagementDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/factories/locks.py b/packages/core/minos-microservice-common/minos/common/testing/database/factories/locks.py new file mode 100644 index 000000000..9cad4761d --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/factories/locks.py @@ -0,0 +1,25 @@ +from ....database import ( + DatabaseOperation, + LockDatabaseOperationFactory, +) +from ..clients import ( + MockedDatabaseClient, +) +from ..operations import ( + MockedDatabaseOperation, +) + + +class MockedLockDatabaseOperationFactory(LockDatabaseOperationFactory): + """For testing purposes""" + + def build_acquire(self, hashed_key: int) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("acquire") + + def build_release(self, hashed_key: int) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("release") + + 
+MockedDatabaseClient.set_factory(LockDatabaseOperationFactory, MockedLockDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/factories/managements.py b/packages/core/minos-microservice-common/minos/common/testing/database/factories/managements.py new file mode 100644 index 000000000..3819967b6 --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/factories/managements.py @@ -0,0 +1,25 @@ +from ....database import ( + DatabaseOperation, + ManagementDatabaseOperationFactory, +) +from ..clients import ( + MockedDatabaseClient, +) +from ..operations import ( + MockedDatabaseOperation, +) + + +class MockedManagementDatabaseOperationFactory(ManagementDatabaseOperationFactory): + """For testing purposes""" + + def build_create(self, database: str) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("create") + + def build_delete(self, database: str) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("delete") + + +MockedDatabaseClient.set_factory(ManagementDatabaseOperationFactory, MockedManagementDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-common/minos/common/testing/database/operations.py b/packages/core/minos-microservice-common/minos/common/testing/database/operations.py new file mode 100644 index 000000000..ac6d000e2 --- /dev/null +++ b/packages/core/minos-microservice-common/minos/common/testing/database/operations.py @@ -0,0 +1,20 @@ +from collections.abc import ( + Iterable, +) +from typing import ( + Any, + Optional, +) + +from ...database import ( + DatabaseOperation, +) + + +class MockedDatabaseOperation(DatabaseOperation): + """For testing purposes""" + + def __init__(self, content: str, response: Optional[Iterable[Any]] = tuple(), *args, **kwargs): + super().__init__(*args, **kwargs) + self.content = content + self.response = tuple(response) diff --git 
a/packages/core/minos-microservice-common/minos/common/testing.py b/packages/core/minos-microservice-common/minos/common/testing/testcases.py similarity index 85% rename from packages/core/minos-microservice-common/minos/common/testing.py rename to packages/core/minos-microservice-common/minos/common/testing/testcases.py index a9f3cf4f7..d9e718ab0 100644 --- a/packages/core/minos-microservice-common/minos/common/testing.py +++ b/packages/core/minos-microservice-common/minos/common/testing/testcases.py @@ -20,20 +20,19 @@ uuid4, ) -from .config import ( +from ..config import ( Config, ) -from .database import ( - AiopgDatabaseClient, +from ..database import ( DatabaseClient, DatabaseClientPool, - ManageDatabaseOperationFactory, + ManagementDatabaseOperationFactory, ) -from .injections import ( +from ..injections import ( DependencyInjector, InjectableMixin, ) -from .pools import ( +from ..pools import ( PoolFactory, ) @@ -84,7 +83,9 @@ def setUp(self): super().setUp() def get_client(self) -> DatabaseClient: - return AiopgDatabaseClient.from_config(self.base_config) + default_config = self.base_config.get_default_database() + client = default_config.get("client") + return client.from_config(self.base_config) def get_config(self) -> Config: config = Config(self.get_config_file_path()) @@ -112,10 +113,10 @@ async def _create_database(self, test: dict[str, Any]) -> None: await self._drop_database(test) async with self.get_client() as client: - operation = client.get_factory(ManageDatabaseOperationFactory).build_create(test["database"]) + operation = client.get_factory(ManagementDatabaseOperationFactory).build_create(test["database"]) await client.execute(operation) async def _drop_database(self, test: dict[str, Any]) -> None: async with self.get_client() as client: - operation = client.get_factory(ManageDatabaseOperationFactory).build_delete(test["database"]) + operation = client.get_factory(ManagementDatabaseOperationFactory).build_delete(test["database"]) await 
client.execute(operation) diff --git a/packages/core/minos-microservice-common/poetry.lock b/packages/core/minos-microservice-common/poetry.lock index e42299fc2..65af87063 100644 --- a/packages/core/minos-microservice-common/poetry.lock +++ b/packages/core/minos-microservice-common/poetry.lock @@ -19,29 +19,6 @@ develop = ["aiocontextvars (==0.2.2)", "aiohttp-asgi", "aiohttp (<4)", "async-ti raven = ["raven-aiohttp"] uvloop = ["uvloop (>=0.14,<1)"] -[[package]] -name = "aiopg" -version = "1.3.3" -description = "Postgres integration with asyncio." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -async-timeout = ">=3.0,<5.0" -psycopg2-binary = ">=2.8.4" - -[package.extras] -sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"] - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -279,14 +256,6 @@ python-versions = ">=3.6" dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "psycopg2-binary" -version = "2.9.3" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "py" version = "1.11.0" @@ -391,21 +360,13 @@ test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,< [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "0ffaa4f60d01f3464d7a3797d3ba04e6b1ebbaf2ae67f105cf5ba49e79521d50" +content-hash = "fa95fd5d2d8811d958396a78489d81ebc4f8945e7b4139e9e94b1d2797bb1377" [metadata.files] aiomisc = [ {file = "aiomisc-15.7.3-py3-none-any.whl", hash = "sha256:0403e83268e98d0f2a125a70d13303fe1a2358e36db3daf02df032c7fa4f1525"}, {file = "aiomisc-15.7.3.tar.gz", hash = "sha256:ba250a34bd4609ced36111cb50580f57c3d52f3955f953a53ecb2986988baedc"}, ] -aiopg = [ - {file = 
"aiopg-1.3.3-py3-none-any.whl", hash = "sha256:2842dd8741460eeef940032dcb577bfba4d4115205dd82a73ce13b3271f5bf0a"}, - {file = "aiopg-1.3.3.tar.gz", hash = "sha256:547c6ba4ea0d73c2a11a2f44387d7133cc01d3c6f3b8ed976c0ac1eff4f595d7"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -653,64 +614,6 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -psycopg2-binary = [ - {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, - {file = 
"psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, - {file = 
"psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, - {file = 
"psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, -] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, diff --git a/packages/core/minos-microservice-common/pyproject.toml b/packages/core/minos-microservice-common/pyproject.toml index 7bc7662ab..8e3b573ae 100644 --- a/packages/core/minos-microservice-common/pyproject.toml +++ b/packages/core/minos-microservice-common/pyproject.toml @@ -36,7 +36,6 @@ fastavro = "^1.4.0" orjson = "^3.5.2" lmdb = "^1.2.1" PyYAML = ">=5.4.1,<7.0.0" -aiopg = "^1.2.1" dependency-injector = "^4.32.2" cached-property = "^1.5.2" uvloop = "^0.16.0" diff --git a/packages/core/minos-microservice-common/tests/config/v1.yml b/packages/core/minos-microservice-common/tests/config/v1.yml index 0b124e10c..950aaa4e1 100644 --- a/packages/core/minos-microservice-common/tests/config/v1.yml +++ b/packages/core/minos-microservice-common/tests/config/v1.yml @@ -19,7 +19,7 @@ service: - tests.utils.FakeSagaManager - tests.utils.FakeCustomInjection repository: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s diff --git a/packages/core/minos-microservice-common/tests/config/v2.yml b/packages/core/minos-microservice-common/tests/config/v2.yml index af4e8dfc9..5222904d1 100644 --- a/packages/core/minos-microservice-common/tests/config/v2.yml +++ b/packages/core/minos-microservice-common/tests/config/v2.yml @@ -6,7 +6,7 @@ serializer: client: tests.utils.FakeSerializer databases: 
default: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s @@ -15,6 +15,7 @@ databases: saga: path: "./order.lmdb" query: + client: minos.common.testing.MockedDatabaseClient database: order_query_db user: minos password: min0s diff --git a/packages/core/minos-microservice-common/tests/test_common/test_config/test_abc.py b/packages/core/minos-microservice-common/tests/test_common/test_config/test_abc.py index f8c6077bf..1ae4ac008 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_config/test_abc.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_config/test_abc.py @@ -147,7 +147,8 @@ def test_get_database_unknown(self): mock = MagicMock(return_value={"default": "foo"}) self.config._get_databases = mock - self.assertEqual("foo", self.config.get_database_by_name("unknown")) + with self.assertRaises(MinosConfigException): + self.config.get_database_by_name("unknown") self.assertEqual([call()], mock.call_args_list) diff --git a/packages/core/minos-microservice-common/tests/test_common/test_config/test_v2/test_base.py b/packages/core/minos-microservice-common/tests/test_common/test_config/test_v2/test_base.py index ff0d2bc8d..163d8b6d5 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_config/test_v2/test_base.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_config/test_v2/test_base.py @@ -191,15 +191,6 @@ def test_database_default(self): self.assertEqual("localhost", database_config["host"]) self.assertEqual(5432, database_config["port"]) - def test_database_event(self): - config = ConfigV2(self.file_path, with_environment=False) - database_config = config.get_database_by_name("event") - self.assertEqual("order_db", database_config["database"]) - self.assertEqual("minos", database_config["user"]) - self.assertEqual("min0s", database_config["password"]) - self.assertEqual("localhost", 
database_config["host"]) - self.assertEqual(5432, database_config["port"]) - def test_database_query(self): config = ConfigV2(self.file_path, with_environment=False) query_database = config.get_database_by_name("query") @@ -209,24 +200,6 @@ def test_database_query(self): self.assertEqual("localhost", query_database["host"]) self.assertEqual(5432, query_database["port"]) - def test_database_snapshot(self): - config = ConfigV2(self.file_path, with_environment=False) - snapshot = config.get_database_by_name("snapshot") - self.assertEqual("order_db", snapshot["database"]) - self.assertEqual("minos", snapshot["user"]) - self.assertEqual("min0s", snapshot["password"]) - self.assertEqual("localhost", snapshot["host"]) - self.assertEqual(5432, snapshot["port"]) - - def test_database_broker(self): - config = ConfigV2(self.file_path, with_environment=False) - snapshot = config.get_database_by_name("broker") - self.assertEqual("order_db", snapshot["database"]) - self.assertEqual("minos", snapshot["user"]) - self.assertEqual("min0s", snapshot["password"]) - self.assertEqual("localhost", snapshot["host"]) - self.assertEqual(5432, snapshot["port"]) - def test_database_saga(self): config = ConfigV2(self.file_path, with_environment=False) saga = config.get_database_by_name("saga") diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_abc.py b/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_abc.py index dbbf9afa0..e7ee8f393 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_abc.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_abc.py @@ -10,16 +10,19 @@ AsyncMock, MagicMock, call, + patch, ) from minos.common import ( - AiopgDatabaseClient, BuildableMixin, ComposedDatabaseOperation, DatabaseClient, DatabaseClientBuilder, + DatabaseLock, DatabaseOperation, DatabaseOperationFactory, + 
LockDatabaseOperationFactory, + ProgrammingException, ) from tests.utils import ( CommonTestCase, @@ -55,7 +58,22 @@ class _DatabaseOperationFactoryImpl(_DatabaseOperationFactory): """For testing purposes.""" -class TestDatabaseClient(unittest.IsolatedAsyncioTestCase): +class _LockDatabaseOperationFactory(LockDatabaseOperationFactory): + """For testing purposes.""" + + def build_acquire(self, hashed_key: int) -> DatabaseOperation: + """For testing purposes.""" + return _DatabaseOperation() + + def build_release(self, hashed_key: int) -> DatabaseOperation: + """For testing purposes.""" + return _DatabaseOperation() + + +_DatabaseClient.set_factory(LockDatabaseOperationFactory, _LockDatabaseOperationFactory) + + +class TestDatabaseClient(CommonTestCase): def test_abstract(self): self.assertTrue(issubclass(DatabaseClient, (ABC, BuildableMixin))) expected = {"_is_valid", "_execute", "_fetch_all", "_reset"} @@ -65,6 +83,10 @@ def test_abstract(self): def test_get_builder(self): self.assertIsInstance(DatabaseClient.get_builder(), DatabaseClientBuilder) + def test_from_config(self): + client = _DatabaseClient.from_config(self.config) + self.assertIsInstance(client, DatabaseClient) + async def test_is_valid(self): mock = AsyncMock(side_effect=[True, False]) client = _DatabaseClient() @@ -75,6 +97,26 @@ async def test_is_valid(self): self.assertEqual([call(), call()], mock.call_args_list) + async def test_lock(self): + _DatabaseClient.set_factory(LockDatabaseOperationFactory, _LockDatabaseOperationFactory) + op1 = _DatabaseOperation(lock="foo") + client = _DatabaseClient() + self.assertIsNone(client.lock) + async with client: + self.assertIsNone(client.lock) + await client.execute(op1) + self.assertIsInstance(client.lock, DatabaseLock) + + self.assertIsNone(client.lock) + + async def test_lock_reset(self): + op1 = _DatabaseOperation(lock="foo") + async with _DatabaseClient() as client: + await client.execute(op1) + self.assertIsInstance(client.lock, DatabaseLock) + await 
client.reset() + self.assertIsNone(client.lock) + async def test_reset(self): mock = AsyncMock() client = _DatabaseClient() @@ -102,6 +144,36 @@ async def test_execute_composed(self): self.assertEqual([call(composed.operations[0]), call(composed.operations[1])], mock.call_args_list) + async def test_execute_with_lock(self): + op1 = _DatabaseOperation(lock="foo") + with patch.object(DatabaseLock, "acquire") as enter_lock_mock: + with patch.object(DatabaseLock, "release") as exit_lock_mock: + async with _DatabaseClient() as client: + await client.execute(op1) + self.assertEqual(1, enter_lock_mock.call_count) + self.assertEqual(0, exit_lock_mock.call_count) + enter_lock_mock.reset_mock() + exit_lock_mock.reset_mock() + self.assertEqual(0, enter_lock_mock.call_count) + self.assertEqual(1, exit_lock_mock.call_count) + + async def test_execute_with_lock_multiple(self): + op1 = _DatabaseOperation(lock="foo") + op2 = _DatabaseOperation(lock="bar") + async with _DatabaseClient() as client: + self.assertIsNone(client.lock) + + await client.execute(op1) + foo_lock = client.lock + self.assertIsInstance(foo_lock, DatabaseLock) + + await client.execute(op1) + self.assertEqual(foo_lock, client.lock) + + await client.execute(op2) + self.assertNotEqual(foo_lock, client.lock) + self.assertIsInstance(client.lock, DatabaseLock) + async def test_execute_raises_unsupported(self): client = _DatabaseClient() with self.assertRaises(ValueError): @@ -126,31 +198,39 @@ async def test_fetch_one(self): self.assertEqual([call()], mock.call_args_list) - def test_register_factory(self): + async def test_fetch_one_raises(self): + mock = MagicMock(return_value=FakeAsyncIterator([])) + client = _DatabaseClient() + client._fetch_all = mock + + with self.assertRaises(ProgrammingException): + await client.fetch_one() + + def test_set_factory(self): + expected = { + LockDatabaseOperationFactory: _LockDatabaseOperationFactory, + _DatabaseOperationFactory: _DatabaseOperationFactoryImpl, + } try: - 
_DatabaseClient.register_factory(_DatabaseOperationFactory, _DatabaseOperationFactoryImpl) + _DatabaseClient.set_factory(_DatabaseOperationFactory, _DatabaseOperationFactoryImpl) - self.assertEqual({_DatabaseOperationFactory: _DatabaseOperationFactoryImpl}, _DatabaseClient._factories) + self.assertEqual(expected, _DatabaseClient._factories) finally: - _DatabaseClient._factories.clear() + _DatabaseClient._factories.pop(_DatabaseOperationFactory) - def test_register_factory_raises(self): + def test_set_factory_raises(self): with self.assertRaises(ValueError): # noinspection PyTypeChecker - _DatabaseClient.register_factory(object, DatabaseOperationFactory) + _DatabaseClient.set_factory(object, DatabaseOperationFactory) with self.assertRaises(ValueError): - _DatabaseClient.register_factory(_DatabaseOperationFactoryImpl, _DatabaseOperationFactory) + _DatabaseClient.set_factory(_DatabaseOperationFactoryImpl, _DatabaseOperationFactory) def test_get_factory(self): - try: - _DatabaseClient._factories = {_DatabaseOperationFactory: _DatabaseOperationFactoryImpl} - self.assertIsInstance( - _DatabaseClient.get_factory(_DatabaseOperationFactory), - _DatabaseOperationFactoryImpl, - ) - finally: - _DatabaseClient._factories.clear() + self.assertIsInstance( + _DatabaseClient.get_factory(LockDatabaseOperationFactory), + _LockDatabaseOperationFactory, + ) def test_get_factory_raises(self): with self.assertRaises(ValueError): @@ -159,20 +239,13 @@ def test_get_factory_raises(self): class TestDatabaseClientBuilder(CommonTestCase): def test_with_name(self): - builder = DatabaseClientBuilder(AiopgDatabaseClient).with_name("query") + builder = DatabaseClientBuilder(_DatabaseClient).with_name("query") self.assertEqual({"name": "query"}, builder.kwargs) def test_with_config(self): - builder = DatabaseClientBuilder(AiopgDatabaseClient).with_name("query").with_config(self.config) + builder = DatabaseClientBuilder(_DatabaseClient).with_name("query").with_config(self.config) 
self.assertEqual({"name": "query"} | self.config.get_database_by_name("query"), builder.kwargs) - def test_build(self): - builder = DatabaseClientBuilder(AiopgDatabaseClient).with_name("query").with_config(self.config) - client = builder.build() - - self.assertIsInstance(client, AiopgDatabaseClient) - self.assertEqual(self.config.get_database_by_name("query")["database"], client.database) - if __name__ == "__main__": unittest.main() diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_locks.py b/packages/core/minos-microservice-common/tests/test_common/test_database/test_locks.py index 1352a6ca3..f18159339 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_locks.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_database/test_locks.py @@ -1,39 +1,35 @@ import unittest from minos.common import ( - AiopgDatabaseClient, DatabaseLock, Lock, ) from minos.common.testing import ( - DatabaseMinosTestCase, -) -from tests.utils import ( - CommonTestCase, + MockedDatabaseClient, ) -class TestDatabaseLock(CommonTestCase, DatabaseMinosTestCase): +class TestDatabaseLock(unittest.IsolatedAsyncioTestCase): def test_base(self): self.assertTrue(issubclass(DatabaseLock, Lock)) async def test_client(self): - client = AiopgDatabaseClient(**self.config.get_default_database()) + client = MockedDatabaseClient() lock = DatabaseLock(client, "foo") self.assertEqual(client, lock.client) async def test_key(self): - client = AiopgDatabaseClient(**self.config.get_default_database()) + client = MockedDatabaseClient() lock = DatabaseLock(client, "foo") self.assertEqual("foo", lock.key) async def test_key_raises(self): - client = AiopgDatabaseClient(**self.config.get_default_database()) + client = MockedDatabaseClient() with self.assertRaises(ValueError): DatabaseLock(client, []) async def test_hashed_key(self): - client = AiopgDatabaseClient(**self.config.get_default_database()) + client = 
MockedDatabaseClient() lock = DatabaseLock(client, "foo") self.assertEqual(hash("foo"), lock.hashed_key) diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_mixins.py b/packages/core/minos-microservice-common/tests/test_common/test_database/test_mixins.py index d41464845..a0433e187 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_mixins.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_database/test_mixins.py @@ -1,9 +1,6 @@ import unittest from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, - AiopgLockDatabaseOperationFactory, DatabaseClientPool, DatabaseMixin, LockDatabaseOperationFactory, @@ -12,6 +9,9 @@ ) from minos.common.testing import ( DatabaseMinosTestCase, + MockedDatabaseClient, + MockedDatabaseOperation, + MockedLockDatabaseOperationFactory, ) from tests.utils import ( CommonTestCase, @@ -22,6 +22,7 @@ class TestDatabaseMixin(CommonTestCase, DatabaseMinosTestCase): def test_constructor(self): pool = DatabaseClientPool.from_config(self.config) + # noinspection PyTypeChecker database = DatabaseMixin(pool) self.assertEqual(pool, database.database_pool) @@ -32,6 +33,13 @@ async def test_constructor_with_pool_factory(self): # noinspection PyUnresolvedReferences self.assertEqual(pool_factory.get_pool("database"), database.database_pool) + async def test_constructor_with_pool_factory_and_database_key(self): + pool_factory = PoolFactory(self.config, {"database": DatabaseClientPool}) + # noinspection PyTypeChecker + database = DatabaseMixin(pool_factory=pool_factory, database_key=("query", "unknown")) + # noinspection PyUnresolvedReferences + self.assertEqual(pool_factory.get_pool("database", "query"), database.database_pool) + async def test_constructor_raises(self): with self.assertRaises(NotProvidedException): # noinspection PyArgumentEqualDefault @@ -42,24 +50,24 @@ async def test_pool(self): 
self.assertIsInstance(database.database_pool, DatabaseClientPool) async def test_operation_factory(self): - operation_factory = AiopgLockDatabaseOperationFactory() + operation_factory = MockedLockDatabaseOperationFactory() mixin = DatabaseMixin(operation_factory=operation_factory) - self.assertEqual(operation_factory, mixin.operation_factory) + self.assertEqual(operation_factory, mixin.database_operation_factory) async def test_operation_factory_from_cls_init(self): mixin = DatabaseMixin(operation_factory_cls=LockDatabaseOperationFactory) - self.assertIsInstance(mixin.operation_factory, AiopgLockDatabaseOperationFactory) + self.assertIsInstance(mixin.database_operation_factory, MockedLockDatabaseOperationFactory) async def test_operation_factory_from_cls_generic(self): class _DatabaseMixin(DatabaseMixin[LockDatabaseOperationFactory]): """For testing purposes.""" mixin = _DatabaseMixin() - self.assertIsInstance(mixin.operation_factory, AiopgLockDatabaseOperationFactory) + self.assertIsInstance(mixin.database_operation_factory, MockedLockDatabaseOperationFactory) async def test_operation_factory_none(self): mixin = DatabaseMixin() - self.assertEqual(None, mixin.operation_factory) + self.assertEqual(None, mixin.database_operation_factory) async def test_operation_factory_from_cls_generic_raises(self): class _DatabaseMixin(DatabaseMixin[int]): @@ -68,76 +76,76 @@ class _DatabaseMixin(DatabaseMixin[int]): with self.assertRaises(TypeError): _DatabaseMixin() - async def test_submit_query(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);") - op2 = AiopgDatabaseOperation("SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'foo');") + async def test_execute_on_database(self): + op1 = MockedDatabaseOperation("create_table") + op2 = MockedDatabaseOperation("check_exist", [(True,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) + await database.execute_on_database(op1) - async with 
AiopgDatabaseClient(**self.config.get_default_database()) as client: + async with MockedDatabaseClient(**self.config.get_default_database()) as client: await client.execute(op2) self.assertTrue((await client.fetch_one())[0]) - async def test_submit_query_locked(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);", lock=1234) - op2 = AiopgDatabaseOperation("SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'foo');") + async def test_execute_on_database_locked(self): + op1 = MockedDatabaseOperation("create_table", lock=1234) + op2 = MockedDatabaseOperation("check_exist", [(True,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) + await database.execute_on_database(op1) - async with AiopgDatabaseClient(**self.config.get_default_database()) as client: + async with MockedDatabaseClient(**self.config.get_default_database()) as client: await client.execute(op2) self.assertTrue((await client.fetch_one())[0]) - async def test_submit_query_and_fetchone(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);") - op2 = AiopgDatabaseOperation("INSERT INTO foo (id) VALUES (3), (4), (5);") - op3 = AiopgDatabaseOperation("SELECT * FROM foo;") + async def test_execute_on_database_and_fetch_one(self): + op1 = MockedDatabaseOperation("create_table") + op2 = MockedDatabaseOperation("insert") + op3 = MockedDatabaseOperation("select", [(3,), (4,), (5,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) - await database.submit_query(op2) + await database.execute_on_database(op1) + await database.execute_on_database(op2) - observed = await database.submit_query_and_fetchone(op3) + observed = await database.execute_on_database_and_fetch_one(op3) self.assertEqual((3,), observed) - async def test_submit_query_and_iter(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);") - op2 = AiopgDatabaseOperation("INSERT INTO foo (id) VALUES (3), (4), (5);") - op3 
= AiopgDatabaseOperation("SELECT * FROM foo;") + async def test_execute_on_database_and_fetch_all(self): + op1 = MockedDatabaseOperation("create_table") + op2 = MockedDatabaseOperation("insert") + op3 = MockedDatabaseOperation("select", [(3,), (4,), (5,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) - await database.submit_query(op2) - observed = [v async for v in database.submit_query_and_iter(op3)] + await database.execute_on_database(op1) + await database.execute_on_database(op2) + observed = [v async for v in database.execute_on_database_and_fetch_all(op3)] self.assertEqual([(3,), (4,), (5,)], observed) - async def test_submit_query_and_iter_streaming_mode_true(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);") - op2 = AiopgDatabaseOperation("INSERT INTO foo (id) VALUES (3), (4), (5);") - op3 = AiopgDatabaseOperation("SELECT * FROM foo;") + async def test_execute_on_database_and_fetch_all_streaming_mode_true(self): + op1 = MockedDatabaseOperation("create_table") + op2 = MockedDatabaseOperation("insert") + op3 = MockedDatabaseOperation("select", [(3,), (4,), (5,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) - await database.submit_query(op2) + await database.execute_on_database(op1) + await database.execute_on_database(op2) - observed = [v async for v in database.submit_query_and_iter(op3, streaming_mode=True)] + observed = [v async for v in database.execute_on_database_and_fetch_all(op3, streaming_mode=True)] self.assertEqual([(3,), (4,), (5,)], observed) - async def test_submit_query_and_iter_locked(self): - op1 = AiopgDatabaseOperation("CREATE TABLE foo (id INT NOT NULL);", lock=1234) - op2 = AiopgDatabaseOperation("INSERT INTO foo (id) VALUES (3), (4), (5);") - op3 = AiopgDatabaseOperation("SELECT * FROM foo;") + async def test_execute_on_database_and_fetch_all_locked(self): + op1 = MockedDatabaseOperation("create_table", lock=1234) + op2 = 
MockedDatabaseOperation("insert") + op3 = MockedDatabaseOperation("select", [(3,), (4,), (5,)]) async with DatabaseMixin() as database: - await database.submit_query(op1) - await database.submit_query(op2) + await database.execute_on_database(op1) + await database.execute_on_database(op2) - observed = [v async for v in database.submit_query_and_iter(op3)] + observed = [v async for v in database.execute_on_database_and_fetch_all(op3)] self.assertEqual([(3,), (4,), (5,)], observed) diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_pools.py b/packages/core/minos-microservice-common/tests/test_common/test_database/test_pools.py index e8dbb12fd..bfe2d53b8 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_pools.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_database/test_pools.py @@ -4,18 +4,18 @@ ) from minos.common import ( - AiopgDatabaseClient, Config, + ConnectionException, DatabaseClient, DatabaseClientBuilder, DatabaseClientPool, DatabaseLock, DatabaseLockPool, - UnableToConnectException, classname, ) from minos.common.testing import ( DatabaseMinosTestCase, + MockedDatabaseClient, ) from tests.utils import ( CONFIG_FILE_PATH, @@ -44,7 +44,7 @@ async def asyncTearDown(self): def test_from_config(self): pool = DatabaseClientPool.from_config(self.config, key="event") self.assertIsInstance(pool.client_builder, DatabaseClientBuilder) - self.assertEqual(AiopgDatabaseClient, pool.client_builder.instance_cls) + self.assertEqual(MockedDatabaseClient, pool.client_builder.instance_cls) def test_from_config_client_builder(self): config = Config(CONFIG_FILE_PATH, databases_default_client=classname(DatabaseClientBuilder)) @@ -72,16 +72,15 @@ async def test_acquire_multiple_same_time(self): self.assertNotEqual(c1, c2) async def test_acquire_with_reset(self): - with patch.object(AiopgDatabaseClient, "reset") as reset_mock: + with patch.object(MockedDatabaseClient, "reset") as 
reset_mock: async with self.pool.acquire(): self.assertEqual(0, reset_mock.call_count) self.assertEqual(1, reset_mock.call_count) - async def test_acquire_with_connection_error(self): - with patch.object(AiopgDatabaseClient, "_create_connection", side_effect=(UnableToConnectException(""), None)): - with patch.object(AiopgDatabaseClient, "is_valid", return_value=True): - async with self.pool.acquire() as client: - self.assertIsInstance(client, AiopgDatabaseClient) + async def test_acquire_with_raises(self): + with patch.object(MockedDatabaseClient, "setup", side_effect=[ConnectionException(""), None]): + async with self.pool.acquire() as client: + self.assertIsInstance(client, MockedDatabaseClient) class TestDatabaseLockPool(CommonTestCase, DatabaseMinosTestCase): diff --git a/packages/core/minos-microservice-common/tests/test_common/test_model/test_declarative/test_avro.py b/packages/core/minos-microservice-common/tests/test_common/test_model/test_declarative/test_avro.py index bc2927c80..2e851c368 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_model/test_declarative/test_avro.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_model/test_declarative/test_avro.py @@ -73,14 +73,12 @@ def test_avro_schema_generics_nested(self): "fields": [ { "name": "user", - "type": [ - { - "fields": [{"name": "username", "type": "string"}], - "name": "GenericUser", - "namespace": "tests.model_classes.goodbye", - "type": "record", - } - ], + "type": { + "fields": [{"name": "username", "type": "string"}], + "name": "GenericUser", + "namespace": "tests.model_classes.goodbye", + "type": "record", + }, } ], "name": "Auth", diff --git a/packages/core/minos-microservice-common/tests/test_common/test_model/test_types/test_builders.py b/packages/core/minos-microservice-common/tests/test_common/test_model/test_types/test_builders.py index 103cc4c66..24a849f60 100644 --- 
a/packages/core/minos-microservice-common/tests/test_common/test_model/test_types/test_builders.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_model/test_types/test_builders.py @@ -1,12 +1,14 @@ import unittest from typing import ( Any, + Optional, Union, ) from minos.common import ( ModelType, TypeHintBuilder, + TypeHintParser, ) from tests.model_classes import ( Foo, @@ -46,5 +48,35 @@ def test_union_any(self): self.assertEqual(expected, observed) +class TestTypeHintParser(unittest.TestCase): + def test_immutable(self): + self.assertEqual(int, TypeHintParser(int).build()) + + def test_optional(self): + self.assertEqual(Optional[int], TypeHintParser(Optional[int]).build()) + + def test_model(self): + # noinspection PyPep8Naming + FooMt = ModelType.build("tests.model_classes.Foo", {"text": str}) + self.assertEqual(FooMt, TypeHintParser(Foo).build()) + + def test_nested_model(self): + # noinspection PyPep8Naming + FooMt = ModelType.build("tests.model_classes.Foo", {"text": str}) + self.assertEqual(Optional[FooMt], TypeHintParser(Optional[Foo]).build()) + + def test_model_type(self): + # noinspection PyPep8Naming + FooMt = ModelType.build("tests.model_classes.Foo", {"text": str}) + self.assertEqual(FooMt, TypeHintParser(FooMt).build()) + + # noinspection PyPep8Naming + def test_model_type_nested(self): + Base = ModelType.build("Base", {"another": Foo}) + FooMt = ModelType.build("tests.model_classes.Foo", {"text": str}) + Expected = ModelType.build("Base", {"another": FooMt}) + self.assertEqual(Expected, TypeHintParser(Base).build()) + + if __name__ == "__main__": unittest.main() diff --git a/packages/core/minos-microservice-common/tests/test_common/test_pools.py b/packages/core/minos-microservice-common/tests/test_common/test_pools.py index dbd6dea4e..f0607679f 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_pools.py +++ b/packages/core/minos-microservice-common/tests/test_common/test_pools.py @@ -10,6 +10,7 @@ ) 
from unittest.mock import ( MagicMock, + patch, ) from aiomisc import ( @@ -17,8 +18,10 @@ ) from minos.common import ( + MinosConfigException, MinosPool, Pool, + PoolException, PoolFactory, SetupMixin, ) @@ -55,10 +58,15 @@ def test_get_pool_with_key(self): self.assertEqual(lock_a, self.factory.get_pool("lock", "a")) self.assertEqual(lock_b, self.factory.get_pool("lock", "b")) - def test_get_pool_raises(self): - with self.assertRaises(ValueError): + def test_get_pool_cls_raises(self): + with self.assertRaises(PoolException): self.factory.get_pool("something") + def test_get_pool_identifier_raises(self): + with patch.object(SetupMixin, "from_config", side_effect=MinosConfigException("")): + with self.assertRaises(PoolException): + self.factory.get_pool("database") + class TestPool(unittest.IsolatedAsyncioTestCase): def test_abstract(self): diff --git a/packages/core/minos-microservice-common/tests/utils.py b/packages/core/minos-microservice-common/tests/utils.py index d54835fe6..b7090bbae 100644 --- a/packages/core/minos-microservice-common/tests/utils.py +++ b/packages/core/minos-microservice-common/tests/utils.py @@ -1,3 +1,7 @@ +from __future__ import ( + annotations, +) + from pathlib import ( Path, ) @@ -9,16 +13,16 @@ Lock, LockPool, Port, -) -from minos.common.testing import ( - MinosTestCase, + testing, ) BASE_PATH = Path(__file__).parent CONFIG_FILE_PATH = BASE_PATH / "config" / "v2.yml" -class CommonTestCase(MinosTestCase): +class CommonTestCase(testing.MinosTestCase): + testing_module = testing + def get_config_file_path(self) -> Path: return CONFIG_FILE_PATH diff --git a/packages/core/minos-microservice-cqrs/tests/test_config.yml b/packages/core/minos-microservice-cqrs/tests/test_config.yml index dd7af43fc..ee1e9270b 100644 --- a/packages/core/minos-microservice-cqrs/tests/test_config.yml +++ b/packages/core/minos-microservice-cqrs/tests/test_config.yml @@ -8,7 +8,7 @@ broker: host: localhost port: 9092 queue: - client: minos.common.AiopgDatabaseClient 
+ client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s @@ -17,14 +17,14 @@ broker: records: 10 retry: 2 repository: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s host: localhost port: 5432 snapshot: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s diff --git a/packages/core/minos-microservice-networks/minos/networks/__init__.py b/packages/core/minos-microservice-networks/minos/networks/__init__.py index 359568042..0dd2582c7 100644 --- a/packages/core/minos-microservice-networks/minos/networks/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/__init__.py @@ -7,10 +7,6 @@ from .brokers import ( REQUEST_HEADERS_CONTEXT_VAR, REQUEST_REPLY_TOPIC_CONTEXT_VAR, - AiopgBrokerPublisherQueueDatabaseOperationFactory, - AiopgBrokerQueueDatabaseOperationFactory, - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerClient, BrokerClientPool, BrokerDispatcher, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/__init__.py index bcf814550..0ec5e35de 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/__init__.py @@ -2,7 +2,6 @@ BrokerClient, ) from .collections import ( - AiopgBrokerQueueDatabaseOperationFactory, BrokerQueue, BrokerQueueDatabaseOperationFactory, DatabaseBrokerQueue, @@ -33,7 +32,6 @@ BrokerClientPool, ) from .publishers import ( - AiopgBrokerPublisherQueueDatabaseOperationFactory, BrokerPublisher, BrokerPublisherBuilder, BrokerPublisherQueue, @@ -44,8 +42,6 @@ QueuedBrokerPublisher, ) from .subscribers import ( - 
AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerSubscriber, BrokerSubscriberBuilder, BrokerSubscriberDuplicateValidator, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/__init__.py index dd2bfc375..38a6846a9 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/__init__.py @@ -1,5 +1,4 @@ from .queues import ( - AiopgBrokerQueueDatabaseOperationFactory, BrokerQueue, BrokerQueueDatabaseOperationFactory, DatabaseBrokerQueue, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/__init__.py index ad54e3cde..d31afd683 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/__init__.py @@ -2,7 +2,6 @@ BrokerQueue, ) from .database import ( - AiopgBrokerQueueDatabaseOperationFactory, BrokerQueueDatabaseOperationFactory, DatabaseBrokerQueue, DatabaseBrokerQueueBuilder, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/__init__.py index 2b7024161..05a61761f 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgBrokerQueueDatabaseOperationFactory, 
BrokerQueueDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/__init__.py index ee7e48786..f00f6af3a 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( BrokerQueueDatabaseOperationFactory, ) -from .aiopg import ( - AiopgBrokerQueueDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/abc.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/abc.py index 2a6847aa4..e7bf53f67 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/abc.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/abc.py @@ -17,21 +17,21 @@ class BrokerQueueDatabaseOperationFactory(DatabaseOperationFactory, ABC): """Broker Queue Database Operation Factory class.""" @abstractmethod - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the "create table" query. :return: A ``SQL`` instance. """ @abstractmethod - def build_update_not_processed(self, id_: int) -> DatabaseOperation: + def build_mark_processed(self, id_: int) -> DatabaseOperation: """Build the "update not processed" query. :return: A ``SQL`` instance. """ @abstractmethod - def build_delete_processed(self, id_: int) -> DatabaseOperation: + def build_delete(self, id_: int) -> DatabaseOperation: """Build the "delete processed" query. :return: A ``SQL`` instance. 
@@ -45,21 +45,21 @@ def build_mark_processing(self, ids: Iterable[int]) -> DatabaseOperation: """ @abstractmethod - def build_count_not_processed(self, retry: int, *args, **kwargs) -> DatabaseOperation: + def build_count(self, retry: int, *args, **kwargs) -> DatabaseOperation: """Build the "count not processed" query. :return: """ @abstractmethod - def build_insert(self, topic: str, data: bytes) -> DatabaseOperation: + def build_submit(self, topic: str, data: bytes) -> DatabaseOperation: """Build the "insert" query. :return: A ``SQL`` instance. """ @abstractmethod - def build_select_not_processed(self, retry: int, records: int, *args, **kwargs) -> DatabaseOperation: + def build_query(self, retry: int, records: int, *args, **kwargs) -> DatabaseOperation: """Build the "select not processed" query. :return: A ``SQL`` instance. diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/impl.py b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/impl.py index cdfe1e328..05c876780 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/impl.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/impl.py @@ -65,9 +65,12 @@ def __init__( *args, retry: Optional[int] = None, records: Optional[int] = None, + database_key: Optional[tuple[str]] = None, **kwargs, ): - super().__init__(*args, **kwargs) + if database_key is None: + database_key = ("broker",) + super().__init__(*args, database_key=database_key, **kwargs) if retry is None: retry = 2 @@ -117,8 +120,8 @@ async def _destroy(self) -> None: await super()._destroy() async def _create_table(self) -> None: - operation = self.operation_factory.build_create_table() - await self.submit_query(operation) + operation = self.database_operation_factory.build_create() + await self.execute_on_database(operation) async def _start_run(self) -> None: if 
self._run_task is None: @@ -139,13 +142,13 @@ async def _flush_queue(self): entry = self._queue.get_nowait() except QueueEmpty: break - operation = self.operation_factory.build_update_not_processed(entry.id_) - await self.submit_query(operation) + operation = self.database_operation_factory.build_mark_processed(entry.id_) + await self.execute_on_database(operation) self._queue.task_done() async def _enqueue(self, message: BrokerMessage) -> None: - operation = self.operation_factory.build_insert(message.topic, message.avro_bytes) - await self.submit_query(operation) + operation = self.database_operation_factory.build_submit(message.topic, message.avro_bytes) + await self.execute_on_database(operation) await self._notify_enqueued(message) # noinspection PyUnusedLocal @@ -163,12 +166,12 @@ async def _dequeue(self) -> BrokerMessage: logger.warning( f"There was a problem while trying to deserialize the entry with {entry.id_!r} id: {exc}" ) - operation = self.operation_factory.build_update_not_processed(entry.id_) - await self.submit_query(operation) + operation = self.database_operation_factory.build_mark_processed(entry.id_) + await self.execute_on_database(operation) continue - operation = self.operation_factory.build_delete_processed(entry.id_) - await self.submit_query(operation) + operation = self.database_operation_factory.build_delete(entry.id_) + await self.execute_on_database(operation) return message finally: self._queue.task_done() @@ -192,8 +195,8 @@ async def _wait_enqueued(self) -> None: async def _get_count(self) -> int: # noinspection PyTypeChecker - operation = self.operation_factory.build_count_not_processed(self.retry) - row = await self.submit_query_and_fetchone(operation) + operation = self.database_operation_factory.build_count(self.retry) + row = await self.execute_on_database_and_fetch_one(operation) count = row[0] return count @@ -207,14 +210,14 @@ async def _dequeue_batch(self) -> None: entries = [_Entry(*row) for row in rows] ids = 
tuple(entry.id_ for entry in entries) - operation = self.operation_factory.build_mark_processing(ids) + operation = self.database_operation_factory.build_mark_processing(ids) await client.execute(operation) for entry in entries: await self._queue.put(entry) async def _dequeue_rows(self, client: DatabaseClient) -> list[Any]: - operation = self.operation_factory.build_select_not_processed(self._retry, self._records) + operation = self.database_operation_factory.build_query(self._retry, self._records) await client.execute(operation) return [row async for row in client.fetch_all()] diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/__init__.py index 1c5f5166d..d3195a41f 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/__init__.py @@ -6,7 +6,6 @@ InMemoryBrokerPublisher, ) from .queued import ( - AiopgBrokerPublisherQueueDatabaseOperationFactory, BrokerPublisherQueue, BrokerPublisherQueueDatabaseOperationFactory, DatabaseBrokerPublisherQueue, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/__init__.py index 9bb141835..9f1dfd8b8 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/__init__.py @@ -2,7 +2,6 @@ QueuedBrokerPublisher, ) from .queues import ( - AiopgBrokerPublisherQueueDatabaseOperationFactory, BrokerPublisherQueue, BrokerPublisherQueueDatabaseOperationFactory, DatabaseBrokerPublisherQueue, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/__init__.py 
b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/__init__.py index 5e544f1a3..85b9822bc 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/__init__.py @@ -2,7 +2,6 @@ BrokerPublisherQueue, ) from .database import ( - AiopgBrokerPublisherQueueDatabaseOperationFactory, BrokerPublisherQueueDatabaseOperationFactory, DatabaseBrokerPublisherQueue, ) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/database.py b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/database.py index 339360e27..059a5e0d8 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/database.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/publishers/queued/queues/database.py @@ -7,12 +7,7 @@ ABC, ) -from minos.common import ( - AiopgDatabaseClient, -) - from ....collections import ( - AiopgBrokerQueueDatabaseOperationFactory, BrokerQueueDatabaseOperationFactory, DatabaseBrokerQueue, ) @@ -27,24 +22,6 @@ class BrokerPublisherQueueDatabaseOperationFactory(BrokerQueueDatabaseOperationF """Broker Publisher Queue Database Operation Factory class.""" -class AiopgBrokerPublisherQueueDatabaseOperationFactory( - BrokerPublisherQueueDatabaseOperationFactory, AiopgBrokerQueueDatabaseOperationFactory -): - """Aiopg Broker Publisher Queue Query Factory class.""" - - def build_table_name(self) -> str: - """Get the table name. - - :return: A ``str`` value. 
- """ - return "broker_publisher_queue" - - -AiopgDatabaseClient.register_factory( - BrokerPublisherQueueDatabaseOperationFactory, AiopgBrokerPublisherQueueDatabaseOperationFactory -) - - class DatabaseBrokerPublisherQueue( DatabaseBrokerQueue[BrokerPublisherQueueDatabaseOperationFactory], BrokerPublisherQueue ): diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/__init__.py index 2d6b416d9..c5932b0ec 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/__init__.py @@ -3,7 +3,6 @@ BrokerSubscriberBuilder, ) from .filtered import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberDuplicateValidator, BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberValidator, @@ -17,7 +16,6 @@ InMemoryBrokerSubscriberBuilder, ) from .queued import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerSubscriberQueue, BrokerSubscriberQueueBuilder, BrokerSubscriberQueueDatabaseOperationFactory, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/__init__.py index 4878d8cf0..f9239bd89 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/__init__.py @@ -2,7 +2,6 @@ FilteredBrokerSubscriber, ) from .validators import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberDuplicateValidator, BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberValidator, diff --git 
a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/__init__.py index 9ddb90eaf..083fb0000 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/__init__.py @@ -2,7 +2,6 @@ BrokerSubscriberValidator, ) from .duplicates import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberDuplicateValidator, BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, DatabaseBrokerSubscriberDuplicateValidator, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/__init__.py index 01c3487dd..48612b056 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/__init__.py @@ -2,7 +2,6 @@ BrokerSubscriberDuplicateValidator, ) from .database import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, DatabaseBrokerSubscriberDuplicateValidator, DatabaseBrokerSubscriberDuplicateValidatorBuilder, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/__init__.py index ae4271871..8cb5ceb47 100644 --- 
a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/__init__.py index 7cdea9721..a96d4d6bc 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, ) -from .aiopg import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/abc.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/abc.py index d00bb82e8..d32234805 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/abc.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/abc.py @@ -17,14 +17,14 @@ class BrokerSubscriberDuplicateValidatorDatabaseOperationFactory(DatabaseOperati """Broker Subscriber Duplicate 
Validator Database Operation Factory class.""" @abstractmethod - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the "create table" query. :return: A ``SQL`` instance. """ @abstractmethod - def build_insert_row(self, topic: str, uuid: UUID) -> DatabaseOperation: + def build_submit(self, topic: str, uuid: UUID) -> DatabaseOperation: """Build the "insert row" query. :return: A ``SQL`` instance. diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/impl.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/impl.py index d767f639f..d802ad567 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/impl.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/impl.py @@ -2,6 +2,9 @@ annotations, ) +from typing import ( + Optional, +) from uuid import ( UUID, ) @@ -27,18 +30,23 @@ class DatabaseBrokerSubscriberDuplicateValidator( ): """Database Broker Subscriber Duplicate Detector class.""" + def __init__(self, *args, database_key: Optional[tuple[str]] = None, **kwargs): + if database_key is None: + database_key = ("broker",) + super().__init__(*args, database_key=database_key, **kwargs) + async def _setup(self) -> None: await super()._setup() await self._create_table() async def _create_table(self) -> None: - operation = self.operation_factory.build_create_table() - await self.submit_query(operation) + operation = self.database_operation_factory.build_create() + await self.execute_on_database(operation) async def _is_unique(self, topic: str, uuid: UUID) -> bool: - operation = self.operation_factory.build_insert_row(topic, uuid) + operation = self.database_operation_factory.build_submit(topic, uuid) try: - await 
self.submit_query(operation) + await self.execute_on_database(operation) return True except IntegrityException: return False diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/__init__.py index d20f7843f..896e77c03 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/__init__.py @@ -3,7 +3,6 @@ QueuedBrokerSubscriberBuilder, ) from .queues import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerSubscriberQueue, BrokerSubscriberQueueBuilder, BrokerSubscriberQueueDatabaseOperationFactory, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/__init__.py index 7a6286749..6a871e7f3 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/__init__.py @@ -3,7 +3,6 @@ BrokerSubscriberQueueBuilder, ) from .database import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerSubscriberQueueDatabaseOperationFactory, DatabaseBrokerSubscriberQueue, DatabaseBrokerSubscriberQueueBuilder, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/__init__.py index 9a31cede2..d06e7c8dc 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/__init__.py +++ 
b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/__init__.py @@ -1,5 +1,4 @@ from .factories import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerSubscriberQueueDatabaseOperationFactory, ) from .impl import ( diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/__init__.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/__init__.py index 8e6078511..ec7ac9533 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/__init__.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/__init__.py @@ -1,6 +1,3 @@ from .abc import ( BrokerSubscriberQueueDatabaseOperationFactory, ) -from .aiopg import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, -) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/abc.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/abc.py index c6b9db202..cfa68708b 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/abc.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/abc.py @@ -1,5 +1,6 @@ from abc import ( ABC, + abstractmethod, ) from collections.abc import ( Iterable, @@ -18,7 +19,8 @@ class BrokerSubscriberQueueDatabaseOperationFactory(BrokerQueueDatabaseOperationFactory, ABC): """Broker Subscriber Queue Database Operation Factory class.""" - def build_count_not_processed( + @abstractmethod + def build_count( self, retry: int, topics: Iterable[str] = tuple(), @@ -30,7 +32,8 @@ def build_count_not_processed( :return: """ - def 
build_select_not_processed( + @abstractmethod + def build_query( self, retry: int, records: int, diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/impl.py b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/impl.py index c39ba4879..7bbee880c 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/impl.py +++ b/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/impl.py @@ -33,13 +33,13 @@ class DatabaseBrokerSubscriberQueue( async def _get_count(self) -> int: # noinspection PyTypeChecker - operation = self.operation_factory.build_count_not_processed(self._retry, self.topics) - row = await self.submit_query_and_fetchone(operation) + operation = self.database_operation_factory.build_count(self._retry, self.topics) + row = await self.execute_on_database_and_fetch_one(operation) count = row[0] return count async def _dequeue_rows(self, client: DatabaseClient) -> list[Any]: - operation = self.operation_factory.build_select_not_processed(self._retry, self._records, self.topics) + operation = self.database_operation_factory.build_query(self._retry, self._records, self.topics) await client.execute(operation) return [row async for row in client.fetch_all()] diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/__init__.py b/packages/core/minos-microservice-networks/minos/networks/testing/__init__.py new file mode 100644 index 000000000..451f42047 --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/__init__.py @@ -0,0 +1,6 @@ +from .brokers import ( + MockedBrokerPublisherQueueDatabaseOperationFactory, + MockedBrokerQueueDatabaseOperationFactory, + MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + MockedBrokerSubscriberQueueDatabaseOperationFactory, +) diff --git 
a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/__init__.py new file mode 100644 index 000000000..8d6486131 --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/__init__.py @@ -0,0 +1,10 @@ +from .collections import ( + MockedBrokerQueueDatabaseOperationFactory, +) +from .publishers import ( + MockedBrokerPublisherQueueDatabaseOperationFactory, +) +from .subscribers import ( + MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + MockedBrokerSubscriberQueueDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/__init__.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/__init__.py new file mode 100644 index 000000000..2f2dbc0d3 --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/__init__.py @@ -0,0 +1,3 @@ +from .queues import ( + MockedBrokerQueueDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/queues.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/queues.py new file mode 100644 index 000000000..2d8f47bac --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/collections/queues.py @@ -0,0 +1,46 @@ +from collections.abc import ( + Iterable, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseOperation, +) + +from ....brokers import ( + BrokerQueueDatabaseOperationFactory, +) + + +class MockedBrokerQueueDatabaseOperationFactory(BrokerQueueDatabaseOperationFactory): + """For testing purposes.""" + + def build_create(self) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("create_queue_table") + + 
def build_mark_processed(self, id_: int) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("update_not_processed") + + def build_delete(self, id_: int) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("delete_processed") + + def build_mark_processing(self, ids: Iterable[int]) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("mark_processing") + + def build_count(self, retry: int, *args, **kwargs) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("count_not_processed") + + def build_submit(self, topic: str, data: bytes) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("insert") + + def build_query(self, retry: int, records: int, *args, **kwargs) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("select_not_processed") diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/__init__.py new file mode 100644 index 000000000..3100d864b --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/__init__.py @@ -0,0 +1,3 @@ +from .queues import ( + MockedBrokerPublisherQueueDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/queues.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/queues.py new file mode 100644 index 000000000..8afd22f4e --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/publishers/queues.py @@ -0,0 +1,21 @@ +from minos.common.testing import ( + MockedDatabaseClient, +) +from minos.networks import ( + BrokerPublisherQueueDatabaseOperationFactory, +) + +from ..collections import ( + 
MockedBrokerQueueDatabaseOperationFactory, +) + + +class MockedBrokerPublisherQueueDatabaseOperationFactory( + BrokerPublisherQueueDatabaseOperationFactory, MockedBrokerQueueDatabaseOperationFactory +): + """For testing purposes""" + + +MockedDatabaseClient.set_factory( + BrokerPublisherQueueDatabaseOperationFactory, MockedBrokerPublisherQueueDatabaseOperationFactory +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/__init__.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/__init__.py new file mode 100644 index 000000000..c4c2368bc --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/__init__.py @@ -0,0 +1,6 @@ +from .queues import ( + MockedBrokerSubscriberQueueDatabaseOperationFactory, +) +from .validators import ( + MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/queues.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/queues.py new file mode 100644 index 000000000..ff480ee5a --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/queues.py @@ -0,0 +1,39 @@ +from collections.abc import ( + Iterable, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseClient, + MockedDatabaseOperation, +) + +from ....brokers import ( + BrokerSubscriberQueueDatabaseOperationFactory, +) +from ..collections import ( + MockedBrokerQueueDatabaseOperationFactory, +) + + +class MockedBrokerSubscriberQueueDatabaseOperationFactory( + BrokerSubscriberQueueDatabaseOperationFactory, MockedBrokerQueueDatabaseOperationFactory +): + """For testing purposes""" + + def build_count(self, retry: int, topics: Iterable[str] = tuple(), *args, **kwargs) -> DatabaseOperation: + """For testing 
purposes""" + return MockedDatabaseOperation("count_not_processed") + + def build_query( + self, retry: int, records: int, topics: Iterable[str] = tuple(), *args, **kwargs + ) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("select_not_processed") + + +MockedDatabaseClient.set_factory( + BrokerSubscriberQueueDatabaseOperationFactory, MockedBrokerSubscriberQueueDatabaseOperationFactory +) diff --git a/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/validators.py b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/validators.py new file mode 100644 index 000000000..cf1230143 --- /dev/null +++ b/packages/core/minos-microservice-networks/minos/networks/testing/brokers/subscribers/validators.py @@ -0,0 +1,35 @@ +from uuid import ( + UUID, +) + +from minos.common import ( + DatabaseOperation, +) +from minos.common.testing import ( + MockedDatabaseClient, + MockedDatabaseOperation, +) + +from ....brokers import ( + BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) + + +class MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory( + BrokerSubscriberDuplicateValidatorDatabaseOperationFactory +): + """For testing purposes""" + + def build_create(self) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("create_table") + + def build_submit(self, topic: str, uuid: UUID) -> DatabaseOperation: + """For testing purposes""" + return MockedDatabaseOperation("insert_row") + + +MockedDatabaseClient.set_factory( + BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/poetry.lock b/packages/core/minos-microservice-networks/poetry.lock index 93681dd9c..e894a9c2c 100644 --- a/packages/core/minos-microservice-networks/poetry.lock +++ b/packages/core/minos-microservice-networks/poetry.lock @@ -19,29 
+19,6 @@ develop = ["aiocontextvars (==0.2.2)", "aiohttp-asgi", "aiohttp (<4)", "async-ti raven = ["raven-aiohttp"] uvloop = ["uvloop (>=0.14,<1)"] -[[package]] -name = "aiopg" -version = "1.3.3" -description = "Postgres integration with asyncio." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -async-timeout = ">=3.0,<5.0" -psycopg2-binary = ">=2.8.4" - -[package.extras] -sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"] - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -239,7 +216,6 @@ develop = true [package.dependencies] aiomisc = ">=14.0.3,<15.8.0" -aiopg = "^1.2.1" cached-property = "^1.5.2" dependency-injector = "^4.32.2" fastavro = "^1.4.0" @@ -311,14 +287,6 @@ python-versions = ">=3.6" dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "psycopg2-binary" -version = "2.9.3" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "py" version = "1.11.0" @@ -423,21 +391,13 @@ test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,< [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "c9bb8530112d131ba7e2db97e307010931ebaa7cd724a268aa9182b79608ef92" +content-hash = "fb90fd955533067f90236079e7f071d3f45f6d4cf0d28398f4a1ac2654576755" [metadata.files] aiomisc = [ {file = "aiomisc-15.7.3-py3-none-any.whl", hash = "sha256:0403e83268e98d0f2a125a70d13303fe1a2358e36db3daf02df032c7fa4f1525"}, {file = "aiomisc-15.7.3.tar.gz", hash = "sha256:ba250a34bd4609ced36111cb50580f57c3d52f3955f953a53ecb2986988baedc"}, ] -aiopg = [ - {file = "aiopg-1.3.3-py3-none-any.whl", hash = "sha256:2842dd8741460eeef940032dcb577bfba4d4115205dd82a73ce13b3271f5bf0a"}, - {file = 
"aiopg-1.3.3.tar.gz", hash = "sha256:547c6ba4ea0d73c2a11a2f44387d7133cc01d3c6f3b8ed976c0ac1eff4f595d7"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -689,64 +649,6 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -psycopg2-binary = [ - {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, - {file = 
"psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, - {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, - {file = 
"psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, - {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, - {file = 
"psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, - {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash 
= "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, - {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, - {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, - {file 
= "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, -] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, diff --git a/packages/core/minos-microservice-networks/pyproject.toml b/packages/core/minos-microservice-networks/pyproject.toml index 63c0f30fd..6f43b7185 100644 --- a/packages/core/minos-microservice-networks/pyproject.toml +++ b/packages/core/minos-microservice-networks/pyproject.toml @@ -32,9 +32,7 @@ include = [ [tool.poetry.dependencies] python = "^3.9" minos-microservice-common = { version ="^0.7.0*", allow-prereleases = true } -aiopg = "^1.2.1" crontab = "^0.23.0" -psycopg2-binary = "^2.9.3" [tool.poetry.dev-dependencies] minos-microservice-common = { path = "../minos-microservice-common", develop = true } diff --git a/packages/core/minos-microservice-networks/tests/test_config.yml b/packages/core/minos-microservice-networks/tests/test_config.yml index 788655ef9..15153c1ff 100644 --- a/packages/core/minos-microservice-networks/tests/test_config.yml +++ b/packages/core/minos-microservice-networks/tests/test_config.yml @@ -16,34 +16,12 @@ rest: host: localhost port: 8080 repository: - client: minos.common.AiopgDatabaseClient + client: minos.common.testing.MockedDatabaseClient database: order_db user: minos password: min0s host: localhost port: 5432 -snapshot: - client: minos.common.AiopgDatabaseClient - database: order_db - user: minos - password: min0s - host: localhost - port: 5432 -broker: - host: localhost - port: 9092 - queue: - client: minos.common.AiopgDatabaseClient - database: order_db - user: minos - password: min0s - host: localhost - port: 5432 - records: 10 - retry: 2 -saga: - storage: - path: "./order.lmdb" discovery: client: minos.networks.InMemoryDiscoveryClient host: 
discovery-service diff --git a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_pg.py b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_database.py similarity index 50% rename from packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_pg.py rename to packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_database.py index 7d57dbbbd..7852320ee 100644 --- a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_pg.py +++ b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_collections/test_queues/test_database.py @@ -2,43 +2,41 @@ from asyncio import ( sleep, ) +from itertools import ( + chain, + cycle, +) from unittest.mock import ( AsyncMock, patch, ) from minos.common import ( - AiopgDatabaseClient, DatabaseMixin, ) from minos.common.testing import ( DatabaseMinosTestCase, + MockedDatabaseClient, ) from minos.networks import ( - AiopgBrokerQueueDatabaseOperationFactory, BrokerMessageV1, BrokerMessageV1Payload, BrokerQueue, DatabaseBrokerQueue, ) +from minos.networks.testing import ( + MockedBrokerQueueDatabaseOperationFactory, +) from tests.utils import ( FakeAsyncIterator, NetworksTestCase, ) -class _AiopgBrokerQueueDatabaseOperationFactory(AiopgBrokerQueueDatabaseOperationFactory): - """For testing purposes.""" - - def build_table_name(self) -> str: - """For testing purposes.""" - return "test_table" - - class TestDatabaseBrokerQueue(NetworksTestCase, DatabaseMinosTestCase): def setUp(self) -> None: super().setUp() - self.operation_factory = _AiopgBrokerQueueDatabaseOperationFactory() + self.operation_factory = MockedBrokerQueueDatabaseOperationFactory() def test_is_subclass(self): self.assertTrue(issubclass(DatabaseBrokerQueue, (BrokerQueue, 
DatabaseMixin))) @@ -46,21 +44,29 @@ def test_is_subclass(self): def test_constructor(self): queue = DatabaseBrokerQueue(operation_factory=self.operation_factory) self.assertEqual(self.pool_factory.get_pool("database"), queue.database_pool) - self.assertEqual(self.operation_factory, queue.operation_factory) + self.assertEqual(self.operation_factory, queue.database_operation_factory) self.assertEqual(2, queue.retry) self.assertEqual(1000, queue.records) async def test_operation_factory(self): queue = DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) - self.assertEqual(self.operation_factory, queue.operation_factory) + self.assertEqual(self.operation_factory, queue.database_operation_factory) async def test_enqueue(self): message = BrokerMessageV1("foo", BrokerMessageV1Payload("bar")) - async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: - await queue.enqueue(message) - await sleep(0.5) # To give time to consume the message from db. + with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=chain( + [FakeAsyncIterator([(0,)]), FakeAsyncIterator([(1, message.avro_bytes)])], + cycle([FakeAsyncIterator([(0,)])]), + ), + ): + async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: + await queue.enqueue(message) + await sleep(0.5) # To give time to consume the message from db. 
async def test_aiter(self): messages = [ @@ -73,11 +79,23 @@ async def test_aiter(self): await queue.enqueue(messages[0]) await queue.enqueue(messages[1]) - observed = list() - async for message in queue: - observed.append(message) - if len(messages) == len(observed): - await queue.destroy() + with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=chain( + [ + FakeAsyncIterator([(2,)]), + FakeAsyncIterator([(1, messages[0].avro_bytes), (2, messages[1].avro_bytes)]), + ], + cycle([FakeAsyncIterator([(0,)])]), + ), + ): + + observed = list() + async for message in queue: + observed.append(message) + if len(messages) == len(observed): + await queue.destroy() self.assertEqual(messages, observed) @@ -88,7 +106,7 @@ async def test_dequeue_with_count(self): ] with patch.object( - AiopgDatabaseClient, + MockedDatabaseClient, "fetch_all", return_value=FakeAsyncIterator([[1, messages[0].avro_bytes], [2, bytes()], [3, messages[1].avro_bytes]]), ): @@ -105,11 +123,28 @@ async def test_dequeue_with_notify(self): BrokerMessageV1("foo", BrokerMessageV1Payload("bar")), BrokerMessageV1("bar", BrokerMessageV1Payload("foo")), ] - async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: - await queue.enqueue(messages[0]) - await queue.enqueue(messages[1]) - observed = [await queue.dequeue(), await queue.dequeue()] + with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=chain( + [ + FakeAsyncIterator([(0,)]), + FakeAsyncIterator( + [ + (1, messages[0].avro_bytes), + (2, messages[1].avro_bytes), + ] + ), + ], + cycle([FakeAsyncIterator([(0,)])]), + ), + ): + async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: + await queue.enqueue(messages[0]) + await queue.enqueue(messages[1]) + + observed = [await queue.dequeue(), await queue.dequeue()] self.assertEqual(messages, observed) @@ -121,15 +156,28 @@ async def test_dequeue_ordered(self): 
BrokerMessageV1("foo", BrokerMessageV1Payload(1)), ] - async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: - - for message in unsorted: - await queue.enqueue(message) - - await sleep(0.5) - observed = list() - for _ in range(len(unsorted)): - observed.append(await queue.dequeue()) + with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=chain( + [ + FakeAsyncIterator([(2,)]), + FakeAsyncIterator( + [ + (1, unsorted[0].avro_bytes), + (2, unsorted[1].avro_bytes), + (3, unsorted[2].avro_bytes), + (4, unsorted[3].avro_bytes), + ] + ), + ], + cycle([FakeAsyncIterator([(0,)])]), + ), + ): + async with DatabaseBrokerQueue.from_config(self.config, operation_factory=self.operation_factory) as queue: + observed = list() + for _ in range(len(unsorted)): + observed.append(await queue.dequeue()) expected = [unsorted[3], unsorted[1], unsorted[2], unsorted[0]] diff --git a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_pg.py b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_database.py similarity index 57% rename from packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_pg.py rename to packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_database.py index 9e28b2bfb..4d44c31d6 100644 --- a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_pg.py +++ b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_publishers/test_queued/test_queues/test_database.py @@ -4,8 +4,8 @@ DatabaseMinosTestCase, ) from minos.networks import ( - AiopgBrokerPublisherQueueDatabaseOperationFactory, BrokerPublisherQueue, + 
BrokerPublisherQueueDatabaseOperationFactory, DatabaseBrokerPublisherQueue, DatabaseBrokerQueue, ) @@ -21,15 +21,7 @@ def test_is_subclass(self): async def test_operation_factory(self): queue = DatabaseBrokerPublisherQueue.from_config(self.config) - self.assertIsInstance(queue.operation_factory, AiopgBrokerPublisherQueueDatabaseOperationFactory) - - -class TestAiopgBrokerPublisherQueueDatabaseOperationFactory(unittest.TestCase): - def setUp(self) -> None: - self.factory = AiopgBrokerPublisherQueueDatabaseOperationFactory() - - def test_build_table_name(self): - self.assertEqual("broker_publisher_queue", self.factory.build_table_name()) + self.assertIsInstance(queue.database_operation_factory, BrokerPublisherQueueDatabaseOperationFactory) if __name__ == "__main__": diff --git a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_pg.py b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_database.py similarity index 53% rename from packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_pg.py rename to packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_database.py index 7dcaaf460..5f4e71c99 100644 --- a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_pg.py +++ b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_filtered/test_validators/test_duplicates/test_database.py @@ -1,15 +1,24 @@ import unittest +from unittest.mock import ( + patch, +) +from minos.common import ( + IntegrityException, +) from minos.common.testing import ( DatabaseMinosTestCase, + 
MockedDatabaseClient, ) from minos.networks import ( - AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, BrokerMessageV1, BrokerMessageV1Payload, BrokerSubscriberValidator, DatabaseBrokerSubscriberDuplicateValidator, ) +from minos.networks.testing import ( + MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) from tests.utils import ( NetworksTestCase, ) @@ -23,7 +32,7 @@ async def test_operation_factory(self): validator = DatabaseBrokerSubscriberDuplicateValidator.from_config(self.config) self.assertIsInstance( - validator.operation_factory, AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory + validator.database_operation_factory, MockedBrokerSubscriberDuplicateValidatorDatabaseOperationFactory ) async def test_is_valid(self): @@ -31,11 +40,14 @@ async def test_is_valid(self): two = BrokerMessageV1("foo", BrokerMessageV1Payload("bar")) three = BrokerMessageV1("foo", BrokerMessageV1Payload("bar")) - async with DatabaseBrokerSubscriberDuplicateValidator.from_config(self.config) as validator: - self.assertTrue(await validator.is_valid(one)) - self.assertTrue(await validator.is_valid(two)) - self.assertFalse(await validator.is_valid(one)) - self.assertTrue(await validator.is_valid(three)) + with patch.object( + MockedDatabaseClient, "execute", side_effect=[None, None, None, IntegrityException(""), None] + ): + async with DatabaseBrokerSubscriberDuplicateValidator.from_config(self.config) as validator: + self.assertTrue(await validator.is_valid(one)) + self.assertTrue(await validator.is_valid(two)) + self.assertFalse(await validator.is_valid(one)) + self.assertTrue(await validator.is_valid(three)) if __name__ == "__main__": diff --git a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_pg.py b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_database.py similarity index 55% rename 
from packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_pg.py rename to packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_database.py index bc289ee9e..eb42d9f18 100644 --- a/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_pg.py +++ b/packages/core/minos-microservice-networks/tests/test_networks/test_brokers/test_subscribers/test_queued/test_queues/test_database.py @@ -2,19 +2,19 @@ from asyncio import ( sleep, ) +from itertools import ( + chain, + cycle, +) from unittest.mock import ( - AsyncMock, patch, ) -from minos.common import ( - AiopgDatabaseClient, -) from minos.common.testing import ( DatabaseMinosTestCase, + MockedDatabaseClient, ) from minos.networks import ( - AiopgBrokerSubscriberQueueDatabaseOperationFactory, BrokerMessageV1, BrokerMessageV1Payload, BrokerSubscriberQueue, @@ -22,6 +22,9 @@ DatabaseBrokerSubscriberQueue, DatabaseBrokerSubscriberQueueBuilder, ) +from minos.networks.testing import ( + MockedBrokerSubscriberQueueDatabaseOperationFactory, +) from tests.utils import ( FakeAsyncIterator, NetworksTestCase, @@ -35,14 +38,22 @@ def test_is_subclass(self): async def test_operation_factory(self): queue = DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) - self.assertIsInstance(queue.operation_factory, AiopgBrokerSubscriberQueueDatabaseOperationFactory) + self.assertIsInstance(queue.database_operation_factory, MockedBrokerSubscriberQueueDatabaseOperationFactory) async def test_enqueue(self): message = BrokerMessageV1("foo", BrokerMessageV1Payload("bar")) - async with DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) as queue: - await queue.enqueue(message) - await sleep(0.5) # To give time to consume the message from db. 
+ with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=chain( + [FakeAsyncIterator([(0,)]), FakeAsyncIterator([(1, message.avro_bytes)])], + cycle([FakeAsyncIterator([(0,)])]), + ), + ): + async with DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) as queue: + await queue.enqueue(message) + await sleep(0.5) # To give time to consume the message from db. async def test_dequeue_with_count(self): messages = [ @@ -51,13 +62,19 @@ async def test_dequeue_with_count(self): ] with patch.object( - AiopgDatabaseClient, + MockedDatabaseClient, "fetch_all", - return_value=FakeAsyncIterator([[1, messages[0].avro_bytes], [2, bytes()], [3, messages[1].avro_bytes]]), + side_effect=[ + FakeAsyncIterator( + [ + [2], + ] + ), + FakeAsyncIterator([[1, messages[0].avro_bytes], [2, bytes()], [3, messages[1].avro_bytes]]), + FakeAsyncIterator([(0,)]), + ], ): async with DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) as queue: - queue._get_count = AsyncMock(side_effect=[3, 0]) - async with queue: observed = [await queue.dequeue(), await queue.dequeue()] @@ -68,23 +85,24 @@ async def test_dequeue_with_notify(self): BrokerMessageV1("foo", BrokerMessageV1Payload("bar")), BrokerMessageV1("bar", BrokerMessageV1Payload("foo")), ] - async with DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) as queue: - await queue.enqueue(messages[0]) - await queue.enqueue(messages[1]) + with patch.object( + MockedDatabaseClient, + "fetch_all", + side_effect=[ + FakeAsyncIterator([(0,)]), + FakeAsyncIterator([(1, messages[0].avro_bytes), (3, messages[1].avro_bytes)]), + FakeAsyncIterator([(0,)]), + ], + ): + async with DatabaseBrokerSubscriberQueue.from_config(self.config, topics={"foo", "bar"}) as queue: + await queue.enqueue(messages[0]) + await queue.enqueue(messages[1]) - observed = [await queue.dequeue(), await queue.dequeue()] + observed = [await queue.dequeue(), await queue.dequeue()] 
self.assertEqual(messages, observed) -class TestAiopgBrokerSubscriberQueueDatabaseOperationFactory(unittest.TestCase): - def setUp(self) -> None: - self.factory = AiopgBrokerSubscriberQueueDatabaseOperationFactory() - - def test_build_table_name(self): - self.assertEqual("broker_subscriber_queue", self.factory.build_table_name()) - - class TestDatabaseBrokerSubscriberQueueBuilder(NetworksTestCase, DatabaseMinosTestCase): def test_build(self): builder = DatabaseBrokerSubscriberQueueBuilder().with_config(self.config).with_topics({"one", "two"}) diff --git a/packages/core/minos-microservice-networks/tests/utils.py b/packages/core/minos-microservice-networks/tests/utils.py index 2ea505a14..d357189ee 100644 --- a/packages/core/minos-microservice-networks/tests/utils.py +++ b/packages/core/minos-microservice-networks/tests/utils.py @@ -1,3 +1,7 @@ +from __future__ import ( + annotations, +) + from abc import ( ABC, ) @@ -29,6 +33,7 @@ Response, WrappedRequest, enroute, + testing, ) BASE_PATH = Path(__file__).parent @@ -36,6 +41,8 @@ class NetworksTestCase(MinosTestCase, ABC): + testing_module = testing + def get_config_file_path(self): return CONFIG_FILE_PATH diff --git a/packages/core/minos-microservice-saga/minos/saga/executions/storage.py b/packages/core/minos-microservice-saga/minos/saga/executions/storage.py index 3bcf75ef5..c2a31a201 100644 --- a/packages/core/minos-microservice-saga/minos/saga/executions/storage.py +++ b/packages/core/minos-microservice-saga/minos/saga/executions/storage.py @@ -2,6 +2,9 @@ annotations, ) +from contextlib import ( + suppress, +) from typing import ( Type, Union, @@ -12,6 +15,7 @@ from minos.common import ( Config, + MinosConfigException, MinosJsonBinaryProtocol, MinosStorage, MinosStorageLmdb, @@ -46,7 +50,9 @@ def from_config(cls, config: Config, **kwargs) -> SagaExecutionStorage: :param kwargs: Additional named arguments. :return: A new ``SagaExecutionStorage`` instance. 
""" - return cls(**(config.get_database_by_name("saga") | kwargs)) + with suppress(MinosConfigException): + kwargs |= config.get_database_by_name("saga") + return cls(**kwargs) def store(self, execution: SagaExecution) -> None: """Store an execution. diff --git a/packages/plugins/minos-broker-rabbitmq/tests/docker-compose.yml b/packages/plugins/minos-broker-rabbitmq/tests/docker-compose.yml index 2e0f9e21f..bc4158ab3 100644 --- a/packages/plugins/minos-broker-rabbitmq/tests/docker-compose.yml +++ b/packages/plugins/minos-broker-rabbitmq/tests/docker-compose.yml @@ -1,13 +1,5 @@ version: '2' services: - postgres: - image: postgres:alpine - network_mode: host - environment: - POSTGRES_USER: minos - POSTGRES_PASSWORD: min0s - POSTGRES_DB: order_db - rabbitmq: image: rabbitmq:3-management ports: diff --git a/packages/plugins/minos-broker-rabbitmq/tests/test_config.yml b/packages/plugins/minos-broker-rabbitmq/tests/test_config.yml index 2e1a8a779..3e4c40985 100644 --- a/packages/plugins/minos-broker-rabbitmq/tests/test_config.yml +++ b/packages/plugins/minos-broker-rabbitmq/tests/test_config.yml @@ -3,41 +3,8 @@ service: aggregate: tests.utils.Order services: - minos.networks.BrokerPort -services: - - tests.services.commands.CommandService - - tests.services.queries.QueryService -rest: - host: localhost - port: 8080 -repository: - database: order_db - user: minos - password: min0s - host: localhost - port: 5432 -snapshot: - database: order_db - user: minos - password: min0s - host: localhost - port: 5432 broker: host: localhost port: 5672 user: guest - password: guest - queue: - database: order_db - user: minos - password: min0s - host: localhost - port: 5432 - records: 10 - retry: 2 -saga: - storage: - path: "./order.lmdb" -discovery: - client: minos.networks.MinosDiscoveryClient - host: discovery-service - port: 8080 + password: guest \ No newline at end of file diff --git a/packages/plugins/minos-database-aiopg/AUTHORS.md 
b/packages/plugins/minos-database-aiopg/AUTHORS.md new file mode 100644 index 000000000..30ff94991 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/AUTHORS.md @@ -0,0 +1,15 @@ +# Credits + +## Development Lead + +* Andrea Mucci + +## Core Devs + +* Sergio Garcia Prado +* Vladyslav Fenchak +* Alberto Amigo Alonso + +## Contributors + +None yet. Why not be the first? diff --git a/packages/plugins/minos-database-aiopg/HISTORY.md b/packages/plugins/minos-database-aiopg/HISTORY.md new file mode 100644 index 000000000..e0e57d3b3 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/HISTORY.md @@ -0,0 +1,2 @@ +# History + diff --git a/packages/plugins/minos-database-aiopg/LICENSE b/packages/plugins/minos-database-aiopg/LICENSE new file mode 100644 index 000000000..4daf85bf2 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Clariteia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/packages/plugins/minos-database-aiopg/Makefile b/packages/plugins/minos-database-aiopg/Makefile new file mode 100644 index 000000000..2016f9e38 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/Makefile @@ -0,0 +1,37 @@ +.PHONY: docs + +lint: + poetry run flake8 + +test: + poetry run pytest + +coverage: + poetry run coverage run -m pytest + poetry run coverage report -m + poetry run coverage xml + +reformat: + poetry run black --line-length 120 minos tests + poetry run isort minos tests + +release: + $(MAKE) dist + poetry publish + +dist: + poetry build + ls -l dist + +install: + poetry install + +update: + poetry update + +check: + $(MAKE) install + $(MAKE) reformat + $(MAKE) lint + $(MAKE) test + $(MAKE) dist diff --git a/packages/plugins/minos-database-aiopg/README.md b/packages/plugins/minos-database-aiopg/README.md new file mode 100644 index 000000000..c05e24ad0 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/README.md @@ -0,0 +1,66 @@ +

+ Minos logo +

+ +## minos-database-aiopg + +[![PyPI Latest Release](https://img.shields.io/pypi/v/minos-database-aiopg.svg)](https://pypi.org/project/minos-database-aiopg/) +[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/minos-framework/minos-python/pages%20build%20and%20deployment?label=docs)](https://minos-framework.github.io/minos-python) +[![License](https://img.shields.io/github/license/minos-framework/minos-python.svg)](https://github.com/minos-framework/minos-python/blob/main/LICENSE) +[![Coverage](https://codecov.io/github/minos-framework/minos-python/coverage.svg?branch=main)](https://codecov.io/gh/minos-framework/minos-python) +[![Stack Overflow](https://img.shields.io/badge/Stack%20Overflow-Ask%20a%20question-green)](https://stackoverflow.com/questions/tagged/minos) + +## Summary + +Minos is a framework which helps you create [reactive](https://www.reactivemanifesto.org/) microservices in Python. Internally, it leverages Event Sourcing, CQRS and a message driven architecture to fulfil the commitments of an asynchronous environment. + +## Installation + +Install the dependency: + +```shell +pip install minos-database-aiopg +``` + +Set the database client on the `config.yml` file: + +```yaml +... +databases: + default: + client: minos.plugins.aiopg.AiopgDatabaseClient + database: order_db + user: minos + password: min0s + host: localhost + port: 5432 + query: + client: minos.plugins.aiopg.AiopgDatabaseClient + database: order_query_db + user: minos + password: min0s + host: localhost + port: 5432 + ... +... +``` + +## Documentation + +The official API Reference is publicly available at the [GitHub Pages](https://minos-framework.github.io/minos-python). + +## Source Code + +The source code of this project is hosted at the [GitHub Repository](https://github.com/minos-framework/minos-python). + +## Getting Help + +For usage questions, the best place to go to is [StackOverflow](https://stackoverflow.com/questions/tagged/minos). 
+ +## Discussion and Development + +Most development discussions take place over the [GitHub Issues](https://github.com/minos-framework/minos-python/issues). In addition, a [Gitter channel](https://gitter.im/minos-framework/community) is available for development-related questions. + +## License + +This project is distributed under the [MIT](https://raw.githubusercontent.com/minos-framework/minos-python/main/LICENSE) license. diff --git a/packages/plugins/minos-database-aiopg/RUNTHETESTS.md b/packages/plugins/minos-database-aiopg/RUNTHETESTS.md new file mode 100644 index 000000000..386e2b0e7 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/RUNTHETESTS.md @@ -0,0 +1,20 @@ +# Run the tests + +In order to run the tests, please make sure you have the `Docker Engine `_ +and `Docker Compose `_ installed. + +Move into tests/ directory + +`cd tests/` + +Run service dependencies: + +`docker-compose up -d` + +Install library dependencies: + +`make install` + +Run tests: + +`make test` diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/__init__.py new file mode 100644 index 000000000..15ff2a727 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/__init__.py @@ -0,0 +1,18 @@ +from .clients import ( + AiopgDatabaseClient, +) +from .factories import ( + AiopgBrokerPublisherQueueDatabaseOperationFactory, + AiopgBrokerQueueDatabaseOperationFactory, + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + AiopgBrokerSubscriberQueueDatabaseOperationFactory, + AiopgEventDatabaseOperationFactory, + AiopgLockDatabaseOperationFactory, + AiopgManagementDatabaseOperationFactory, + AiopgSnapshotDatabaseOperationFactory, + AiopgSnapshotQueryDatabaseOperationBuilder, + AiopgTransactionDatabaseOperationFactory, +) +from .operations import ( + AiopgDatabaseOperation, +) diff --git 
a/packages/core/minos-microservice-common/minos/common/database/clients/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/clients.py similarity index 93% rename from packages/core/minos-microservice-common/minos/common/database/clients/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/clients.py index e9ffcd49f..825c6b364 100644 --- a/packages/core/minos-microservice-common/minos/common/database/clients/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/clients.py @@ -18,17 +18,18 @@ from psycopg2 import ( IntegrityError, OperationalError, + ProgrammingError, ) -from ..operations import ( - AiopgDatabaseOperation, -) -from .abc import ( +from minos.common import ( + ConnectionException, DatabaseClient, -) -from .exceptions import ( IntegrityException, - UnableToConnectException, + ProgrammingException, +) + +from .operations import ( + AiopgDatabaseOperation, ) logger = logging.getLogger(__name__) @@ -75,9 +76,8 @@ async def _setup(self) -> None: await self._create_connection() async def _destroy(self) -> None: - await self.reset() - await self._close_connection() await super()._destroy() + await self._close_connection() async def _create_connection(self): try: @@ -87,7 +87,7 @@ async def _create_connection(self): except OperationalError as exc: msg = f"There was an {exc!r} while trying to get a database connection." 
logger.warning(msg) - raise UnableToConnectException(msg) + raise ConnectionException(msg) logger.debug(f"Created {self.database!r} database connection identified by {id(self._connection)}!") @@ -115,9 +115,11 @@ async def _reset(self, **kwargs) -> None: # noinspection PyUnusedLocal async def _fetch_all(self) -> AsyncIterator[tuple]: await self._create_cursor() - - async for row in self._cursor: - yield row + try: + async for row in self._cursor: + yield row + except ProgrammingError as exc: + raise ProgrammingException(str(exc)) # noinspection PyUnusedLocal async def _execute(self, operation: AiopgDatabaseOperation) -> None: diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/__init__.py new file mode 100644 index 000000000..0a9416f42 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/__init__.py @@ -0,0 +1,16 @@ +from .aggregate import ( + AiopgEventDatabaseOperationFactory, + AiopgSnapshotDatabaseOperationFactory, + AiopgSnapshotQueryDatabaseOperationBuilder, + AiopgTransactionDatabaseOperationFactory, +) +from .common import ( + AiopgLockDatabaseOperationFactory, + AiopgManagementDatabaseOperationFactory, +) +from .networks import ( + AiopgBrokerPublisherQueueDatabaseOperationFactory, + AiopgBrokerQueueDatabaseOperationFactory, + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + AiopgBrokerSubscriberQueueDatabaseOperationFactory, +) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/__init__.py new file mode 100644 index 000000000..7ab92c103 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/__init__.py @@ -0,0 +1,10 @@ +from .events import ( + AiopgEventDatabaseOperationFactory, +) +from .snapshots import ( + 
AiopgSnapshotDatabaseOperationFactory, + AiopgSnapshotQueryDatabaseOperationBuilder, +) +from .transactions import ( + AiopgTransactionDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/events.py similarity index 86% rename from packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/events.py index aaa7b1c27..701578bb2 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/events/repositories/database/factories/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/events.py @@ -15,22 +15,25 @@ from psycopg2.sql import ( SQL, Composable, + Identifier, Literal, Placeholder, ) +from minos.aggregate import ( + Action, + EventDatabaseOperationFactory, +) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, ComposedDatabaseOperation, DatabaseOperation, ) -from .....actions import ( - Action, +from ...clients import ( + AiopgDatabaseClient, ) -from .abc import ( - EventDatabaseOperationFactory, +from ...operations import ( + AiopgDatabaseOperation, ) @@ -38,7 +41,14 @@ class AiopgEventDatabaseOperationFactory(EventDatabaseOperationFactory): """Aiopg Event Database Operation Factory class.""" - def build_create_table(self) -> DatabaseOperation: + def build_table_name(self) -> str: + """Get the table name. + + :return: A ``str`` value. + """ + return "aggregate_event" + + def build_create(self) -> DatabaseOperation: """Build the database operation to create the event table. 
:return: A ``DatabaseOperation`` instance.s @@ -66,11 +76,11 @@ def build_create_table(self) -> DatabaseOperation: $$ LANGUAGE plpgsql; """, - lock="aggregate_event", + lock=self.build_table_name(), ), AiopgDatabaseOperation( - """ - CREATE TABLE IF NOT EXISTS aggregate_event ( + f""" + CREATE TABLE IF NOT EXISTS {self.build_table_name()} ( id BIGSERIAL PRIMARY KEY, action ACTION_TYPE NOT NULL, uuid UUID NOT NULL, @@ -82,14 +92,14 @@ def build_create_table(self) -> DatabaseOperation: UNIQUE (uuid, version, transaction_uuid) ); """, - lock="aggregate_event", + lock=self.build_table_name(), ), ] ) - def build_submit_row( + def build_submit( self, - transaction_uuids: tuple[UUID], + transaction_uuids: Iterable[UUID], uuid: UUID, action: Action, name: str, @@ -116,7 +126,7 @@ def build_submit_row( """ insert_values = SQL( """ - INSERT INTO aggregate_event (id, action, uuid, name, version, data, created_at, transaction_uuid) + INSERT INTO {table_name} (id, action, uuid, name, version, data, created_at, transaction_uuid) VALUES ( default, %(action)s, @@ -149,7 +159,7 @@ def build_submit_row( from_sql, from_parameters = self._build_submit_from(transaction_uuids) - query = insert_values.format(from_parts=from_sql) + query = insert_values.format(from_parts=from_sql, table_name=Identifier(self.build_table_name())) parameters = from_parameters | insert_parameters return AiopgDatabaseOperation(query, parameters, lock) @@ -158,7 +168,7 @@ def _build_submit_from(self, transaction_uuids: Iterable[UUID]) -> tuple[Composa select_transaction = SQL( """ SELECT {index} AS transaction_index, uuid, MAX(version) AS version - FROM aggregate_event + FROM {table_name} WHERE uuid = %(uuid)s AND transaction_uuid = {transaction_uuid} GROUP BY uuid """ @@ -169,13 +179,19 @@ def _build_submit_from(self, transaction_uuids: Iterable[UUID]) -> tuple[Composa name = f"transaction_uuid_{index}" parameters[name] = transaction_uuid - 
from_query_parts.append(select_transaction.format(index=Literal(index), transaction_uuid=Placeholder(name))) + from_query_parts.append( + select_transaction.format( + index=Literal(index), + transaction_uuid=Placeholder(name), + table_name=Identifier(self.build_table_name()), + ), + ) query = SQL(" UNION ALL ").join(from_query_parts) return query, parameters # noinspection PyShadowingBuiltins - def build_select_rows( + def build_query( self, uuid: Optional[UUID] = None, name: Optional[str] = None, @@ -191,7 +207,7 @@ def build_select_rows( id_ge: Optional[int] = None, transaction_uuid: Optional[UUID] = None, transaction_uuid_ne: Optional[UUID] = None, - transaction_uuid_in: Optional[tuple[UUID, ...]] = None, + transaction_uuid_in: Optional[Iterable[UUID, ...]] = None, **kwargs, ) -> DatabaseOperation: """Build the database operation to select rows. @@ -214,10 +230,12 @@ def build_select_rows( :return: A ``DatabaseOperation`` instance. """ + if transaction_uuid_in is not None: + transaction_uuid_in = tuple(transaction_uuid_in) - _select_all = """ + _select_all = f""" SELECT uuid, name, version, data, id, action, created_at, transaction_uuid - FROM aggregate_event + FROM {self.build_table_name()} """ conditions = list() @@ -277,12 +295,12 @@ def build_select_rows( }, ) - def build_select_max_id(self) -> DatabaseOperation: + def build_query_offset(self) -> DatabaseOperation: """Build the database operation to get the maximum identifier. :return: A ``DatabaseOperation`` instance. 
""" - return AiopgDatabaseOperation("SELECT MAX(id) FROM aggregate_event;".strip()) + return AiopgDatabaseOperation(f"SELECT MAX(id) FROM {self.build_table_name()};".strip()) -AiopgDatabaseClient.register_factory(EventDatabaseOperationFactory, AiopgEventDatabaseOperationFactory) +AiopgDatabaseClient.set_factory(EventDatabaseOperationFactory, AiopgEventDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/__init__.py similarity index 100% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/__init__.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/__init__.py diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/impl.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/impl.py similarity index 74% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/impl.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/impl.py index 391d487f5..acc3f9171 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/impl.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/impl.py @@ -12,19 +12,23 @@ UUID, ) +from minos.aggregate import ( + SnapshotDatabaseOperationFactory, +) +from minos.aggregate.queries import ( + _Condition, + _Ordering, +) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, ComposedDatabaseOperation, DatabaseOperation, ) -from .....queries import ( - _Condition, - _Ordering, +from ....clients import ( + AiopgDatabaseClient, ) -from ..abc import ( - 
SnapshotDatabaseOperationFactory, +from ....operations import ( + AiopgDatabaseOperation, ) from .queries import ( AiopgSnapshotQueryDatabaseOperationBuilder, @@ -35,7 +39,21 @@ class AiopgSnapshotDatabaseOperationFactory(SnapshotDatabaseOperationFactory): """Aiopg Snapshot Database Operation Factory class.""" - def build_create_table(self) -> DatabaseOperation: + def build_table_name(self) -> str: + """Get the table name. + + :return: A ``str`` value. + """ + return "snapshot" + + def build_offset_table_name(self) -> str: + """Get the offset table name. + + :return: A ``str`` value. + """ + return "snapshot_aux_offset" + + def build_create(self) -> DatabaseOperation: """Build the database operation to create the snapshot table. :return: A ``DatabaseOperation`` instance. @@ -47,8 +65,8 @@ def build_create_table(self) -> DatabaseOperation: lock="uuid-ossp", ), AiopgDatabaseOperation( - """ - CREATE TABLE IF NOT EXISTS snapshot ( + f""" + CREATE TABLE IF NOT EXISTS {self.build_table_name()} ( uuid UUID NOT NULL, name TEXT NOT NULL, version INT NOT NULL, @@ -60,36 +78,36 @@ def build_create_table(self) -> DatabaseOperation: PRIMARY KEY (uuid, transaction_uuid) ); """, - lock="snapshot", + lock=self.build_table_name(), ), AiopgDatabaseOperation( - """ - CREATE TABLE IF NOT EXISTS snapshot_aux_offset ( + f""" + CREATE TABLE IF NOT EXISTS {self.build_offset_table_name()} ( id bool PRIMARY KEY DEFAULT TRUE, value BIGINT NOT NULL, CONSTRAINT id_uni CHECK (id) ); """, - lock="snapshot_aux_offset", + lock=self.build_offset_table_name(), ), ] ) - def build_delete_by_transactions(self, transaction_uuids: Iterable[UUID]) -> DatabaseOperation: + def build_delete(self, transaction_uuids: Iterable[UUID]) -> DatabaseOperation: """Build the database operation to delete rows by transaction identifiers. :param transaction_uuids: The transaction identifiers. :return: A ``DatabaseOperation`` instance. 
""" return AiopgDatabaseOperation( - """ - DELETE FROM snapshot + f""" + DELETE FROM {self.build_table_name()} WHERE transaction_uuid IN %(transaction_uuids)s; """, {"transaction_uuids": tuple(transaction_uuids)}, ) - def build_insert( + def build_submit( self, uuid: UUID, name: str, @@ -114,8 +132,10 @@ def build_insert( """ return AiopgDatabaseOperation( - """ - INSERT INTO snapshot (uuid, name, version, schema, data, created_at, updated_at, transaction_uuid) + f""" + INSERT INTO {self.build_table_name()} ( + uuid, name, version, schema, data, created_at, updated_at, transaction_uuid + ) VALUES ( %(uuid)s, %(name)s, @@ -149,7 +169,7 @@ def build_query( condition: _Condition, ordering: Optional[_Ordering], limit: Optional[int], - transaction_uuids: tuple[UUID, ...], + transaction_uuids: Iterable[UUID], exclude_deleted: bool, ) -> DatabaseOperation: """Build the query database operation. @@ -168,41 +188,50 @@ def build_query( :return: A ``DatabaseOperation`` instance. """ builder = AiopgSnapshotQueryDatabaseOperationBuilder( - name, condition, ordering, limit, transaction_uuids, exclude_deleted + name=name, + condition=condition, + ordering=ordering, + limit=limit, + transaction_uuids=transaction_uuids, + exclude_deleted=exclude_deleted, + table_name=self.build_table_name(), ) query, parameters = builder.build() return AiopgDatabaseOperation(query, parameters) - def build_store_offset(self, value: int) -> DatabaseOperation: + def build_submit_offset(self, value: int) -> DatabaseOperation: """Build the database operation to store the offset. :param value: The value to be stored as the new offset. :return: A ``DatabaseOperation`` instance. 
""" return AiopgDatabaseOperation( - """ - INSERT INTO snapshot_aux_offset (id, value) + f""" + INSERT INTO {self.build_offset_table_name()} (id, value) VALUES (TRUE, %(value)s) ON CONFLICT (id) - DO UPDATE SET value = GREATEST(%(value)s, (SELECT value FROM snapshot_aux_offset WHERE id = TRUE)); + DO UPDATE SET value = GREATEST( + %(value)s, + (SELECT value FROM {self.build_offset_table_name()} WHERE id = TRUE) + ); """.strip(), {"value": value}, - lock="insert_snapshot_aux_offset", + lock=f"insert_{self.build_offset_table_name()}", ) - def build_get_offset(self) -> DatabaseOperation: + def build_query_offset(self) -> DatabaseOperation: """Build the database operation to get the current offset. :return: A ``DatabaseOperation`` instance. """ return AiopgDatabaseOperation( - """ + f""" SELECT value - FROM snapshot_aux_offset + FROM {self.build_offset_table_name()} WHERE id = TRUE; """ ) -AiopgDatabaseClient.register_factory(SnapshotDatabaseOperationFactory, AiopgSnapshotDatabaseOperationFactory) +AiopgDatabaseClient.set_factory(SnapshotDatabaseOperationFactory, AiopgSnapshotDatabaseOperationFactory) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/queries.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/queries.py similarity index 93% rename from packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/queries.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/queries.py index bf65b4174..ddbd559e9 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/snapshots/database/factories/aiopg/queries.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/snapshots/queries.py @@ -4,6 +4,7 @@ from typing import ( Any, + Iterable, Optional, ) from uuid import ( @@ -22,15 +23,10 @@ Placeholder, ) -from minos.common import ( - NULL_UUID, - 
AvroDataEncoder, -) - -from .....contextvars import ( +from minos.aggregate import ( IS_REPOSITORY_SERIALIZATION_CONTEXT_VAR, ) -from .....queries import ( +from minos.aggregate.queries import ( _FALSE_CONDITION, _AndCondition, _ComposedCondition, @@ -50,6 +46,10 @@ _SimpleCondition, _TrueCondition, ) +from minos.common import ( + NULL_UUID, + AvroDataEncoder, +) # noinspection SqlResolve,SqlNoDataSourceInspection @@ -65,15 +65,21 @@ def __init__( condition: _Condition, ordering: Optional[_Ordering] = None, limit: Optional[int] = None, - transaction_uuids: tuple[UUID, ...] = (NULL_UUID,), + transaction_uuids: Iterable[UUID, ...] = (NULL_UUID,), exclude_deleted: bool = False, + table_name: Optional[str] = None, ): + if not isinstance(transaction_uuids, tuple): + transaction_uuids = tuple(transaction_uuids) + if table_name is None: + table_name = "snapshot" self.name = name self.condition = condition self.ordering = ordering self.limit = limit self.transaction_uuids = transaction_uuids self.exclude_deleted = exclude_deleted + self.table_name = table_name self._parameters = None def build(self) -> tuple[Composable, dict[str, Any]]: @@ -117,7 +123,9 @@ def _build_select_from(self) -> Composable: self._parameters[name] = transaction_uuid from_query_parts.append( - self._SELECT_TRANSACTION_CHUNK.format(index=Literal(index), transaction_uuid=Placeholder(name)) + self._SELECT_TRANSACTION_CHUNK.format( + index=Literal(index), transaction_uuid=Placeholder(name), table_name=Identifier(self.table_name) + ) ) from_query = SQL(" UNION ALL ").join(from_query_parts) @@ -265,7 +273,7 @@ def generate_random_str() -> str: _SELECT_TRANSACTION_CHUNK = SQL( "SELECT {index} AS transaction_index, * " - "FROM snapshot " + "FROM {table_name} " "WHERE name = %(name)s AND transaction_uuid = {transaction_uuid} " ) diff --git a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/aiopg.py 
b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/transactions.py similarity index 87% rename from packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/transactions.py index c325fb9e1..d98ecfe08 100644 --- a/packages/core/minos-microservice-aggregate/minos/aggregate/transactions/repositories/database/factories/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/aggregate/transactions.py @@ -2,39 +2,48 @@ annotations, ) +from collections.abc import ( + Iterable, +) from datetime import ( datetime, ) from typing import ( - TYPE_CHECKING, Optional, ) from uuid import ( UUID, ) +from minos.aggregate import ( + TransactionDatabaseOperationFactory, + TransactionStatus, +) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, ComposedDatabaseOperation, DatabaseOperation, ) -from .abc import ( - TransactionDatabaseOperationFactory, +from ...clients import ( + AiopgDatabaseClient, +) +from ...operations import ( + AiopgDatabaseOperation, ) - -if TYPE_CHECKING: - from ....entries import ( - TransactionStatus, - ) # noinspection SqlNoDataSourceInspection,SqlResolve,PyMethodMayBeStatic class AiopgTransactionDatabaseOperationFactory(TransactionDatabaseOperationFactory): """Aiopg Transaction Database Operation Factory class.""" - def build_create_table(self) -> DatabaseOperation: + def build_table_name(self) -> str: + """Get the table name. + + :return: A ``str`` value. + """ + return "aggregate_transaction" + + def build_create(self) -> DatabaseOperation: """Build the database operation to create the snapshot table. :return: A ``DatabaseOperation`` instance. 
@@ -65,11 +74,11 @@ def build_create_table(self) -> DatabaseOperation: $$ LANGUAGE plpgsql; """, - lock="aggregate_transaction_enum", + lock="transaction_status", ), AiopgDatabaseOperation( - """ - CREATE TABLE IF NOT EXISTS aggregate_transaction ( + f""" + CREATE TABLE IF NOT EXISTS {self.build_table_name()} ( uuid UUID PRIMARY KEY, destination_uuid UUID NOT NULL, status TRANSACTION_STATUS NOT NULL, @@ -77,17 +86,18 @@ def build_create_table(self) -> DatabaseOperation: updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() ); """, - lock="aggregate_transaction", + lock=self.build_table_name(), ), ] ) - def build_submit_row( + def build_submit( self, uuid: UUID, destination_uuid: UUID, status: TransactionStatus, event_offset: int, + **kwargs, ) -> DatabaseOperation: """Build the database operation to submit a row. @@ -95,6 +105,7 @@ def build_submit_row( :param destination_uuid: The identifier of the destination transaction. :param status: The status of the transaction. :param event_offset: The event offset of the transaction. + :param kwargs: Additional named arguments. :return: A ``DatabaseOperation`` instance. 
""" @@ -106,8 +117,8 @@ def build_submit_row( } return AiopgDatabaseOperation( - """ - INSERT INTO aggregate_transaction AS t (uuid, destination_uuid, status, event_offset) + f""" + INSERT INTO {self.build_table_name()} AS t (uuid, destination_uuid, status, event_offset) VALUES (%(uuid)s, %(destination_uuid)s, %(status)s, %(event_offset)s) ON CONFLICT (uuid) DO @@ -125,14 +136,14 @@ def build_submit_row( lock=uuid.int & (1 << 32) - 1, ) - def build_select_rows( + def build_query( self, uuid: Optional[UUID] = None, uuid_ne: Optional[UUID] = None, - uuid_in: Optional[tuple[UUID]] = None, + uuid_in: Optional[Iterable[UUID]] = None, destination_uuid: Optional[UUID] = None, status: Optional[str] = None, - status_in: Optional[tuple[str]] = None, + status_in: Optional[Iterable[str]] = None, event_offset: Optional[int] = None, event_offset_lt: Optional[int] = None, event_offset_gt: Optional[int] = None, @@ -166,6 +177,11 @@ def build_select_rows( :param kwargs: Additional named arguments. :return: A ``DatabaseOperation`` instance. 
""" + if uuid_in is not None: + uuid_in = tuple(uuid_in) + + if status_in is not None: + status_in = tuple(status_in) conditions = list() @@ -202,9 +218,9 @@ def build_select_rows( if updated_at_ge is not None: conditions.append("updated_at >= %(updated_at_ge)s") - select_all = """ + select_all = f""" SELECT uuid, status, event_offset, destination_uuid, updated_at - FROM aggregate_transaction + FROM {self.build_table_name()} """.strip() if not conditions: @@ -233,4 +249,4 @@ def build_select_rows( ) -AiopgDatabaseClient.register_factory(TransactionDatabaseOperationFactory, AiopgTransactionDatabaseOperationFactory) +AiopgDatabaseClient.set_factory(TransactionDatabaseOperationFactory, AiopgTransactionDatabaseOperationFactory) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/__init__.py new file mode 100644 index 000000000..bd213d8b7 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/__init__.py @@ -0,0 +1,6 @@ +from .locks import ( + AiopgLockDatabaseOperationFactory, +) +from .managemens import ( + AiopgManagementDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-common/minos/common/database/locks/factories/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/locks.py similarity index 89% rename from packages/core/minos-microservice-common/minos/common/database/locks/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/locks.py index b590a58f7..db23a3249 100644 --- a/packages/core/minos-microservice-common/minos/common/database/locks/factories/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/locks.py @@ -1,12 +1,13 @@ +from minos.common import ( + DatabaseOperation, + LockDatabaseOperationFactory, +) + from ...clients import ( AiopgDatabaseClient, ) 
from ...operations import ( AiopgDatabaseOperation, - DatabaseOperation, -) -from .abc import ( - LockDatabaseOperationFactory, ) @@ -30,4 +31,4 @@ def build_release(self, hashed_key: int) -> DatabaseOperation: return AiopgDatabaseOperation("select pg_advisory_unlock(%(hashed_key)s)", {"hashed_key": hashed_key}) -AiopgDatabaseClient.register_factory(LockDatabaseOperationFactory, AiopgLockDatabaseOperationFactory) +AiopgDatabaseClient.set_factory(LockDatabaseOperationFactory, AiopgLockDatabaseOperationFactory) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/managemens.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/managemens.py new file mode 100644 index 000000000..814dec80f --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/common/managemens.py @@ -0,0 +1,35 @@ +from minos.common import ( + DatabaseOperation, + ManagementDatabaseOperationFactory, +) + +from ...clients import ( + AiopgDatabaseClient, +) +from ...operations import ( + AiopgDatabaseOperation, +) + + +# noinspection SqlNoDataSourceInspection +class AiopgManagementDatabaseOperationFactory(ManagementDatabaseOperationFactory): + """Aiopg Manage Database Operation Factory class.""" + + def build_create(self, database: str) -> DatabaseOperation: + """Build the database operation to create a database. + + :param database: The new database's name. + :return: A ``DatabaseOperation``. + """ + return AiopgDatabaseOperation(f"CREATE DATABASE {database};") + + def build_delete(self, database: str) -> DatabaseOperation: + """Build the database operation to create a database. + + :param database: The name of the database to be deleted. + :return: A ``DatabaseOperation``. 
+ """ + return AiopgDatabaseOperation(f"DROP DATABASE IF EXISTS {database};") + + +AiopgDatabaseClient.set_factory(ManagementDatabaseOperationFactory, AiopgManagementDatabaseOperationFactory) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/__init__.py new file mode 100644 index 000000000..04568ca64 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/__init__.py @@ -0,0 +1,10 @@ +from .collections import ( + AiopgBrokerQueueDatabaseOperationFactory, +) +from .publishers import ( + AiopgBrokerPublisherQueueDatabaseOperationFactory, +) +from .subscribers import ( + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + AiopgBrokerSubscriberQueueDatabaseOperationFactory, +) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/__init__.py new file mode 100644 index 000000000..7f6fc30e8 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/__init__.py @@ -0,0 +1,3 @@ +from .queues import ( + AiopgBrokerQueueDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/queues.py similarity index 88% rename from packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/queues.py index 126d370c1..339453e68 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/collections/queues/database/factories/aiopg.py +++ 
b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/collections/queues.py @@ -11,14 +11,16 @@ ) from minos.common import ( - AiopgDatabaseOperation, DatabaseOperation, ) - -from .abc import ( +from minos.networks import ( BrokerQueueDatabaseOperationFactory, ) +from ....clients import ( + AiopgDatabaseOperation, +) + # noinspection SqlResolve,SqlNoDataSourceInspection,SqlNoDataSourceInspection,SqlResolve class AiopgBrokerQueueDatabaseOperationFactory(BrokerQueueDatabaseOperationFactory, ABC): @@ -32,7 +34,7 @@ def build_table_name(self) -> str: """ raise NotImplementedError - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the "create table" query. :return: A ``SQL`` instance. @@ -51,7 +53,7 @@ def build_create_table(self) -> DatabaseOperation: lock=self.build_table_name(), ) - def build_update_not_processed(self, id_: int) -> DatabaseOperation: + def build_mark_processed(self, id_: int) -> DatabaseOperation: """Build the "update not processed" query. :return: A ``SQL`` instance. @@ -64,7 +66,7 @@ def build_update_not_processed(self, id_: int) -> DatabaseOperation: {"id": id_}, ) - def build_delete_processed(self, id_: int) -> DatabaseOperation: + def build_delete(self, id_: int) -> DatabaseOperation: """Build the "delete processed" query. :return: A ``SQL`` instance. @@ -84,7 +86,7 @@ def build_mark_processing(self, ids: Iterable[int]) -> DatabaseOperation: {"ids": tuple(ids)}, ) - def build_count_not_processed(self, retry: int, *args, **kwargs) -> DatabaseOperation: + def build_count(self, retry: int, *args, **kwargs) -> DatabaseOperation: """Build the "count not processed" query. :return: @@ -97,7 +99,7 @@ def build_count_not_processed(self, retry: int, *args, **kwargs) -> DatabaseOper {"retry": retry}, ) - def build_insert(self, topic: str, data: bytes) -> DatabaseOperation: + def build_submit(self, topic: str, data: bytes) -> DatabaseOperation: """Build the "insert" query. 
:return: A ``SQL`` instance. @@ -107,7 +109,7 @@ def build_insert(self, topic: str, data: bytes) -> DatabaseOperation: {"topic": topic, "data": data}, ) - def build_select_not_processed(self, retry: int, records: int, *args, **kwargs) -> DatabaseOperation: + def build_query(self, retry: int, records: int, *args, **kwargs) -> DatabaseOperation: """Build the "select not processed" query. :return: A ``SQL`` instance. diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/__init__.py new file mode 100644 index 000000000..fd07a5ba7 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/__init__.py @@ -0,0 +1,3 @@ +from .queues import ( + AiopgBrokerPublisherQueueDatabaseOperationFactory, +) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/queues.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/queues.py new file mode 100644 index 000000000..428ec65fb --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/publishers/queues.py @@ -0,0 +1,28 @@ +from minos.networks import ( + BrokerPublisherQueueDatabaseOperationFactory, +) + +from ....clients import ( + AiopgDatabaseClient, +) +from ..collections import ( + AiopgBrokerQueueDatabaseOperationFactory, +) + + +class AiopgBrokerPublisherQueueDatabaseOperationFactory( + BrokerPublisherQueueDatabaseOperationFactory, AiopgBrokerQueueDatabaseOperationFactory +): + """Aiopg Broker Publisher Queue Query Factory class.""" + + def build_table_name(self) -> str: + """Get the table name. + + :return: A ``str`` value. 
+ """ + return "broker_publisher_queue" + + +AiopgDatabaseClient.set_factory( + BrokerPublisherQueueDatabaseOperationFactory, AiopgBrokerPublisherQueueDatabaseOperationFactory +) diff --git a/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/__init__.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/__init__.py new file mode 100644 index 000000000..e4eedf459 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/__init__.py @@ -0,0 +1,6 @@ +from .queues import ( + AiopgBrokerSubscriberQueueDatabaseOperationFactory, +) +from .validators import ( + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/queues.py similarity index 87% rename from packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/queues.py index f4811179f..878aa6e70 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/queued/queues/database/factories/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/queues.py @@ -7,16 +7,20 @@ ) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, DatabaseOperation, ) +from minos.networks import ( + BrokerSubscriberQueueDatabaseOperationFactory, +) -from ......collections import ( - AiopgBrokerQueueDatabaseOperationFactory, +from ....clients import ( + AiopgDatabaseClient, ) -from .abc import ( - BrokerSubscriberQueueDatabaseOperationFactory, +from ....operations import ( + AiopgDatabaseOperation, +) +from ..collections import 
( + AiopgBrokerQueueDatabaseOperationFactory, ) @@ -33,9 +37,7 @@ def build_table_name(self) -> str: """ return "broker_subscriber_queue" - def build_count_not_processed( - self, retry: int, topics: Iterable[str] = tuple(), *args, **kwargs - ) -> DatabaseOperation: + def build_count(self, retry: int, topics: Iterable[str] = tuple(), *args, **kwargs) -> DatabaseOperation: """Build the "count not processed" query. :return: @@ -48,7 +50,7 @@ def build_count_not_processed( {"retry": retry, "topics": tuple(topics)}, ) - def build_select_not_processed( + def build_query( self, retry: int, records: int, topics: Iterable[str] = tuple(), *args, **kwargs ) -> DatabaseOperation: """Build the "select not processed" query. @@ -68,7 +70,7 @@ def build_select_not_processed( ) -AiopgDatabaseClient.register_factory( +AiopgDatabaseClient.set_factory( BrokerSubscriberQueueDatabaseOperationFactory, AiopgBrokerSubscriberQueueDatabaseOperationFactory, ) diff --git a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/validators.py similarity index 89% rename from packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/validators.py index 48dbeeadb..350897eea 100644 --- a/packages/core/minos-microservice-networks/minos/networks/brokers/subscribers/filtered/validators/duplicates/database/factories/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/factories/networks/subscribers/validators.py @@ -7,16 +7,20 @@ ) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, ComposedDatabaseOperation, DatabaseOperation, ) - -from .abc import ( +from minos.networks import ( 
BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, ) +from ....clients import ( + AiopgDatabaseClient, +) +from ....operations import ( + AiopgDatabaseOperation, +) + # noinspection SqlNoDataSourceInspection,SqlResolve class AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory( @@ -32,7 +36,7 @@ def build_table_name() -> str: """ return "broker_subscriber_processed_messages" - def build_create_table(self) -> DatabaseOperation: + def build_create(self) -> DatabaseOperation: """Build the "create table" query. :return: A ``SQL`` instance. @@ -57,7 +61,7 @@ def build_create_table(self) -> DatabaseOperation: ] ) - def build_insert_row(self, topic: str, uuid: UUID) -> DatabaseOperation: + def build_submit(self, topic: str, uuid: UUID) -> DatabaseOperation: """Build the "insert row" query. :return: A ``SQL`` instance. @@ -71,7 +75,7 @@ def build_insert_row(self, topic: str, uuid: UUID) -> DatabaseOperation: ) -AiopgDatabaseClient.register_factory( +AiopgDatabaseClient.set_factory( BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, ) diff --git a/packages/core/minos-microservice-common/minos/common/database/operations/aiopg.py b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/operations.py similarity index 94% rename from packages/core/minos-microservice-common/minos/common/database/operations/aiopg.py rename to packages/plugins/minos-database-aiopg/minos/plugins/aiopg/operations.py index 5126ff888..f404578f7 100644 --- a/packages/core/minos-microservice-common/minos/common/database/operations/aiopg.py +++ b/packages/plugins/minos-database-aiopg/minos/plugins/aiopg/operations.py @@ -7,7 +7,7 @@ Composable, ) -from .abc import ( +from minos.common import ( DatabaseOperation, ) diff --git a/packages/plugins/minos-database-aiopg/poetry.lock b/packages/plugins/minos-database-aiopg/poetry.lock new file mode 100644 index 000000000..305d0cd6c --- /dev/null +++ 
b/packages/plugins/minos-database-aiopg/poetry.lock @@ -0,0 +1,874 @@ +[[package]] +name = "aiomisc" +version = "15.7.3" +description = "aiomisc - miscellaneous utils for asyncio" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +colorlog = "*" + +[package.extras] +aiohttp = ["aiohttp"] +asgi = ["aiohttp-asgi"] +carbon = ["aiocarbon (>=0.15,<1.0)"] +contextvars = ["contextvars (>=2.4,<3.0)"] +cron = ["croniter (>=0.3.34,<0.4.0)"] +develop = ["aiocontextvars (==0.2.2)", "aiohttp-asgi", "aiohttp (<4)", "async-timeout", "coveralls", "croniter (>=0.3.34,<0.4.0)", "fastapi", "freezegun (<1.1)", "mypy (>=0.782,<1.0)", "pylava", "pytest", "pytest-cov (>=3.0,<4.0)", "pytest-freezegun (>=0.4.2,<0.5.0)", "pytest-rst", "pytest-subtests", "rich", "setproctitle", "sphinx-autobuild", "sphinx-intl", "sphinx (>=3.5.1)", "timeout-decorator", "tox (>=2.4)", "types-croniter"] +raven = ["raven-aiohttp"] +uvloop = ["uvloop (>=0.14,<1)"] + +[[package]] +name = "aiopg" +version = "1.3.3" +description = "Postgres integration with asyncio." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +async-timeout = ">=3.0,<5.0" +psycopg2-binary = ">=2.8.4" + +[package.extras] +sa = ["sqlalchemy[postgresql_psycopg2binary] (>=1.3,<1.5)"] + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "black" +version = "22.3.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "click" +version = "8.1.2" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorlog" +version = "6.6.0" +description = "Add colours to the output of Python's logging module." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "coverage" +version = "6.3.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "crontab" +version = "0.23.0" +description = "Parse and use crontab schedules in Python" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "dependency-injector" +version = "4.39.1" +description = "Dependency injection framework for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.7.0,<=1.16.0" + +[package.extras] +aiohttp = ["aiohttp"] +flask = ["flask"] +pydantic = ["pydantic"] +yaml = ["pyyaml"] + +[[package]] +name = "fastavro" +version = "1.4.10" +description = "Fast read/write of AVRO files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +codecs = ["python-snappy", "zstandard", "lz4"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] +name = "flake8" +version = "4.0.1" 
+description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "lmdb" +version = "1.3.0" +description = "Universal Python binding for the LMDB 'Lightning' Database" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "minos-microservice-aggregate" +version = "0.6.0" +description = "The Aggregate pattern of the Minos Framework" +category = "main" +optional = false +python-versions = "^3.9" +develop = true + +[package.dependencies] +cached-property = "^1.5.2" +minos-microservice-common = "^0.6.0" +minos-microservice-networks = "^0.6.0" +psycopg2-binary = "^2.9.3" + +[package.source] +type = "directory" +url = "../../core/minos-microservice-aggregate" + +[[package]] +name = "minos-microservice-common" +version = "0.6.1" +description = "The common core of the Minos Framework" +category = "main" +optional = false +python-versions = "^3.9" +develop = true + +[package.dependencies] +aiomisc = ">=14.0.3,<15.8.0" +aiopg = "^1.2.1" +cached-property = "^1.5.2" +dependency-injector = "^4.32.2" +fastavro = "^1.4.0" +lmdb = 
"^1.2.1" +orjson = "^3.5.2" +PyYAML = ">=5.4.1,<7.0.0" +uvloop = "^0.16.0" + +[package.source] +type = "directory" +url = "../../core/minos-microservice-common" + +[[package]] +name = "minos-microservice-networks" +version = "0.6.0" +description = "The networks core of the Minos Framework" +category = "main" +optional = false +python-versions = "^3.9" +develop = true + +[package.dependencies] +aiopg = "^1.2.1" +crontab = "^0.23.0" +minos-microservice-common = "^0.6.0" +psycopg2-binary = "^2.9.3" + +[package.source] +type = "directory" +url = "../../core/minos-microservice-networks" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "orjson" +version = "3.6.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.5.1" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.3" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyparsing" +version = "3.0.8" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "7.1.1" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", 
markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typing-extensions" +version = "4.1.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "uvloop" +version = "0.16.0" +description = "Fast implementation of asyncio event loop on top of libuv" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=3.6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"] +test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.9" +content-hash = "1810729aaeb2889a1550499890e0b5377ddf4f57896bdfbce6023ee2673399cb" + +[metadata.files] +aiomisc = [ + {file = "aiomisc-15.7.3-py3-none-any.whl", hash = 
"sha256:0403e83268e98d0f2a125a70d13303fe1a2358e36db3daf02df032c7fa4f1525"}, + {file = "aiomisc-15.7.3.tar.gz", hash = "sha256:ba250a34bd4609ced36111cb50580f57c3d52f3955f953a53ecb2986988baedc"}, +] +aiopg = [ + {file = "aiopg-1.3.3-py3-none-any.whl", hash = "sha256:2842dd8741460eeef940032dcb577bfba4d4115205dd82a73ce13b3271f5bf0a"}, + {file = "aiopg-1.3.3.tar.gz", hash = "sha256:547c6ba4ea0d73c2a11a2f44387d7133cc01d3c6f3b8ed976c0ac1eff4f595d7"}, +] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +black = [ + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = 
"black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, +] +cached-property = [ + {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, + {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, +] +click = [ + {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, + {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +colorlog = [ + {file = "colorlog-6.6.0-py2.py3-none-any.whl", hash = "sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e"}, + {file = "colorlog-6.6.0.tar.gz", hash = "sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8"}, +] +coverage = [ + {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, + {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, + {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, + {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, + {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, + {file = 
"coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, + {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, + {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, + {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, + {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, + {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, + {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, + {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, + {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, +] +crontab = [ + {file = "crontab-0.23.0.tar.gz", hash = "sha256:ca79dede9c2f572bb32f38703e8fddcf3427e86edc838f2ffe7ae4b9ee2b0733"}, +] +dependency-injector = [ + {file = "dependency-injector-4.39.1.tar.gz", hash = "sha256:9ab76dc5e19b2692aaca49e00f9b41a087138d139b0ec985f92ff0498f038772"}, + {file = "dependency_injector-4.39.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c788a3d5482e63b5fd55e14fc258f1ff0b51b411927ab132ef0f689cb5d1183f"}, + {file = "dependency_injector-4.39.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0832e0457a360725cd1d1037b77b85478aeeaacc60e85ecceeb8020409e7b62"}, + {file = "dependency_injector-4.39.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d51998fff9704fd01d11c3c48f4e88d8506cb6afa1ee41409a881a5a51dae3fc"}, + {file = "dependency_injector-4.39.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea484cd13fc62966bf5582df0816205feee072d18f228aac75f7807b43f223ae"}, + {file = "dependency_injector-4.39.1-cp310-cp310-win32.whl", hash = "sha256:17389e53ec29ca13570319cf2065dcc4c2f6d36db5dd792bb1e8f2c39a9f146b"}, + {file = "dependency_injector-4.39.1-cp310-cp310-win_amd64.whl", hash = "sha256:55b0988489267c5a580f419133770ffe293057f2064da1c9ad6a2cc69666739b"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b297c3e79d3633cc56366386ae1f7dbce1587673cca2f559c368c1e936a1fa94"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2728827c5abb2420c5811e218262ae1b77a48e76cc9eebc6b4f55fee48a1a18d"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:029e42b064ab8cb5b2559be040ff682c7aa81592f1654a82355475956df17803"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:305e3db3f4bf40f64caaa1303e03005174e78d2339d2ae517b34c011ec2300f9"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-win32.whl", hash = "sha256:a661dd23a5e4e2f6bf4a729de7fadbe148c9a4a298dbcadfc5a94584b6930970"}, + {file = "dependency_injector-4.39.1-cp36-cp36m-win_amd64.whl", hash = "sha256:340d6e7af5c4729d20c837d6d1e8a2626c24a05259dff746406cc823e26ba1e7"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a1553dac1c95f0de1f910b0025ee4570ea078a07d576bcdc2168990e719cea50"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c7baaa64d93359ee08c15d68579cc803e11d9becaf961f5a66b94ff627248e1"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7a27bf1951a066cf347b886cc7ab0f37dcbd1ad59bffcfe721c8c12a189a150d"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da6e2d685f7d0c65257f08133b68d9bf74ec319b90a0f21b4f629d168ce5f68f"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-win32.whl", hash = "sha256:a8ddd03ca86e67e9d3cc038793d34fbfccab12e6145df813e72bf14f9371f2ea"}, + {file = "dependency_injector-4.39.1-cp37-cp37m-win_amd64.whl", hash = "sha256:e01a319ea05cd86b520201386dcb53a81a5400cb82fcc2f006bd7e92c0c51a0a"}, + {file = "dependency_injector-4.39.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:4704804bde25b694aa65201927937a9d82d8bc055cb3dadc68eb05988bd34fa9"}, + {file = "dependency_injector-4.39.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0935d50b263169e7b0826a2fb6be80d6a4f2a7c59e6dd9876f86da3243bea9eb"}, + {file = "dependency_injector-4.39.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3c34aa5abb1826b6189f47daf6e469d4293c1d01693233da2c1b923816270cc5"}, + {file = "dependency_injector-4.39.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db64904c9b9a88756cfece3e3ed078a2b57127642711dd08af342dba8abf9667"}, + {file = "dependency_injector-4.39.1-cp38-cp38-win32.whl", hash = "sha256:66ebe728194adc8720dbc4d662edbbfa55659ff23c9c493fb2dae0bfd4df5734"}, + {file = "dependency_injector-4.39.1-cp38-cp38-win_amd64.whl", hash = "sha256:4349974620f630d6726808e1291ec99713b64d449b84eb01581ee807a5a5c224"}, + {file = "dependency_injector-4.39.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d87cad0899b05cd08931bfb68ddf7be77711a67b0649c37f2045d7808977b082"}, + {file = "dependency_injector-4.39.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8efd965e5cfdd9f339ec895e73c119569851adedc175088d34a670f5206fea63"}, + {file = "dependency_injector-4.39.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:27f69485ca6b85c31d162ee86cf6ef71bb71dce9cd2b5d0745425dfc551eefa1"}, + {file = "dependency_injector-4.39.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a25b63de59dff04ec78f5161f00c0222a04a23def5d1f0eda14e389a32baf428"}, + {file = "dependency_injector-4.39.1-cp39-cp39-win32.whl", hash = "sha256:a14274f50d125b4579314c355e22af07def1a96641ca94bd75edcf1400b89477"}, + {file = "dependency_injector-4.39.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:9950039d00625f9252cd26378a4406342b256886bb61e4db8b65e9f01270f53e"}, + {file = "dependency_injector-4.39.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d7f39cd54678741e132e13da3a1367ac18058cbda61fe39d61c8583aa6fd757"}, + {file = "dependency_injector-4.39.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2368c7ba3c9ffaf816ea0f2d14c78d481491b805f62ac8496a78a51397d4689"}, + {file = "dependency_injector-4.39.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:84e32407bb16c58aa0d4b5ed8485537bc66ccc14cfffae7022f1204e35ec939a"}, + {file = "dependency_injector-4.39.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f2d80a42c546e1f934d427b071630d86653cd4a60c74b570c4ffb03025c1f1f9"}, + {file = "dependency_injector-4.39.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ca126bbed370b8c002c859ebeb76f6d83eba2d7fb5d66f37f47cfc19661d2889"}, +] +fastavro = [ + {file = "fastavro-1.4.10-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:f225c81e869b3cefef6f2b478cd452693181ed7e645be3cea4d82024354ecaa0"}, + {file = "fastavro-1.4.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7669302c9124b7cd6c1bdff80c77116b2290c984814077fb7d01d7389586054"}, + {file = "fastavro-1.4.10-cp310-cp310-win_amd64.whl", hash = "sha256:995525bdfbdfef205ea148b5bc6a9fe5ccf921931123c39d9aad75a2b661681e"}, + {file = "fastavro-1.4.10-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:64244c53f1e4853184c2f7383d0332e1dcb34c38c05e6613530ade0378e8acfc"}, + {file = "fastavro-1.4.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c60965da6dc7a91e00ccd84d84797fad746555f44e8a816c4cc460fb231c44fe"}, + {file = "fastavro-1.4.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10eb25378f37dc00c10e83c4c0442c1a6d1b871f74a6dfdfc12b6447962bbdd0"}, + {file = 
"fastavro-1.4.10-cp37-cp37m-win_amd64.whl", hash = "sha256:d5719adf6045fc743de5fa738d561a81e58dc782c94f1b16cb21b5dd6253e7fd"}, + {file = "fastavro-1.4.10-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:24c4a1a8cc92e135ecfcd9cbd1f6cfa088cbc74d78c18e02a609cb11fa33778d"}, + {file = "fastavro-1.4.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0934490b0c3bcfd6bcbacbcb5144c6b5e4298cda209fbb17c856adf5405127dd"}, + {file = "fastavro-1.4.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23763d73412c077aee401a0368c64cbc23859e26711dbae78a3cf0227f65165"}, + {file = "fastavro-1.4.10-cp38-cp38-win_amd64.whl", hash = "sha256:09f1dfdd8192ae09e0f477d1f024d8054fccdb099ad495d2a796bcee3cadebd1"}, + {file = "fastavro-1.4.10-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:0c6695753fa3035bbd0fa5cb21bf1b5dad39483c669b32ca0bb55fb07c1ccc87"}, + {file = "fastavro-1.4.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35f960dbba04647d8d6d5616f879622d2a1e8a84eb2d2e02a883a22e0803463a"}, + {file = "fastavro-1.4.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9660878ca01e3dbbee12385c5902a2b6b12ecbb5af4733d1026175a14b1ef67f"}, + {file = "fastavro-1.4.10-cp39-cp39-win_amd64.whl", hash = "sha256:64cbd386e408b3bcb2de53b1f847163073eb0d0d0338db65f76051c6ba9a9bc1"}, + {file = "fastavro-1.4.10.tar.gz", hash = "sha256:a24f9dd803c44bfb599476b000f9bd0088f7ac2401e6c20818f38d8af12785a0"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +lmdb = [ + {file = "lmdb-1.3.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:63cb73fe7ce9eb93d992d632c85a0476b4332670d9e6a2802b5062f603b7809f"}, + {file = "lmdb-1.3.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:abbc439cd9fe60ffd6197009087ea885ac150017dc85384093b1d376f83f0ec4"}, + {file = "lmdb-1.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6260a526e4ad85b1f374a5ba9475bf369fb07e7728ea6ec57226b02c40d1976b"}, + {file = "lmdb-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e568ae0887ae196340947d9800136e90feaed6b86a261ef01f01b2ba65fc8106"}, + {file = "lmdb-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6a816954d212f40fd15007cd81ab7a6bebb77436d949a6a9ae04af57fc127f3"}, + {file = "lmdb-1.3.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:fa6439356e591d3249ab0e1778a6f8d8408e993f66dc911914c78208f5310309"}, + {file = "lmdb-1.3.0-cp35-cp35m-win_amd64.whl", hash = "sha256:c6adbd6f7f9048e97f31a069e652eb51020a81e80a0ce92dbb9810d21da2409a"}, + {file = "lmdb-1.3.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:eefb392f6b5cd43aada49258c5a79be11cb2c8cd3fc3e2d9319a1e0b9f906458"}, + {file = "lmdb-1.3.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a14aca2651c3af6f0d0a6b9168200eea0c8f2d27c40b01a442f33329a6e8dff"}, + {file = "lmdb-1.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfa4aa9c67f8aee89b23005e98d1f3f32490b6b905fd1cb604b207cbd5755ab"}, + {file = "lmdb-1.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7da05d70fcc6561ac6b09e9fb1bf64b7ca294652c64c8a2889273970cee796b9"}, + {file = 
"lmdb-1.3.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:008243762decf8f6c90430a9bced56290ebbcdb5e877d90e42343bb97033e494"}, + {file = "lmdb-1.3.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:17215a42a4b9814c383deabecb160581e4fb75d00198eef0e3cea54f230ffbea"}, + {file = "lmdb-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65334eafa5d430b18d81ebd5362559a41483c362e1931f6e1b15bab2ecb7d75d"}, + {file = "lmdb-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:18c69fabdaf04efaf246587739cc1062b3e57c6ef0743f5c418df89e5e7e7b9b"}, + {file = "lmdb-1.3.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:41318717ab5d15ad2d6d263d34fbf614a045210f64b25e59ce734bb2105e421f"}, + {file = "lmdb-1.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:df2724bad7820114a205472994091097d0fa65a3e5fff5a8e688d123fb8c6326"}, + {file = "lmdb-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddd590e1c7fcb395931aa3782fb89b9db4550ab2d81d006ecd239e0d462bc41"}, + {file = "lmdb-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:4172fba19417d7b29409beca7d73c067b54e5d8ab1fb9b51d7b4c1445d20a167"}, + {file = "lmdb-1.3.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:2df38115dd9428a54d59ae7c712a4c7cce0d6b1d66056de4b1a8c38718066106"}, + {file = "lmdb-1.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9103aa4908f0bca43c5911ca067d4e3d01f682dff0c0381a1239bd2bd757984"}, + {file = "lmdb-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:394df860c3f93cfd92b6f4caba785f38208cc9614c18b3803f83a2cc1695042f"}, + {file = "lmdb-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:62ab28e3593bdc318ea2f2fa1574e5fca3b6d1f264686d773ba54a637d4f563b"}, + {file = "lmdb-1.3.0-pp27-pypy_73-macosx_10_7_x86_64.whl", hash = "sha256:e6a704b3baced9182836c7f77b769f23856f3a8f62d0282b1bc1feaf81a86712"}, + {file = "lmdb-1.3.0-pp27-pypy_73-win_amd64.whl", hash 
= "sha256:08f4b5129f4683802569b02581142e415c8dcc0ff07605983ec1b07804cecbad"}, + {file = "lmdb-1.3.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:f291e3f561f58dddf63a92a5a6a4b8af3a0920b6705d35e2f80e52e86ee238a2"}, + {file = "lmdb-1.3.0.tar.gz", hash = "sha256:60a11efc21aaf009d06518996360eed346f6000bfc9de05114374230879f992e"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +minos-microservice-aggregate = [] +minos-microservice-common = [] +minos-microservice-networks = [] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +orjson = [ + {file = "orjson-3.6.7-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93188a9d6eb566419ad48befa202dfe7cd7a161756444b99c4ec77faea9352a4"}, + {file = "orjson-3.6.7-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:82515226ecb77689a029061552b5df1802b75d861780c401e96ca6bc8495f775"}, + {file = "orjson-3.6.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3af57ffab7848aaec6ba6b9e9b41331250b57bf696f9d502bacdc71a0ebab0ba"}, + {file = "orjson-3.6.7-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:a7297504d1142e7efa236ffc53f056d73934a993a08646dbcee89fc4308a8fcf"}, + {file = "orjson-3.6.7-cp310-cp310-manylinux_2_24_x86_64.whl", hash = "sha256:5a50cde0dbbde255ce751fd1bca39d00ecd878ba0903c0480961b31984f2fab7"}, + {file = "orjson-3.6.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d21f9a2d1c30e58070f93988db4cad154b9009fafbde238b52c1c760e3607fbe"}, + {file = "orjson-3.6.7-cp310-none-win_amd64.whl", hash = 
"sha256:e152464c4606b49398afd911777decebcf9749cc8810c5b4199039e1afb0991e"}, + {file = "orjson-3.6.7-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:0a65f3c403f38b0117c6dd8e76e85a7bd51fcd92f06c5598dfeddbc44697d3e5"}, + {file = "orjson-3.6.7-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6c47cfca18e41f7f37b08ff3e7abf5ada2d0f27b5ade934f05be5fc5bb956e9d"}, + {file = "orjson-3.6.7-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:63185af814c243fad7a72441e5f98120c9ecddf2675befa486d669fb65539e9b"}, + {file = "orjson-3.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2da6fde42182b80b40df2e6ab855c55090ebfa3fcc21c182b7ad1762b61d55c"}, + {file = "orjson-3.6.7-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:48c5831ec388b4e2682d4ff56d6bfa4a2ef76c963f5e75f4ff4785f9cf338a80"}, + {file = "orjson-3.6.7-cp37-cp37m-manylinux_2_24_x86_64.whl", hash = "sha256:913fac5d594ccabf5e8fbac15b9b3bb9c576d537d49eeec9f664e7a64dde4c4b"}, + {file = "orjson-3.6.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:58f244775f20476e5851e7546df109f75160a5178d44257d437ba6d7e562bfe8"}, + {file = "orjson-3.6.7-cp37-none-win_amd64.whl", hash = "sha256:2d5f45c6b85e5f14646df2d32ecd7ff20fcccc71c0ea1155f4d3df8c5299bbb7"}, + {file = "orjson-3.6.7-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:612d242493afeeb2068bc72ff2544aa3b1e627578fcf92edee9daebb5893ffea"}, + {file = "orjson-3.6.7-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:539cdc5067db38db27985e257772d073cd2eb9462d0a41bde96da4e4e60bd99b"}, + {file = "orjson-3.6.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d103b721bbc4f5703f62b3882e638c0b65fcdd48622531c7ffd45047ef8e87c"}, + {file = "orjson-3.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb10a20f80e95102dd35dfbc3a22531661b44a09b55236b012a446955846b023"}, + {file = 
"orjson-3.6.7-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:bb68d0da349cf8a68971a48ad179434f75256159fe8b0715275d9b49fa23b7a3"}, + {file = "orjson-3.6.7-cp38-cp38-manylinux_2_24_x86_64.whl", hash = "sha256:4a2c7d0a236aaeab7f69c17b7ab4c078874e817da1bfbb9827cb8c73058b3050"}, + {file = "orjson-3.6.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3be045ca3b96119f592904cf34b962969ce97bd7843cbfca084009f6c8d2f268"}, + {file = "orjson-3.6.7-cp38-none-win_amd64.whl", hash = "sha256:bd765c06c359d8a814b90f948538f957fa8a1f55ad1aaffcdc5771996aaea061"}, + {file = "orjson-3.6.7-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7dd9e1e46c0776eee9e0649e3ae9584ea368d96851bcaeba18e217fa5d755283"}, + {file = "orjson-3.6.7-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c4b4f20a1e3df7e7c83717aff0ef4ab69e42ce2fb1f5234682f618153c458406"}, + {file = "orjson-3.6.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7107a5673fd0b05adbb58bf71c1578fc84d662d29c096eb6d998982c8635c221"}, + {file = "orjson-3.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a08b6940dd9a98ccf09785890112a0f81eadb4f35b51b9a80736d1725437e22c"}, + {file = "orjson-3.6.7-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:f5d1648e5a9d1070f3628a69a7c6c17634dbb0caf22f2085eca6910f7427bf1f"}, + {file = "orjson-3.6.7-cp39-cp39-manylinux_2_24_x86_64.whl", hash = "sha256:e6201494e8dff2ce7fd21da4e3f6dfca1a3fed38f9dcefc972f552f6596a7621"}, + {file = "orjson-3.6.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:70d0386abe02879ebaead2f9632dd2acb71000b4721fd8c1a2fb8c031a38d4d5"}, + {file = "orjson-3.6.7-cp39-none-win_amd64.whl", hash = "sha256:d9a3288861bfd26f3511fb4081561ca768674612bac59513cb9081bb61fcc87f"}, + {file = "orjson-3.6.7.tar.gz", hash = "sha256:a4bb62b11289b7620eead2f25695212e9ac77fcfba76f050fa8a540fb5c32401"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, + {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +psycopg2-binary = [ + {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = 
"sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +pyparsing = [ + {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, + {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, +] +pytest = [ + {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, + {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typing-extensions = [ + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, +] +uvloop = [ + {file = "uvloop-0.16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6224f1401025b748ffecb7a6e2652b17768f30b1a6a3f7b44660e5b5b690b12d"}, + {file = "uvloop-0.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30ba9dcbd0965f5c812b7c2112a1ddf60cf904c1c160f398e7eed3a6b82dcd9c"}, + {file = 
"uvloop-0.16.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bd53f7f5db562f37cd64a3af5012df8cac2c464c97e732ed556800129505bd64"}, + {file = "uvloop-0.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:772206116b9b57cd625c8a88f2413df2fcfd0b496eb188b82a43bed7af2c2ec9"}, + {file = "uvloop-0.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b572256409f194521a9895aef274cea88731d14732343da3ecdb175228881638"}, + {file = "uvloop-0.16.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:04ff57aa137230d8cc968f03481176041ae789308b4d5079118331ab01112450"}, + {file = "uvloop-0.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a19828c4f15687675ea912cc28bbcb48e9bb907c801873bd1519b96b04fb805"}, + {file = "uvloop-0.16.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e814ac2c6f9daf4c36eb8e85266859f42174a4ff0d71b99405ed559257750382"}, + {file = "uvloop-0.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd8f42ea1ea8f4e84d265769089964ddda95eb2bb38b5cbe26712b0616c3edee"}, + {file = "uvloop-0.16.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:647e481940379eebd314c00440314c81ea547aa636056f554d491e40503c8464"}, + {file = "uvloop-0.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0d26fa5875d43ddbb0d9d79a447d2ace4180d9e3239788208527c4784f7cab"}, + {file = "uvloop-0.16.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ccd57ae8db17d677e9e06192e9c9ec4bd2066b77790f9aa7dede2cc4008ee8f"}, + {file = "uvloop-0.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:089b4834fd299d82d83a25e3335372f12117a7d38525217c2258e9b9f4578897"}, + {file = "uvloop-0.16.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98d117332cc9e5ea8dfdc2b28b0a23f60370d02e1395f88f40d1effd2cb86c4f"}, + {file = "uvloop-0.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1e5f2e2ff51aefe6c19ee98af12b4ae61f5be456cd24396953244a30880ad861"}, + {file = "uvloop-0.16.0.tar.gz", hash = "sha256:f74bc20c7b67d1c27c72601c78cf95be99d5c2cdd4514502b4f3eb0933ff1228"}, +] diff --git a/packages/plugins/minos-database-aiopg/poetry.toml b/packages/plugins/minos-database-aiopg/poetry.toml new file mode 100644 index 000000000..ab1033bd3 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/packages/plugins/minos-database-aiopg/pyproject.toml b/packages/plugins/minos-database-aiopg/pyproject.toml new file mode 100644 index 000000000..f98dd1d25 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/pyproject.toml @@ -0,0 +1,52 @@ +[tool.poetry] +name = "minos-database-aiopg" +version = "0.0.0" +description = "The aiopg plugin of the Minos Framework" +readme = "README.md" +repository = "https://github.com/minos-framework/minos-python" +homepage = "https://www.minos.run/" +authors = ["Minos Framework Devs "] +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", +] +keywords = [ + "clariteia", + "minos", + "microservice", + "saga", +] +packages = [ + { include = "minos" } +] +include = [ + "AUTHORS.md", + "HISTORY.md", + "LICENSE", +] + +[tool.poetry.dependencies] +python = "^3.9" +minos-microservice-common = "^0.6.0" +minos-microservice-networks = "^0.6.0" +minos-microservice-aggregate = "^0.6.0" +aiopg = "^1.2.1" +psycopg2-binary = "^2.9.3" + +[tool.poetry.dev-dependencies] +minos-microservice-common = { path = "../../core/minos-microservice-common", develop = true } +minos-microservice-networks = { path = "../../core/minos-microservice-networks", develop = true } +minos-microservice-aggregate = { path = "../../core/minos-microservice-aggregate", develop = true } +black = "^22.3" +isort = "^5.8.0" 
+pytest = "^7.0.1" +coverage = "^6.3" +flake8 = "^4.0.1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/packages/plugins/minos-database-aiopg/setup.cfg b/packages/plugins/minos-database-aiopg/setup.cfg new file mode 100644 index 000000000..dbb9ac849 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/setup.cfg @@ -0,0 +1,28 @@ +[coverage:run] +source = + minos + +[coverage:report] +exclude_lines = + pragma: no cover + raise NotImplementedError + if TYPE_CHECKING: + pass +precision = 2 + +[flake8] +filename = + ./minos/**/*.py, + ./tests/**/*.py, + ./examples/**/*.py +max-line-length = 120 +per-file-ignores = + ./**/__init__.py:F401,W391 + +[isort] +known_first_party=minos +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 1 +use_parentheses = True +line_length = 120 diff --git a/packages/plugins/minos-database-aiopg/tests/__init__.py b/packages/plugins/minos-database-aiopg/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_aiopg.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_clients.py similarity index 67% rename from packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_aiopg.py rename to packages/plugins/minos-database-aiopg/tests/test_aiopg/test_clients.py index 22683030f..efddd6269 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_clients/test_aiopg.py +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_clients.py @@ -16,23 +16,22 @@ ) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, - DatabaseLock, + ConnectionException, 
DatabaseOperation, IntegrityException, - UnableToConnectException, + ProgrammingException, ) -from minos.common.testing import ( - DatabaseMinosTestCase, +from minos.plugins.aiopg import ( + AiopgDatabaseClient, + AiopgDatabaseOperation, ) from tests.utils import ( - CommonTestCase, + AiopgTestCase, ) # noinspection SqlNoDataSourceInspection -class TestAiopgDatabaseClient(CommonTestCase, DatabaseMinosTestCase): +class TestAiopgDatabaseClient(AiopgTestCase): def setUp(self): super().setUp() self.operation = AiopgDatabaseOperation("SELECT * FROM information_schema.tables") @@ -81,7 +80,7 @@ async def test_connection(self): async def test_connection_raises(self): with patch.object(aiopg, "connect", new_callable=PropertyMock, side_effect=OperationalError): - with self.assertRaises(UnableToConnectException): + with self.assertRaises(ConnectionException): async with AiopgDatabaseClient.from_config(self.config): pass @@ -103,25 +102,6 @@ async def test_cursor_reset(self): await client.reset() self.assertIsNone(client.cursor) - async def test_lock(self): - op1 = AiopgDatabaseOperation("SELECT * FROM information_schema.tables", lock="foo") - client = AiopgDatabaseClient.from_config(self.config) - self.assertIsNone(client.lock) - async with client: - self.assertIsNone(client.lock) - await client.execute(op1) - self.assertIsInstance(client.lock, DatabaseLock) - - self.assertIsNone(client.lock) - - async def test_lock_reset(self): - op1 = AiopgDatabaseOperation("SELECT * FROM information_schema.tables", lock="foo") - async with AiopgDatabaseClient.from_config(self.config) as client: - await client.execute(op1) - self.assertIsInstance(client.lock, DatabaseLock) - await client.reset() - self.assertIsNone(client.lock) - async def test_execute(self): async with AiopgDatabaseClient.from_config(self.config) as client: with patch.object(Cursor, "execute") as execute_mock: @@ -131,36 +111,6 @@ async def test_execute(self): execute_mock.call_args_list, ) - async def 
test_execute_with_lock(self): - op1 = AiopgDatabaseOperation("SELECT * FROM information_schema.tables", lock="foo") - with patch.object(DatabaseLock, "acquire") as enter_lock_mock: - with patch.object(DatabaseLock, "release") as exit_lock_mock: - async with AiopgDatabaseClient.from_config(self.config) as client: - await client.execute(op1) - self.assertEqual(1, enter_lock_mock.call_count) - self.assertEqual(0, exit_lock_mock.call_count) - enter_lock_mock.reset_mock() - exit_lock_mock.reset_mock() - self.assertEqual(0, enter_lock_mock.call_count) - self.assertEqual(1, exit_lock_mock.call_count) - - async def test_execute_with_lock_multiple(self): - op1 = AiopgDatabaseOperation("SELECT * FROM information_schema.tables", lock="foo") - op2 = AiopgDatabaseOperation("SELECT * FROM information_schema.tables", lock="bar") - async with AiopgDatabaseClient.from_config(self.config) as client: - self.assertIsNone(client.lock) - - await client.execute(op1) - foo_lock = client.lock - self.assertIsInstance(foo_lock, DatabaseLock) - - await client.execute(op1) - self.assertEqual(foo_lock, client.lock) - - await client.execute(op2) - self.assertNotEqual(foo_lock, client.lock) - self.assertIsInstance(client.lock, DatabaseLock) - async def test_execute_raises_unsupported(self): class _DatabaseOperation(DatabaseOperation): """For testing purposes.""" @@ -181,6 +131,11 @@ async def test_fetch_one(self): observed = await client.fetch_one() self.assertIsInstance(observed, tuple) + async def test_fetch_one_raises(self): + async with AiopgDatabaseClient.from_config(self.config) as client: + with self.assertRaises(ProgrammingException): + await client.fetch_one() + async def test_fetch_all(self): async with AiopgDatabaseClient.from_config(self.config) as client: await client.execute(self.operation) diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/__init__.py new file mode 
100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_factory.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_factory.py new file mode 100644 index 000000000..9c33d3906 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_factory.py @@ -0,0 +1,76 @@ +import unittest +from uuid import ( + uuid4, +) + +from minos.aggregate import ( + Action, + EventDatabaseOperationFactory, +) +from minos.common import ( + ComposedDatabaseOperation, + current_datetime, +) +from minos.plugins.aiopg import ( + AiopgDatabaseOperation, + AiopgEventDatabaseOperationFactory, +) + + +class TestAiopgEventDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgEventDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(issubclass(AiopgEventDatabaseOperationFactory, EventDatabaseOperationFactory)) + + def test_build_table_name(self): + self.assertEqual("aggregate_event", self.factory.build_table_name()) + + def test_build_create(self): + operation = self.factory.build_create() + self.assertIsInstance(operation, ComposedDatabaseOperation) + self.assertEqual(3, len(operation.operations)) + for sub in operation.operations: + self.assertIsInstance(sub, AiopgDatabaseOperation) + + def 
test_build_submit(self): + operation = self.factory.build_submit( + transaction_uuids=[uuid4(), uuid4()], + uuid=uuid4(), + action=Action.CREATE, + name="Foo", + version=3, + data=bytes(), + created_at=current_datetime(), + transaction_uuid=uuid4(), + lock="foo", + ) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query(self): + operation = self.factory.build_query( + uuid=uuid4(), + name="Foo", + version=423453, + version_lt=234, + version_gt=342, + version_le=5433, + version_ge=897, + id=234, + id_lt=34, + id_gt=543, + id_ge=123, + transaction_uuid=uuid4(), + transaction_uuid_ne=uuid4(), + transaction_uuid_in=[uuid4(), uuid4(), uuid4()], + ) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query_offset(self): + operation = self.factory.build_query_offset() + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_pg.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_repositories.py similarity index 61% rename from packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_pg.py rename to packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_repositories.py index 68618a05b..bf57368d0 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_events/test_repositories/test_pg.py +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_events/test_repositories.py @@ -4,28 +4,28 @@ DatabaseEventRepository, EventRepository, ) +from minos.aggregate.testing import ( + EventRepositoryTestCase, +) from minos.common import ( - AiopgDatabaseClient, - AiopgDatabaseOperation, DatabaseClientPool, ) -from minos.common.testing import ( - DatabaseMinosTestCase, 
+from minos.plugins.aiopg import ( + AiopgDatabaseClient, + AiopgDatabaseOperation, ) -from tests.testcases import ( - EventRepositorySelectTestCase, - EventRepositorySubmitTestCase, +from tests.utils import ( + AiopgTestCase, ) # noinspection SqlNoDataSourceInspection -class TestDatabaseEventRepositorySubmit(EventRepositorySubmitTestCase, DatabaseMinosTestCase): +class TestDatabaseEventRepositorySubmit(AiopgTestCase, EventRepositoryTestCase): __test__ = True - @staticmethod - def build_event_repository() -> EventRepository: + def build_event_repository(self) -> EventRepository: """Fort testing purposes.""" - return DatabaseEventRepository() + return DatabaseEventRepository.from_config(self.config) def test_constructor(self): pool = DatabaseClientPool.from_config(self.config) @@ -38,7 +38,7 @@ def test_from_config(self): self.assertIsInstance(repository.database_pool, DatabaseClientPool) async def test_setup(self): - async with AiopgDatabaseClient(**self.config.get_default_database()) as client: + async with AiopgDatabaseClient.from_config(self.config) as client: operation = AiopgDatabaseOperation( "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'aggregate_event');" ) @@ -47,14 +47,5 @@ async def test_setup(self): self.assertTrue(response) -class TestDatabaseEventRepositorySelect(EventRepositorySelectTestCase, DatabaseMinosTestCase): - __test__ = True - - @staticmethod - def build_event_repository() -> EventRepository: - """Fort testing purposes.""" - return DatabaseEventRepository() - - if __name__ == "__main__": unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_factory.py 
b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_factory.py new file mode 100644 index 000000000..eccd8d2f1 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_factory.py @@ -0,0 +1,79 @@ +import unittest +from uuid import ( + uuid4, +) + +from minos.aggregate import ( + Condition, + Ordering, + SnapshotDatabaseOperationFactory, +) +from minos.common import ( + ComposedDatabaseOperation, + current_datetime, +) +from minos.plugins.aiopg import ( + AiopgDatabaseOperation, + AiopgSnapshotDatabaseOperationFactory, +) + + +class TestAiopgSnapshotDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgSnapshotDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(issubclass(AiopgSnapshotDatabaseOperationFactory, SnapshotDatabaseOperationFactory)) + + def test_build_table_name(self): + self.assertEqual("snapshot", self.factory.build_table_name()) + + def test_build_offset_table_name(self): + self.assertEqual("snapshot_aux_offset", self.factory.build_offset_table_name()) + + def test_build_create(self): + operation = self.factory.build_create() + self.assertIsInstance(operation, ComposedDatabaseOperation) + self.assertEqual(3, len(operation.operations)) + for sub in operation.operations: + self.assertIsInstance(sub, AiopgDatabaseOperation) + + def test_build_build_delete(self): + operation = self.factory.build_delete({uuid4(), uuid4()}) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_submit(self): + operation = self.factory.build_submit( + uuid=uuid4(), + name="Foo", + version=34243, + schema=bytes(), + data={"foo": "bar"}, + created_at=current_datetime(), + updated_at=current_datetime(), + transaction_uuid=uuid4(), + ) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query(self): + operation = self.factory.build_query( + 
name="Foo", + condition=Condition.EQUAL("foo", "bar"), + ordering=Ordering.ASC("foobar"), + limit=2342, + transaction_uuids=[uuid4(), uuid4()], + exclude_deleted=True, + ) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_submit_offset(self): + operation = self.factory.build_submit_offset(56) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query_offset(self): + operation = self.factory.build_query_offset() + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_queries.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_queries.py similarity index 97% rename from packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_queries.py rename to packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_queries.py index bc3403e5b..4f5a9f351 100644 --- a/packages/core/minos-microservice-aggregate/tests/test_aggregate/test_snapshots/test_pg/test_queries.py +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_queries.py @@ -12,29 +12,29 @@ ) from psycopg2.sql import ( SQL, + Identifier, Literal, Placeholder, ) from minos.aggregate import ( IS_REPOSITORY_SERIALIZATION_CONTEXT_VAR, - AiopgSnapshotQueryDatabaseOperationBuilder, Condition, Ordering, ) from minos.common import ( NULL_UUID, - AiopgDatabaseClient, ) -from minos.common.testing import ( - DatabaseMinosTestCase, +from minos.plugins.aiopg import ( + AiopgDatabaseClient, + AiopgSnapshotQueryDatabaseOperationBuilder, ) from tests.utils import ( - AggregateTestCase, + AiopgTestCase, ) -class TestAiopgSnapshotQueryDatabaseOperationBuilder(AggregateTestCase, DatabaseMinosTestCase): +class 
TestAiopgSnapshotQueryDatabaseOperationBuilder(AiopgTestCase): def setUp(self) -> None: super().setUp() self.classname = "path.to.Product" @@ -44,7 +44,7 @@ def setUp(self) -> None: } self.base_select = AiopgSnapshotQueryDatabaseOperationBuilder._SELECT_ENTRIES_QUERY.format( from_parts=AiopgSnapshotQueryDatabaseOperationBuilder._SELECT_TRANSACTION_CHUNK.format( - index=Literal(1), transaction_uuid=Placeholder("transaction_uuid_1") + index=Literal(1), transaction_uuid=Placeholder("transaction_uuid_1"), table_name=Identifier("snapshot") ) ) @@ -100,10 +100,14 @@ async def test_build_with_transactions(self): from_parts=SQL(" UNION ALL ").join( [ AiopgSnapshotQueryDatabaseOperationBuilder._SELECT_TRANSACTION_CHUNK.format( - index=Literal(1), transaction_uuid=Placeholder("transaction_uuid_1") + index=Literal(1), + transaction_uuid=Placeholder("transaction_uuid_1"), + table_name=Identifier("snapshot"), ), AiopgSnapshotQueryDatabaseOperationBuilder._SELECT_TRANSACTION_CHUNK.format( - index=Literal(2), transaction_uuid=Placeholder("transaction_uuid_2") + index=Literal(2), + transaction_uuid=Placeholder("transaction_uuid_2"), + table_name=Identifier("snapshot"), ), ] ) diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_repository.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_repository.py new file mode 100644 index 000000000..791e82b7c --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_snapshots/test_repository.py @@ -0,0 +1,52 @@ +import unittest + +from minos.aggregate import ( + DatabaseSnapshotRepository, +) +from minos.aggregate.testing import ( + SnapshotRepositoryTestCase, +) +from minos.plugins.aiopg import ( + AiopgDatabaseClient, + AiopgDatabaseOperation, +) +from tests.utils import ( + AiopgTestCase, +) + + +# noinspection SqlNoDataSourceInspection +class 
TestDatabaseSnapshotRepository(AiopgTestCase, SnapshotRepositoryTestCase): + __test__ = True + + def build_snapshot_repository(self): + return DatabaseSnapshotRepository.from_config(self.config) + + async def test_setup_snapshot_table(self): + async with AiopgDatabaseClient.from_config(self.config) as client: + operation = AiopgDatabaseOperation( + "SELECT EXISTS (SELECT FROM pg_tables WHERE schemaname = 'public' AND tablename = 'snapshot');" + ) + await client.execute(operation) + observed = (await client.fetch_one())[0] + self.assertEqual(True, observed) + + async def test_setup_snapshot_aux_offset_table(self): + async with AiopgDatabaseClient.from_config(self.config) as client: + operation = AiopgDatabaseOperation( + "SELECT EXISTS (SELECT FROM pg_tables WHERE " + "schemaname = 'public' AND tablename = 'snapshot_aux_offset');" + ) + await client.execute(operation) + observed = (await client.fetch_one())[0] + self.assertEqual(True, observed) + + async def test_is_synced(self): + await self.populate() + self.assertFalse(await self.snapshot_repository.is_synced(SnapshotRepositoryTestCase.Car)) + await self.snapshot_repository.synchronize() + self.assertTrue(await self.snapshot_repository.is_synced(SnapshotRepositoryTestCase.Car)) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_factory.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_factory.py new file mode 100644 index 000000000..d6559ae1d --- /dev/null +++ 
b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_factory.py @@ -0,0 +1,69 @@ +import unittest +from uuid import ( + uuid4, +) + +from minos.aggregate import ( + TransactionDatabaseOperationFactory, + TransactionStatus, +) +from minos.common import ( + ComposedDatabaseOperation, + current_datetime, +) +from minos.plugins.aiopg import ( + AiopgDatabaseOperation, + AiopgTransactionDatabaseOperationFactory, +) + + +class TestAiopgTransactionDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgTransactionDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(issubclass(AiopgTransactionDatabaseOperationFactory, TransactionDatabaseOperationFactory)) + + def test_build_table_name(self): + self.assertEqual("aggregate_transaction", self.factory.build_table_name()) + + def test_build_create(self): + operation = self.factory.build_create() + self.assertIsInstance(operation, ComposedDatabaseOperation) + self.assertEqual(3, len(operation.operations)) + for sub in operation.operations: + self.assertIsInstance(sub, AiopgDatabaseOperation) + + def test_build_submit(self): + operation = self.factory.build_submit( + uuid=uuid4(), + destination_uuid=uuid4(), + status=TransactionStatus.COMMITTED, + event_offset=234234, + ) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query(self): + operation = self.factory.build_query( + uuid=uuid4(), + uuid_ne=uuid4(), + uuid_in={uuid4(), uuid4()}, + destination_uuid=uuid4(), + status=TransactionStatus.COMMITTED, + status_in={TransactionStatus.REJECTED, TransactionStatus.RESERVED}, + event_offset=234, + event_offset_lt=24342, + event_offset_gt=3424, + event_offset_le=2342, + event_offset_ge=234342, + updated_at=current_datetime(), + updated_at_lt=current_datetime(), + updated_at_gt=current_datetime(), + updated_at_le=current_datetime(), + updated_at_ge=current_datetime(), + ) + 
self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_repository.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_repository.py new file mode 100644 index 000000000..c86eab02a --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_aggregate/test_transactions/test_repository.py @@ -0,0 +1,50 @@ +import unittest + +from minos.aggregate import ( + DatabaseTransactionRepository, + TransactionRepository, +) +from minos.aggregate.testing import ( + TransactionRepositoryTestCase, +) +from minos.common import ( + DatabaseClientPool, +) +from minos.plugins.aiopg import ( + AiopgDatabaseClient, + AiopgDatabaseOperation, +) +from tests.utils import ( + AiopgTestCase, +) + + +# noinspection SqlNoDataSourceInspection +class TestDatabaseTransactionRepository(AiopgTestCase, TransactionRepositoryTestCase): + __test__ = True + + def build_transaction_repository(self) -> TransactionRepository: + return DatabaseTransactionRepository.from_config(self.config) + + def test_constructor(self): + pool = DatabaseClientPool.from_config(self.config) + repository = DatabaseTransactionRepository(pool) + self.assertIsInstance(repository, DatabaseTransactionRepository) + self.assertEqual(pool, repository.database_pool) + + def test_from_config(self): + repository = DatabaseTransactionRepository.from_config(self.config) + self.assertIsInstance(repository.database_pool, DatabaseClientPool) + + async def test_setup(self): + async with AiopgDatabaseClient.from_config(self.config) as client: + operation = AiopgDatabaseOperation( + "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'aggregate_transaction');" + ) + await client.execute(operation) + response = (await client.fetch_one())[0] + 
self.assertTrue(response) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_locks.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_locks.py new file mode 100644 index 000000000..6f518a290 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_locks.py @@ -0,0 +1,29 @@ +import unittest + +from minos.common import ( + LockDatabaseOperationFactory, +) +from minos.plugins.aiopg import ( + AiopgDatabaseOperation, + AiopgLockDatabaseOperationFactory, +) + + +class TestAiopgLockDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgLockDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(issubclass(AiopgLockDatabaseOperationFactory, LockDatabaseOperationFactory)) + + def test_build_acquire(self): + operation = self.factory.build_acquire(56) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_release(self): + operation = self.factory.build_release(56) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_managements.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_managements.py new file mode 100644 index 000000000..fe0996797 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_common/test_managements.py @@ -0,0 +1,29 @@ +import unittest + +from minos.common import ( + ManagementDatabaseOperationFactory, +) +from 
minos.plugins.aiopg import ( + AiopgDatabaseOperation, + AiopgManagementDatabaseOperationFactory, +) + + +class TestAiopgManagementDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgManagementDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(issubclass(AiopgManagementDatabaseOperationFactory, ManagementDatabaseOperationFactory)) + + def test_build_create(self): + operation = self.factory.build_create("foo") + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_delete(self): + operation = self.factory.build_delete("foo") + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_collections/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_collections/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_collections/test_queues.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_collections/test_queues.py new file mode 100644 index 000000000..c90a12de5 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_collections/test_queues.py @@ -0,0 +1,60 @@ +import unittest + +from minos.networks import ( + BrokerQueueDatabaseOperationFactory, +) +from minos.plugins.aiopg import ( + AiopgBrokerQueueDatabaseOperationFactory, + AiopgDatabaseOperation, +) + + +class 
_BrokerQueueDatabaseOperationFactory(AiopgBrokerQueueDatabaseOperationFactory): + """For testing purposes.""" + + def build_table_name(self) -> str: + """For testing purposes.""" + return "foo" + + +class TestAiopgBrokerQueueDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = _BrokerQueueDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue(AiopgBrokerQueueDatabaseOperationFactory, BrokerQueueDatabaseOperationFactory) + + def test_build_table_name(self): + self.assertEqual("foo", self.factory.build_table_name()) + + def test_build_create(self): + operation = self.factory.build_create() + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_mark_processed(self): + operation = self.factory.build_mark_processed(id_=56) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_delete(self): + operation = self.factory.build_delete(id_=56) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_mark_processing(self): + operation = self.factory.build_mark_processing(ids={56, 78}) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_count(self): + operation = self.factory.build_count(retry=3) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_submit(self): + operation = self.factory.build_submit(topic="foo", data=bytes()) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_query(self): + operation = self.factory.build_query(retry=3, records=1000) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_publishers/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_publishers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_publishers/test_queues.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_publishers/test_queues.py new file mode 100644 index 000000000..b0f90fe21 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_publishers/test_queues.py @@ -0,0 +1,27 @@ +import unittest + +from minos.networks import ( + BrokerPublisherQueueDatabaseOperationFactory, +) +from minos.plugins.aiopg import ( + AiopgBrokerPublisherQueueDatabaseOperationFactory, + AiopgBrokerQueueDatabaseOperationFactory, +) + + +class TestAiopgBrokerPublisherQueueDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgBrokerPublisherQueueDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue( + AiopgBrokerPublisherQueueDatabaseOperationFactory, + (BrokerPublisherQueueDatabaseOperationFactory, AiopgBrokerQueueDatabaseOperationFactory), + ) + + def test_build_table_name(self): + self.assertEqual("broker_publisher_queue", self.factory.build_table_name()) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/__init__.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_queues.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_queues.py new file mode 100644 index 000000000..4367230ca --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_queues.py @@ -0,0 +1,38 @@ +import unittest + +from minos.networks import 
( + BrokerSubscriberQueueDatabaseOperationFactory, +) +from minos.plugins.aiopg import ( + AiopgBrokerQueueDatabaseOperationFactory, + AiopgBrokerSubscriberQueueDatabaseOperationFactory, + AiopgDatabaseOperation, +) + + +class TestAiopgBrokerSubscriberQueueDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgBrokerSubscriberQueueDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue( + issubclass( + AiopgBrokerSubscriberQueueDatabaseOperationFactory, + (BrokerSubscriberQueueDatabaseOperationFactory, AiopgBrokerQueueDatabaseOperationFactory), + ) + ) + + def test_build_table_name(self): + self.assertEqual("broker_subscriber_queue", self.factory.build_table_name()) + + def test_build_build_count(self): + operation = self.factory.build_count(retry=3, topics={"foo", "bar"}) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + def test_build_build_query(self): + operation = self.factory.build_query(retry=3, records=100, topics={"foo", "bar"}) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_validators.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_validators.py new file mode 100644 index 000000000..f3d20a54f --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_factories/test_networks/test_subscribers/test_validators.py @@ -0,0 +1,46 @@ +import unittest +from uuid import ( + uuid4, +) + +from minos.common import ( + ComposedDatabaseOperation, +) +from minos.networks import ( + BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, +) +from minos.plugins.aiopg import ( + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + AiopgDatabaseOperation, +) + + +class 
TestAiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory(unittest.TestCase): + def setUp(self) -> None: + self.factory = AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory() + + def test_is_subclass(self): + self.assertTrue( + issubclass( + AiopgBrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + BrokerSubscriberDuplicateValidatorDatabaseOperationFactory, + ) + ) + + def test_build_table_name(self): + self.assertEqual("broker_subscriber_processed_messages", self.factory.build_table_name()) + + def test_build_create(self): + operation = self.factory.build_create() + self.assertIsInstance(operation, ComposedDatabaseOperation) + self.assertEqual(2, len(operation.operations)) + for sub in operation.operations: + self.assertIsInstance(sub, AiopgDatabaseOperation) + + def test_build_submit(self): + operation = self.factory.build_submit("foo", uuid4()) + self.assertIsInstance(operation, AiopgDatabaseOperation) + + +if __name__ == "__main__": + unittest.main() diff --git a/packages/core/minos-microservice-common/tests/test_common/test_database/test_operations/test_aiopg.py b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_operations.py similarity index 94% rename from packages/core/minos-microservice-common/tests/test_common/test_database/test_operations/test_aiopg.py rename to packages/plugins/minos-database-aiopg/tests/test_aiopg/test_operations.py index 766105ac0..531fc4d52 100644 --- a/packages/core/minos-microservice-common/tests/test_common/test_database/test_operations/test_aiopg.py +++ b/packages/plugins/minos-database-aiopg/tests/test_aiopg/test_operations.py @@ -1,9 +1,11 @@ import unittest from minos.common import ( - AiopgDatabaseOperation, DatabaseOperation, ) +from minos.plugins.aiopg import ( + AiopgDatabaseOperation, +) class TestAiopgDatabaseOperation(unittest.TestCase): diff --git a/packages/plugins/minos-database-aiopg/tests/test_config.yml b/packages/plugins/minos-database-aiopg/tests/test_config.yml new 
file mode 100644 index 000000000..3f2398a08 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/test_config.yml @@ -0,0 +1,9 @@ +version: 2 +databases: + default: + client: minos.plugins.aiopg.AiopgDatabaseClient + database: order_db + user: minos + password: min0s + host: localhost + port: 5432 \ No newline at end of file diff --git a/packages/plugins/minos-database-aiopg/tests/utils.py b/packages/plugins/minos-database-aiopg/tests/utils.py new file mode 100644 index 000000000..f3670c839 --- /dev/null +++ b/packages/plugins/minos-database-aiopg/tests/utils.py @@ -0,0 +1,88 @@ +from pathlib import ( + Path, +) + +from minos.aggregate import ( + InMemoryEventRepository, + InMemorySnapshotRepository, + InMemoryTransactionRepository, +) +from minos.common import ( + DatabaseClientPool, + Lock, + LockPool, + PoolFactory, +) +from minos.common.testing import ( + DatabaseMinosTestCase, +) +from minos.networks import ( + BrokerClientPool, + InMemoryBrokerPublisher, + InMemoryBrokerSubscriberBuilder, +) + +BASE_PATH = Path(__file__).parent +CONFIG_FILE_PATH = BASE_PATH / "test_config.yml" + + +class AiopgTestCase(DatabaseMinosTestCase): + def get_config_file_path(self) -> Path: + return CONFIG_FILE_PATH + + def get_injections(self): + pool_factory = PoolFactory.from_config( + self.config, + default_classes={ + "broker": BrokerClientPool, + "lock": FakeLockPool, + "database": DatabaseClientPool, + }, + ) + broker_publisher = InMemoryBrokerPublisher() + broker_subscriber_builder = InMemoryBrokerSubscriberBuilder() + transaction_repository = InMemoryTransactionRepository( + lock_pool=pool_factory.get_pool("lock"), + ) + event_repository = InMemoryEventRepository( + broker_publisher=broker_publisher, + transaction_repository=transaction_repository, + lock_pool=pool_factory.get_pool("lock"), + ) + snapshot_repository = InMemorySnapshotRepository( + event_repository=event_repository, + transaction_repository=transaction_repository, + ) + return [ + pool_factory, + 
broker_publisher, + broker_subscriber_builder, + transaction_repository, + event_repository, + snapshot_repository, + ] + + +class FakeLock(Lock): + """For testing purposes.""" + + def __init__(self, key=None, *args, **kwargs): + if key is None: + key = "fake" + super().__init__(key, *args, **kwargs) + + async def acquire(self) -> None: + """For testing purposes.""" + + async def release(self): + """For testing purposes.""" + + +class FakeLockPool(LockPool): + """For testing purposes.""" + + async def _create_instance(self): + return FakeLock() + + async def _destroy_instance(self, instance) -> None: + """For testing purposes."""