From 149febe7ab191a369d89876cd2e0a37da0d64837 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 11 Mar 2020 18:49:20 +0000 Subject: [PATCH 01/50] Increased version number to 8.2.2dev0. --- eventsourcing/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/__init__.py b/eventsourcing/__init__.py index 1f1024b19..129aeba2a 100644 --- a/eventsourcing/__init__.py +++ b/eventsourcing/__init__.py @@ -1 +1 @@ -__version__ = "8.2.1" +__version__ = "8.2.2dev0" From 9afb0d95e33a9c9ec378cc051d80a2e16f60b378 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 11 Mar 2020 20:07:19 +0000 Subject: [PATCH 02/50] Fixed URL. --- docs/topics/support.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/support.rst b/docs/topics/support.rst index a0a3214f4..f6832edae 100644 --- a/docs/topics/support.rst +++ b/docs/topics/support.rst @@ -58,5 +58,5 @@ The library has a growing community that may be able to help. Support the project =================== -Please follow the `Sponsor button `__ +Please follow the `Sponsor button `__ on the GitHub project for options. From df6f4c0ce7694c5040c4d3e25a476077b94ca1ee Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 1 Apr 2020 12:47:17 +0100 Subject: [PATCH 03/50] Reformatted runner.py. --- eventsourcing/system/runner.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/eventsourcing/system/runner.py b/eventsourcing/system/runner.py index 38a1e3a05..cbc5430c6 100644 --- a/eventsourcing/system/runner.py +++ b/eventsourcing/system/runner.py @@ -10,12 +10,10 @@ Deque, Dict, Generic, - Iterable, List, Optional, TYPE_CHECKING, Type, - TypeVar, Union, no_type_check, ) @@ -25,17 +23,22 @@ ProcessApplication, PromptToQuit, ) -from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure, Prompt, is_prompt_to_pull, \ - PromptToPull +from eventsourcing.application.simple import ( + ApplicationWithConcreteInfrastructure, + Prompt, + PromptToPull, + is_prompt_to_pull, +) from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( CausalDependencyFailed, EventSourcingError, OperationalError, + ProgrammingError, RecordConflictError, - ProgrammingError) -from eventsourcing.system.definition import System, AbstractSystemRunner +) +from eventsourcing.system.definition import AbstractSystemRunner, System from eventsourcing.whitehead import T DEFAULT_POLL_INTERVAL = 5 @@ -130,7 +133,9 @@ def run_followers(self, prompt: Prompt) -> None: break else: if isinstance(prompt, PromptToPull): - downstream_names = self.system.downstream_names[prompt.process_name] + downstream_names = self.system.downstream_names[ + prompt.process_name + ] for downstream_name in downstream_names: downstream_process = self.processes[downstream_name] downstream_process.run(prompt) @@ -798,9 +803,7 @@ def run(self) -> None: try: # Get all notifications. for upstream_name in self.app.readers: - notifications = list( - self.app.read_reader(upstream_name) - ) + notifications = list(self.app.read_reader(upstream_name)) all_notifications.append((upstream_name, notifications)) except: @@ -819,9 +822,7 @@ def run(self) -> None: # Process all notifications. 
for upstream_name, notifications in all_notifications: for notification in notifications: - event = self.app.get_event_from_notification( - notification - ) + event = self.app.get_event_from_notification(notification) self.app.process_upstream_event( event, notification["id"], upstream_name ) From b94d8e0a5fd5df5291b8b1184bbb0fb2d95162f3 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 1 Apr 2020 12:48:37 +0100 Subject: [PATCH 04/50] Added test to make sure __notifiable__ domain event class feature isn't being used without set_notification_ids (just to be helpful). --- eventsourcing/application/simple.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index 211fc8ba2..f62fafa30 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -495,6 +495,10 @@ def construct_event_records( setattr( event_record, notification_id_name, "event-not-notifiable" ) + else: + if any((not e.__notifiable__ for e in pending_events)): + raise Exception("Can't set __notifiable__=False withut " + "set_notification_ids=True ") if self.use_causal_dependencies: assert hasattr(record_manager.record_class, "causal_dependencies") From 5b6d40b202575a01cb0b2770bb59de8ef69b6526 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Thu, 16 Apr 2020 23:01:48 +0100 Subject: [PATCH 05/50] Changed doc "apply_policy_to_generated_events = False" to "True". --- docs/topics/process.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/process.rst b/docs/topics/process.rst index ef78b4616..db7889724 100644 --- a/docs/topics/process.rst +++ b/docs/topics/process.rst @@ -828,7 +828,7 @@ process application follow itself in a system. .. code:: python class ReflexiveApplication(ProcessApplication): - apply_policy_to_generated_events = False + apply_policy_to_generated_events = True This will have no effect unless the policy is written to respond to the types From 569e658d7ee0d867046a2205a741af832d1981ec Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 18 Apr 2020 21:59:55 +0100 Subject: [PATCH 06/50] Adjusted error message. --- eventsourcing/application/simple.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index f62fafa30..13193303f 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -253,16 +253,17 @@ def persistence_policy(self) -> PersistencePolicy: return self._persistence_policy def _raise_on_missing_infrastructure(self, what_is_missing): - msg = "Application class %s does not have a %s." % ( - type(self).__name__, - what_is_missing, - ) if not isinstance(self, ApplicationWithConcreteInfrastructure): - msg += ( - " and is not an ApplicationWithConcreteInfrastructure." + msg = ( + "Application class %s is not an subclass of %s." " Try using or inheriting from or mixin() an application" " class with concrete infrastructure such as SQLAlchemyApplication" " or DjangoApplication or AxonApplication." 
+ ) % (type(self), ApplicationWithConcreteInfrastructure) + else: + msg = "Application class %s does not have a %s" % ( + type(self).__name__, + what_is_missing, ) raise ProgrammingError(msg) From f5c6befd12728ab324b46e5af2107af4306b12c9 Mon Sep 17 00:00:00 2001 From: Alexandre Fedossov Date: Thu, 23 Apr 2020 01:55:57 +0300 Subject: [PATCH 07/50] Apply YAML style with indent of two spaces --- .editorconfig | 3 ++ .readthedocs.yml | 8 ++--- .travis.yml | 16 ++++----- dev/docker-compose.yaml | 76 ++++++++++++++++++++--------------------- 4 files changed, 53 insertions(+), 50 deletions(-) diff --git a/.editorconfig b/.editorconfig index 64e66ea60..fd8e278cc 100644 --- a/.editorconfig +++ b/.editorconfig @@ -39,3 +39,6 @@ indent_style = tab # Batch files use tabs for indentation [*.bat] indent_style = tab + +[*.{yml, yaml}] +indent_size = 2 diff --git a/.readthedocs.yml b/.readthedocs.yml index f614f8699..b752bb3d5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,5 +1,5 @@ python: - version: 3 - pip_install: true - extra_requirements: - - docs + version: 3 + pip_install: true + extra_requirements: + - docs diff --git a/.travis.yml b/.travis.yml index 68c6a9b31..578231bfc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,9 +20,9 @@ before_install: - sudo apt-get -qq update - sudo apt-get install -y openjdk-8-jdk - sudo apt-get install -y icedtea-8-plugin -# - sudo update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64 + # - sudo update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64 - sudo update-java-alternatives -v --set java-1.8.0-openjdk-amd64 -# - source /etc/environment + # - source /etc/environment - export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64 - java -version @@ -68,16 +68,16 @@ env: - POSTGRES_USER=postgres script: -- if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then coverage run --concurrency=multiprocessing -m unittest discover eventsourcing.tests -v; - fi + fi -- if [[ $TRAVIS_PYTHON_VERSION == pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION == pypy* ]]; then python -m unittest discover eventsourcing.tests -v; - fi + fi after_success: -- if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then coverage combine; coveralls; - fi + fi diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml index 529a62dbb..c6101af6b 100644 --- a/dev/docker-compose.yaml +++ b/dev/docker-compose.yaml @@ -5,45 +5,45 @@ # first time the containers are run, so connections might not work immediately. version: '2' services: - eventsourcing_requirements: - build: - context: .. - dockerfile: ./dev/Dockerfile_eventsourcing_requirements - image: "eventsourcing_requirements:latest" - volumes: - - .:/app - links: - - service_cassandra - - service_mysql - - service_postgres - - service_redis - environment: - CASSANDRA_HOSTS: service_cassandra - MYSQL_HOST: service_mysql - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - POSTGRES_HOST: service_postgres - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - REDIS_HOST: service_redis + eventsourcing_requirements: + build: + context: .. 
+ dockerfile: ./dev/Dockerfile_eventsourcing_requirements + image: "eventsourcing_requirements:latest" + volumes: + - .:/app + links: + - service_cassandra + - service_mysql + - service_postgres + - service_redis + environment: + CASSANDRA_HOSTS: service_cassandra + MYSQL_HOST: service_mysql + MYSQL_USER: eventsourcing + MYSQL_PASSWORD: eventsourcing + POSTGRES_HOST: service_postgres + POSTGRES_USER: eventsourcing + POSTGRES_PASSWORD: eventsourcing + REDIS_HOST: service_redis - service_cassandra: - image: "cassandra:latest" + service_cassandra: + image: "cassandra:latest" - service_mysql: - image: "mysql:latest" - environment: - MYSQL_ROOT_PASSWORD: eventsourcing - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - MYSQL_DATABASE: eventsourcing + service_mysql: + image: "mysql:latest" + environment: + MYSQL_ROOT_PASSWORD: eventsourcing + MYSQL_USER: eventsourcing + MYSQL_PASSWORD: eventsourcing + MYSQL_DATABASE: eventsourcing - service_postgres: - image: "postgres:latest" - environment: - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - POSTGRES_DB: eventsourcing + service_postgres: + image: "postgres:latest" + environment: + POSTGRES_USER: eventsourcing + POSTGRES_PASSWORD: eventsourcing + POSTGRES_DB: eventsourcing - service_redis: - image: "redis:latest" + service_redis: + image: "redis:latest" From e4e3262518a1ba3260d2ba9318633e00be0d49e5 Mon Sep 17 00:00:00 2001 From: Alexandre Fedossov Date: Thu, 23 Apr 2020 04:39:27 +0300 Subject: [PATCH 08/50] Updated dockerization for local development --- .dockerignore | 73 ++++++++++++++++ .env | 16 ++++ .travis.yml | 4 - Makefile | 46 ++++++++++ dev/.env | 1 + dev/Dockerfile_eventsourcing_requirements | 20 ----- dev/docker-compose.yaml | 82 +++++++++--------- docs/topics/contributing.rst | 101 ++++++++++++++++++++++ setup.py | 3 + 9 files changed, 282 insertions(+), 64 deletions(-) create mode 100644 .dockerignore create mode 100644 .env create mode 100644 Makefile create mode 120000 dev/.env delete mode 100644 dev/Dockerfile_eventsourcing_requirements diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..3ae3c3fdd --- /dev/null +++ b/.dockerignore @@ -0,0 +1,73 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.coveragerc +.cache +nosetests.xml +coverage.xml +*,cover + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Other ignores +.github/ +.idea/ +.mypy_cache/ +.pytest_cache/ +.editorconfig +.env +.travis.yml +AUTHORS +CODE_OF_CONDUCT.md +dev +eventsourcing/tests/djangoproject/db.sqlite3 +README.md +README_example_with_axon.md diff --git a/.env b/.env new file mode 100644 index 000000000..d831f5ae9 --- /dev/null +++ b/.env @@ -0,0 +1,16 @@ +COMPOSE_FILE=dev/docker-compose.yaml +COMPOSE_PROJECT_NAME=eventsourcing + +CASSANDRA_HOSTS=127.0.0.1 + +MYSQL_HOST=127.0.0.1 +MYSQL_DATABASE=eventsourcing +MYSQL_PASSWORD=eventsourcing +MYSQL_ROOT_PASSWORD=eventsourcing_root +MYSQL_USER=eventsourcing + +POSTGRES_HOST=127.0.0.1 +POSTGRES_PASSWORD=eventsourcing +POSTGRES_USER=eventsourcing + +REDIS_HOST=127.0.0.1 diff --git a/.travis.yml b/.travis.yml index 578231bfc..4ef67ecdb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -56,10 +56,6 @@ install: - python --version - pip install -U pip wheel - CASS_DRIVER_NO_CYTHON=1 pip install -e .[testing] - - pip install pymysql - - pip install mysql-connector-python-rf - - pip install python-coveralls - - pip install -U "coverage<5.0.0" # v5 is incompatible ATM. env: global: diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..ae6523bcf --- /dev/null +++ b/Makefile @@ -0,0 +1,46 @@ +.EXPORT_ALL_VARIABLES: +DOTENV_FILE ?= .env + +-include $(DOTENV_FILE) + +.PHONY: install +install: + CASS_DRIVER_NO_CYTHON=1 + @pip install -e ".[cassandra,sqlalchemy,axonserver,axon,ray,django,testing,dev,docs]" + +.PHONY: docker-pull +docker-pull: + @docker-compose pull + +.PHONY: docker-up +docker-up: + @docker-compose up -d + @docker-compose ps + +.PHONY: docker-stop +docker-stop: + @docker-compose stop + +.PHONY: docker-down +docker-down: + @docker-compose down -v --remove-orphans + +.PHONY: docker-logs +docker-logs: + @docker-compose logs --follow --tail=1000 + + +.PHONY: test +test: + @coverage run \ + --concurrency=multiprocessing \ + -m unittest discover \ + eventsourcing.tests -vv --failfast + @coverage combine + @coverage report + @coverage html + + +.PHONY: docs +docs: + cd docs && make html diff --git a/dev/.env b/dev/.env new file mode 120000 index 000000000..4a82335f5 --- /dev/null +++ b/dev/.env @@ -0,0 +1 @@ +../.env \ No newline at end of file diff --git a/dev/Dockerfile_eventsourcing_requirements b/dev/Dockerfile_eventsourcing_requirements deleted file mode 100644 index 48762c9a2..000000000 --- a/dev/Dockerfile_eventsourcing_requirements +++ /dev/null @@ -1,20 +0,0 @@ -# To use this Docker file in PyCharm, just add a new Docker project interpreter, -# and set an image name such as "eventsourcing_requirements:latest". It will -# take a little while to download and build everything, but then tests which -# do not depend on other services such as MySQL and Cassandra should pass. -# To run containers needed to pass the full test suite, see docker-compose.yaml. -FROM python:3.7 - -WORKDIR /app - -# Copy enough to install the eventsourcing requirements. -COPY setup.py /app/setup.py -RUN mkdir eventsourcing -COPY eventsourcing/__init__.py /app/eventsourcing/ - -# Install the requirements. -RUN pip install -e .[testing] - -# Remove the package source files. 
-RUN pip uninstall eventsourcing --yes -RUN rm -rf /app/eventsourcing diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml index c6101af6b..cff3c967f 100644 --- a/dev/docker-compose.yaml +++ b/dev/docker-compose.yaml @@ -1,49 +1,51 @@ -# To use this Docker Compose file in PyCharm, just add a new Docker Compose -# project interpreter, and pick "all_eventsourcing_requirements" as the service. It -# will take a little while to download and build everything, then all tests should pass. -# Please note, both MySQL and Cassandra containers need a little while to get started -# first time the containers are run, so connections might not work immediately. version: '2' -services: - eventsourcing_requirements: - build: - context: .. - dockerfile: ./dev/Dockerfile_eventsourcing_requirements - image: "eventsourcing_requirements:latest" - volumes: - - .:/app - links: - - service_cassandra - - service_mysql - - service_postgres - - service_redis - environment: - CASSANDRA_HOSTS: service_cassandra - MYSQL_HOST: service_mysql - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - POSTGRES_HOST: service_postgres - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - REDIS_HOST: service_redis - service_cassandra: +services: + cassandra: image: "cassandra:latest" + volumes: + - cassandra_data:/var/lib/cassandra + ports: + - "9042:9042" - service_mysql: + mysql: image: "mysql:latest" - environment: - MYSQL_ROOT_PASSWORD: eventsourcing - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - MYSQL_DATABASE: eventsourcing + env_file: + - .env + volumes: + - mysql_data:/var/lib/mysql + ports: + - "3306:3306" - service_postgres: + postgres: image: "postgres:latest" - environment: - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - POSTGRES_DB: eventsourcing + env_file: + - .env + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" - service_redis: + redis: image: "redis:latest" + volumes: + - redis_data:/data + ports: + - "6379:6379" + + axon: + image: "axoniq/axonserver:latest" + volumes: + - axon_data:/data + - axon_eventdata:/eventdata + ports: + - "8024:8024" + - "8124:8124" + +volumes: + cassandra_data: + mysql_data: + postgres_data: + redis_data: + axon_data: + axon_eventdata: diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index b2a38eb06..3623e2b64 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -27,3 +27,104 @@ are several ways you can help the library’s development: open to suggestions. - Submit patches or pull requests for new and/or fixed behavior. - Improve the documentation or write unit tests. + + +Local development +================= + +This library using `GNU make`_ utility and multiple python packages for code linting. You can read more +about it at `this article`_. The actual code of the provided commands below can be easily found and read in +`Makefile` at the root of this project. But first, you need to set up your local development environment. + +.. _GNU make: https://www.gnu.org/software/make/ +.. _this article: https://opensource.com/article/18/8/what-how-makefile + + +.. _development-environment: + +Configure the development environment +===================================== + +This project runs on Python 3.6+, and you need to have it installed on your system. +The recommended way for all platforms is to use the `official download page`_. +But also, you may use pyenv_. + +.. _official download page: https://www.python.org/downloads/ +.. 
_pyenv: https://github.com/pyenv/pyenv

Consider using a virtualenv for development; choose your flavor:

- virtualenvwrapper_
- pyenv-virtualenv_ for pyenv_

.. _virtualenvwrapper: https://virtualenvwrapper.readthedocs.io/en/latest/
.. _pyenv-virtualenv: https://github.com/pyenv/pyenv-virtualenv

Use the following command inside your activated virtualenv to install all project dependencies
required for contribution::

    make install


.. _docker-containers:

Manage Docker containers
========================

You need to manage the Docker containers that provide third-party services to have a better experience with local development.
To pull docker images::

    make docker-pull

To bring up the containers and keep them running in detached mode::

    make docker-up

To stop the containers::

    make docker-stop

To tear down the containers, removing volumes and “orphans”::

    make docker-down

To attach to the latest containers' output::

    make docker-logs

All of these commands use the predefined ``COMPOSE_FILE`` and ``COMPOSE_PROJECT_NAME`` values to keep
your containers in a more organized and straightforward way.

**COMPOSE_FILE** is used by the *docker-compose* utility to pick the development services
configuration. The valid format of this value is: ``dev/docker-compose.yaml``.

**COMPOSE_PROJECT_NAME** sets the project name. This value is used to prefix the container names
on startup, along with the service name. ``eventsourcing`` is a great default value for it.


Testing
=======

Ensure that you’ve set up your development environment (see :ref:`development-environment`)
and that the required services are up and running (see :ref:`docker-containers`).

Just run this::

    make test

.. note::
    Not all tests do teardown in the right way: sometimes tests fail because of data
    left in the DBs, so a fresh start may require ``make docker-down``.

... or push the code to trigger Travis CI checks.


Building documentation
======================

This project uses the Sphinx_ documentation builder. Run this command to compile the documentation
into static HTML files at `./docs/_build/html`::

    make docs

.. _Sphinx: https://www.sphinx-doc.org/en/master/
diff --git a/setup.py b/setup.py
index 7909fbdb2..ebe2264d4 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,9 @@
 "redis<=3.4.99999",
 "celery<=4.4.99999",
 "pymysql<=0.9.99999",
+ "mysql-connector-python-rf<=2.2.99999",
+ "python-coveralls<=2.9.99999",
+ "coverage<5.0.0", # v5 is incompatible ATM.
 "cryptography",
 # Tests use Django with PostgreSQL.
 "psycopg2cffi<=2.8.99999" if is_pypy else "psycopg2-binary<=2.8.99999",
From e2a7a11e273d6e63c6a83f9d2dedc434b14a3fbe Mon Sep 17 00:00:00 2001
From: John Bywater
Date: Wed, 29 Apr 2020 11:19:33 +0100
Subject: [PATCH 09/50] Corrected skip reason typo.
--- eventsourcing/tests/test_performance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/tests/test_performance.py b/eventsourcing/tests/test_performance.py index 9cd80f056..c27fd0e95 100644 --- a/eventsourcing/tests/test_performance.py +++ b/eventsourcing/tests/test_performance.py @@ -355,7 +355,7 @@ def construct_entity_record_manager(self): def construct_log_record_manager(self): return self.factory.construct_record_manager(record_class=None) - @skip("Axon does support sequencing by timestamp, so don't test log performance") + @skip("Axon doesn't support sequencing by timestamp, so don't test log performance") def test_log_performance(self): pass From 3cc99037267ff2a917466df2ee34bdd4f4e0f135 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 29 Apr 2020 23:20:10 +0100 Subject: [PATCH 10/50] Changed default database settings (harmonise on Docker env vars). --- README_example_with_axon.md | 2 +- docs/topics/process.rst | 4 ++-- eventsourcing/infrastructure/axonserver/datastore.py | 8 +++++++- eventsourcing/tests/datastore_tests/test_axonserver.py | 2 +- .../tests/djangoproject/djangoproject/settings.py | 2 +- eventsourcing/tests/system_test_fixtures.py | 4 ++-- eventsourcing/tests/test_docs.py | 4 ++-- 7 files changed, 16 insertions(+), 10 deletions(-) diff --git a/README_example_with_axon.md b/README_example_with_axon.md index 8f41052b0..37f10e509 100644 --- a/README_example_with_axon.md +++ b/README_example_with_axon.md @@ -117,7 +117,7 @@ import os os.environ['CIPHER_KEY'] = cipher_key # AxonServer database connection string. -os.environ['DB_URI'] = "localhost:8124" +os.environ['DB_URI'] = "{}:8124".format(os.environ.get('AXON_HOST', 'localhost')) ``` Run the code. Construct application and use as context manager. diff --git a/docs/topics/process.rst b/docs/topics/process.rst index db7889724..f2aafd2a4 100644 --- a/docs/topics/process.rst +++ b/docs/topics/process.rst @@ -1025,8 +1025,8 @@ The following MySQL database connection string is compatible with SQLAlchemy. 
import os os.environ['DB_URI'] = 'mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true'.format( - os.getenv('MYSQL_USER', 'root'), - os.getenv('MYSQL_PASSWORD', ''), + os.getenv('MYSQL_USER', 'eventsourcing'), + os.getenv('MYSQL_PASSWORD', 'eventsourcing'), os.getenv('MYSQL_HOST', '127.0.0.1'), ) diff --git a/eventsourcing/infrastructure/axonserver/datastore.py b/eventsourcing/infrastructure/axonserver/datastore.py index 1f38cc2d2..b324ffdd1 100644 --- a/eventsourcing/infrastructure/axonserver/datastore.py +++ b/eventsourcing/infrastructure/axonserver/datastore.py @@ -11,7 +11,13 @@ def __init__(self, uri: Optional[str] = None): if uri is not None: self.uri = uri else: - self.uri = os.getenv("DB_URI", DEFAULT_LOCAL_AXONSERVER_URI) + if "AXON_HOST" in os.environ: + axon_db_uri = "{}:{}".format( + os.getenv("AXON_HOST"), os.getenv("AXON_PORT", "8124") + ) + else: + axon_db_uri = DEFAULT_LOCAL_AXONSERVER_URI + self.uri = os.getenv("DB_URI", axon_db_uri) class AxonDatastore(AbstractDatastore[AxonSettings]): diff --git a/eventsourcing/tests/datastore_tests/test_axonserver.py b/eventsourcing/tests/datastore_tests/test_axonserver.py index cf24445d5..e1523a4f3 100644 --- a/eventsourcing/tests/datastore_tests/test_axonserver.py +++ b/eventsourcing/tests/datastore_tests/test_axonserver.py @@ -22,7 +22,7 @@ def construct_datastore(self): def create_factory_kwargs(self): kwargs = super(AxonDatastoreTestCase, self).create_factory_kwargs() - kwargs["axon_client"] = AxonClient(DEFAULT_LOCAL_AXONSERVER_URI) + kwargs["axon_client"] = AxonClient(self.datastore.settings.uri) return kwargs diff --git a/eventsourcing/tests/djangoproject/djangoproject/settings.py b/eventsourcing/tests/djangoproject/djangoproject/settings.py index 06b090675..74c84a1eb 100644 --- a/eventsourcing/tests/djangoproject/djangoproject/settings.py +++ b/eventsourcing/tests/djangoproject/djangoproject/settings.py @@ -94,7 +94,7 @@ "HOST": os.getenv("POSTGRES_HOST", "127.0.0.1"), "NAME": "eventsourcing", "USER": os.getenv("POSTGRES_USER", "eventsourcing"), - "PASSWORD": os.getenv("POSTGRES_PASSWORD", ""), + "PASSWORD": os.getenv("POSTGRES_PASSWORD", "eventsourcing"), } } diff --git a/eventsourcing/tests/system_test_fixtures.py b/eventsourcing/tests/system_test_fixtures.py index ddc7553b3..164ab19f3 100644 --- a/eventsourcing/tests/system_test_fixtures.py +++ b/eventsourcing/tests/system_test_fixtures.py @@ -11,8 +11,8 @@ def set_db_uri(): host = os.getenv("MYSQL_HOST", "127.0.0.1") - user = os.getenv("MYSQL_USER", "root") - password = os.getenv("MYSQL_PASSWORD", "") + user = os.getenv("MYSQL_USER", "eventsourcing") + password = os.getenv("MYSQL_PASSWORD", "eventsourcing") db_uri = ( "mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true" ).format(user, password, host) diff --git a/eventsourcing/tests/test_docs.py b/eventsourcing/tests/test_docs.py index bba0749fa..3bbb5ba7c 100644 --- a/eventsourcing/tests/test_docs.py +++ b/eventsourcing/tests/test_docs.py @@ -32,8 +32,8 @@ def tearDown(self): os.environ[ "DB_URI" ] = "mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true".format( - os.getenv("MYSQL_USER", "root"), - os.getenv("MYSQL_PASSWORD", ""), + os.getenv("MYSQL_USER", "eventsourcing"), + os.getenv("MYSQL_PASSWORD", "eventsourcing"), os.getenv("MYSQL_HOST", "127.0.0.1"), ) database = SQLAlchemyDatastore(settings=SQLAlchemySettings()) From 22de4a7a205c37d9e86b2ba1786f00f8f4811de5 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 29 Apr 2020 23:22:02 +0100 Subject: 
[PATCH 11/50] Relaxed test timeouts (was too tight for Docker containers). --- docs/topics/process.rst | 2 +- eventsourcing/tests/test_system.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/process.rst b/docs/topics/process.rst index f2aafd2a4..47639bf17 100644 --- a/docs/topics/process.rst +++ b/docs/topics/process.rst @@ -1053,7 +1053,7 @@ things are eventually done. .. code:: python - @retry((AssertionError, KeyError), max_attempts=50, wait=0.1) + @retry((AssertionError, KeyError), max_attempts=60, wait=0.5) def assert_eventually_done(repository, cmd_id): """Checks the command is eventually done.""" assert repository[cmd_id].is_done diff --git a/eventsourcing/tests/test_system.py b/eventsourcing/tests/test_system.py index 11de47240..4307dc14a 100644 --- a/eventsourcing/tests/test_system.py +++ b/eventsourcing/tests/test_system.py @@ -282,7 +282,7 @@ def test_multithreaded_runner_with_multiapp_system(self): # assert retries, "Failed set order.is_reserved" while retries and not orders.repository[order_id].is_paid: - sleep(0.1) + sleep(0.5) retries -= 1 assert retries, "Failed set order.is_paid" From 7c52f7b90fe621ae2225879670920e4921ae4826 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 29 Apr 2020 23:24:40 +0100 Subject: [PATCH 12/50] Fixed AxonApplication test (doesn't need to inherit DjangoTestCase). --- eventsourcing/tests/core_tests/test_simple_application.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/tests/core_tests/test_simple_application.py b/eventsourcing/tests/core_tests/test_simple_application.py index f84d94683..5caf7a2fe 100644 --- a/eventsourcing/tests/core_tests/test_simple_application.py +++ b/eventsourcing/tests/core_tests/test_simple_application.py @@ -87,7 +87,7 @@ class TestDjangoApplication(DjangoTestCase, TestSimpleApplication): infrastructure_class = DjangoApplication -class TestAxonApplication(DjangoTestCase, TestSimpleApplication): +class TestAxonApplication(TestSimpleApplication): infrastructure_class = AxonApplication From 1d2be3f1da8e4128e03b09da847fe0e987df7911 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 29 Apr 2020 23:25:26 +0100 Subject: [PATCH 13/50] Added 'quicktest' to Makefile. --- Makefile | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/Makefile b/Makefile index ae6523bcf..baeb267d1 100644 --- a/Makefile +++ b/Makefile @@ -41,6 +41,31 @@ test: @coverage html +.PHONY: quicktest +quicktest: + QUICK_TESTS_ONLY=1 + @coverage run -m unittest discover eventsourcing.tests -vv + @coverage combine + @coverage report + @coverage html + + .PHONY: docs docs: cd docs && make html + + +.PHONY: brew_services_start +brew_services_start: + brew services start mysql + brew services start postgresql + brew services start redis + ~/axonserver/axonserver.jar & + cassandra -f & + +.PHONY: brew_services_stop +brew_services_stop: + brew services stop mysql + brew services stop postgresql + brew services stop redis + pkill -15 java From 55e8d2ddcc4187a8b68a5a83ab41a56a62a83236 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Wed, 29 Apr 2020 23:37:10 +0100 Subject: [PATCH 14/50] Added some notes about setting up databases on MacOS. 
---
 dev/MACOS_SETUP_NOTES.md | 52 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)
 create mode 100644 dev/MACOS_SETUP_NOTES.md

diff --git a/dev/MACOS_SETUP_NOTES.md b/dev/MACOS_SETUP_NOTES.md
new file mode 100644
index 000000000..b34e9e1a9
--- /dev/null
+++ b/dev/MACOS_SETUP_NOTES.md
@@ -0,0 +1,52 @@
This document describes how to set up MacOS with the databases needed to run the test suite:

- MySQL
- PostgreSQL
- Redis
- Cassandra
- Axon Server


To set up MySQL:

$ brew install mysql
$ brew services start mysql
$ mysql -u root
mysql> CREATE DATABASE EVENTSOURCING;
mysql> CREATE USER 'eventsourcing'@'localhost' IDENTIFIED BY 'eventsourcing';
mysql> GRANT ALL PRIVILEGES ON eventsourcing.* TO 'eventsourcing'@'localhost';

To set up PostgreSQL:

$ brew install postgresql
$ brew services start postgresql


To set up Redis:

$ brew install redis
$ brew services start redis


To set up Cassandra:

$ brew install cassandra
$ brew services start cassandra

If that doesn't actually start Cassandra, then try this in a terminal:
$ cassandra -f


To set up Axon:
$ ./dev/download_axon_server.sh
$ ./axonserver/axonserver.jar


After this, the databases can be stopped with:

$ make brew_services_stop


The databases can be started with:

$ make brew_services_start
From 51a5ea257e9273330e7e5f660a7f8658d0558b6a Mon Sep 17 00:00:00 2001
From: John Bywater
Date: Wed, 29 Apr 2020 23:43:24 +0100
Subject: [PATCH 15/50] Added Dockerfile for requirements, to run tests in Docker from PyCharm.
---
 dev/Dockerfile_eventsourcing_requirements | 15 ++++++++++++++
 dev/docker-compose.yaml | 24 +++++++++++++++++++++++
 2 files changed, 39 insertions(+)
 create mode 100644 dev/Dockerfile_eventsourcing_requirements

diff --git a/dev/Dockerfile_eventsourcing_requirements b/dev/Dockerfile_eventsourcing_requirements
new file mode 100644
index 000000000..881b95ae8
--- /dev/null
+++ b/dev/Dockerfile_eventsourcing_requirements
@@ -0,0 +1,15 @@
FROM python:3.7

WORKDIR /app

# Copy enough to install the eventsourcing requirements.
COPY setup.py /app/setup.py
RUN mkdir eventsourcing
COPY eventsourcing/__init__.py /app/eventsourcing/

# Install the requirements.
RUN pip install -e .[testing]

# Remove the package source files.
RUN pip uninstall eventsourcing --yes
RUN rm -rf /app/eventsourcing
diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml
index cff3c967f..a0011827e 100644
--- a/dev/docker-compose.yaml
+++ b/dev/docker-compose.yaml
@@ -1,6 +1,30 @@
 version: '2'

 services:
+  eventsourcing_requirements:
+    build:
+      context: ..
+      dockerfile: ./dev/Dockerfile_eventsourcing_requirements
+    image: "eventsourcing_requirements:latest"
+    volumes:
+      - .:/app
+    links:
+      - cassandra
+      - mysql
+      - postgres
+      - redis
+      - axon
+    environment:
+      CASSANDRA_HOSTS: cassandra
+      MYSQL_HOST: mysql
+      MYSQL_USER: eventsourcing
+      MYSQL_PASSWORD: eventsourcing
+      POSTGRES_HOST: postgres
+      POSTGRES_USER: eventsourcing
+      POSTGRES_PASSWORD: eventsourcing
+      REDIS_HOST: redis
+      AXON_HOST: axon
+
 cassandra:
 image: "cassandra:latest"
 volumes:
 - cassandra_data:/var/lib/cassandra
 ports:
 - "9042:9042"
From b45c825746a5193ab1d149798a5235f5a6a5ba67 Mon Sep 17 00:00:00 2001
From: John Bywater
Date: Thu, 30 Apr 2020 00:04:22 +0100
Subject: [PATCH 16/50] Added MYSQL_PASSWORD to travis config.
--- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 4ef67ecdb..598cf3c28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -61,6 +61,7 @@ env: global: - CASSANDRA_HOSTS=127.0.0.1 - MYSQL_USER=travis + - MYSQL_PASSWORD= - POSTGRES_USER=postgres script: From 5ad4d605b6602ae1926bb6ffedd978a087f19048 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Thu, 30 Apr 2020 00:05:26 +0100 Subject: [PATCH 17/50] Added POSTGRES_PASSWORD to travis config. --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 598cf3c28..3f4a639b7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,6 +63,7 @@ env: - MYSQL_USER=travis - MYSQL_PASSWORD= - POSTGRES_USER=postgres + - POSTGRES_PASSWORD= script: - if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then From 6d9ec698a6310b701ffbcdf987ff5a3675f6e943 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Thu, 30 Apr 2020 01:25:44 +0100 Subject: [PATCH 18/50] Blackened whole package. --- eventsourcing/application/decorators.py | 2 +- eventsourcing/application/django.py | 2 +- eventsourcing/application/simple.py | 6 ++- eventsourcing/contrib/paxos/composable.py | 4 +- eventsourcing/domain/model/array.py | 4 +- eventsourcing/domain/model/command.py | 4 +- eventsourcing/domain/model/entity.py | 11 +++-- eventsourcing/domain/model/snapshot.py | 3 +- eventsourcing/domain/model/versioning.py | 6 +-- eventsourcing/example/application.py | 4 +- eventsourcing/example/domainmodel.py | 14 +++++-- .../infrastructure/axonserver/manager.py | 13 +++--- eventsourcing/infrastructure/base.py | 10 ++--- eventsourcing/infrastructure/iterators.py | 8 +--- eventsourcing/infrastructure/popo/manager.py | 1 + eventsourcing/system/ray.py | 12 +++--- eventsourcing/system/thespian.py | 4 +- .../paxos_tests/test_paxos_system.py | 23 +++++++---- .../tests/core_tests/test_aggregate_root.py | 34 +++++++++++----- .../tests/core_tests/test_decorators.py | 9 +---- eventsourcing/tests/core_tests/test_entity.py | 5 +-- .../tests/core_tests/test_event_store.py | 12 ++---- .../tests/core_tests/test_event_versioning.py | 12 +++--- ...se_with_alternative_sequenced_item_type.py | 4 +- ..._customise_with_extended_sequenced_item.py | 4 +- eventsourcing/tests/datastore_tests/base.py | 3 +- .../djangoproject/djangoproject/settings.py | 4 +- .../tests/sequenced_item_tests/base.py | 9 +++-- eventsourcing/tests/system_test_fixtures.py | 6 +-- eventsourcing/tests/test_docs.py | 24 +++++------ eventsourcing/tests/test_flask.py | 2 +- eventsourcing/tests/test_notificationlog.py | 2 +- eventsourcing/tests/test_performance.py | 13 ++++-- eventsourcing/tests/test_process.py | 40 ++++++++++++++----- .../tests/test_ray_runner_with_django.py | 2 +- eventsourcing/tests/test_system.py | 22 ++++------ eventsourcing/utils/transcoding_v1.py | 2 +- 37 files changed, 191 insertions(+), 149 deletions(-) diff --git a/eventsourcing/application/decorators.py b/eventsourcing/application/decorators.py index c19cefb96..7a7574af2 100644 --- a/eventsourcing/application/decorators.py +++ b/eventsourcing/application/decorators.py @@ -37,7 +37,6 @@ def applicationpolicy2(arg: Callable) -> Callable: cache = {} def _mutator(func): - def dispatch(event_type): try: return cache[event_type] @@ -55,6 +54,7 @@ def register(event_type): def registered_function_decorator(func): handlers[event_type] = func return func + return registered_function_decorator policy_function_wrapper.register = register diff --git a/eventsourcing/application/django.py 
b/eventsourcing/application/django.py index 2ada26380..4a12b0ca3 100644 --- a/eventsourcing/application/django.py +++ b/eventsourcing/application/django.py @@ -11,7 +11,7 @@ class DjangoApplication(ApplicationWithConcreteInfrastructure): infrastructure_factory_class = DjangoInfrastructureFactory - def __init__(self, tracking_record_class: Any=None, *args: Any, **kwargs: Any): + def __init__(self, tracking_record_class: Any = None, *args: Any, **kwargs: Any): self._tracking_record_class = tracking_record_class super(DjangoApplication, self).__init__(*args, **kwargs) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index 13193303f..dfc6c1370 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -498,8 +498,10 @@ def construct_event_records( ) else: if any((not e.__notifiable__ for e in pending_events)): - raise Exception("Can't set __notifiable__=False withut " - "set_notification_ids=True ") + raise Exception( + "Can't set __notifiable__=False withut " + "set_notification_ids=True " + ) if self.use_causal_dependencies: assert hasattr(record_manager.record_class, "causal_dependencies") diff --git a/eventsourcing/contrib/paxos/composable.py b/eventsourcing/contrib/paxos/composable.py index b37d3be5e..cdbc0e9dd 100644 --- a/eventsourcing/contrib/paxos/composable.py +++ b/eventsourcing/contrib/paxos/composable.py @@ -385,8 +385,8 @@ def __init__(self, network_uid, quorum_size): self.acceptors = dict() # maps from_uid => last_accepted_proposal_id self.final_value = None self.final_acceptors = ( - None - ) # Will be a set of acceptor UIDs once the final value is chosen + None # Will be a set of acceptor UIDs once the final value is chosen + ) self.final_proposal_id = None def receive_accepted(self, msg): diff --git a/eventsourcing/domain/model/array.py b/eventsourcing/domain/model/array.py index 74a5fd40b..89d1aec07 100644 --- a/eventsourcing/domain/model/array.py +++ b/eventsourcing/domain/model/array.py @@ -4,9 +4,7 @@ from uuid import uuid5 from eventsourcing.domain.model.decorators import retry -from eventsourcing.domain.model.entity import ( - TimestampedVersionedEntity, -) +from eventsourcing.domain.model.entity import TimestampedVersionedEntity from eventsourcing.domain.model.events import publish from eventsourcing.exceptions import ArrayIndexError, ConcurrencyError from eventsourcing.domain.model.repository import AbstractEntityRepository diff --git a/eventsourcing/domain/model/command.py b/eventsourcing/domain/model/command.py index 0700a08b7..b31421139 100644 --- a/eventsourcing/domain/model/command.py +++ b/eventsourcing/domain/model/command.py @@ -14,7 +14,9 @@ class Event(BaseAggregateRoot.Event[TAggregate]): class Created(Event[TAggregate], BaseAggregateRoot.Created[TAggregate]): pass - class AttributeChanged(Event[TAggregate], BaseAggregateRoot.AttributeChanged[TAggregate]): + class AttributeChanged( + Event[TAggregate], BaseAggregateRoot.AttributeChanged[TAggregate] + ): pass class Discarded(Event[TAggregate], BaseAggregateRoot.Discarded[TAggregate]): diff --git a/eventsourcing/domain/model/entity.py b/eventsourcing/domain/model/entity.py index e4c6db4fd..bc9f86d22 100644 --- a/eventsourcing/domain/model/entity.py +++ b/eventsourcing/domain/model/entity.py @@ -194,7 +194,9 @@ def id(self) -> UUID: """ return self._id - def __change_attribute__(self: TDomainEntity, name: str, value: Any, **kwargs) -> None: + def __change_attribute__( + self: TDomainEntity, name: str, value: Any, **kwargs + ) -> None: """ 
Changes named attribute with the given value, by triggering an AttributeChanged event. @@ -203,7 +205,9 @@ def __change_attribute__(self: TDomainEntity, name: str, value: Any, **kwargs) - "DomainEntity.AttributeChanged[TDomainEntity]" ] = self.AttributeChanged assert isinstance(self, DomainEntity) # For PyCharm navigation. - self.__trigger_event__(event_class=event_class, name=name, value=value, **kwargs) + self.__trigger_event__( + event_class=event_class, name=name, value=value, **kwargs + ) class AttributeChanged(Event[TDomainEntity], AttributeChangedEvent[TDomainEntity]): """ @@ -510,10 +514,11 @@ class EntityWithECC(DomainEntity): """ Entity whose events have event ID, correlation ID, and causation ID. """ + class Event(DomainEntity.Event): def __init__(self, *, processed_event=None, application_name, **kwargs): - event_id = kwargs.get('event_id') or "{}:{}:{}".format( + event_id = kwargs.get("event_id") or "{}:{}:{}".format( application_name, kwargs["originator_id"], kwargs["originator_version"] ) kwargs["event_id"] = event_id diff --git a/eventsourcing/domain/model/snapshot.py b/eventsourcing/domain/model/snapshot.py index e446b5621..c085bb9ce 100644 --- a/eventsourcing/domain/model/snapshot.py +++ b/eventsourcing/domain/model/snapshot.py @@ -5,7 +5,8 @@ EventWithOriginatorID, EventWithOriginatorVersion, EventWithTimestamp, - AbstractSnapshot) + AbstractSnapshot, +) from eventsourcing.utils.topic import resolve_topic, reconstruct_object from eventsourcing.whitehead import TEntity diff --git a/eventsourcing/domain/model/versioning.py b/eventsourcing/domain/model/versioning.py index 949fa4a65..780b25499 100644 --- a/eventsourcing/domain/model/versioning.py +++ b/eventsourcing/domain/model/versioning.py @@ -33,7 +33,7 @@ class Upcastable(Event): def __init__(self): if type(self).__class_version__ > 0: - self.__dict__['__class_version__'] = type(self).__class_version__ + self.__dict__["__class_version__"] = type(self).__class_version__ super(Upcastable, self).__init__() @classmethod @@ -42,11 +42,11 @@ def __upcast_state__(cls, obj_state: Dict) -> Dict: Upcasts obj_state from the version of the class when the object state was recorded, to be compatible with current version of the class. 
""" - class_version = obj_state.get('__class_version__', 0) + class_version = obj_state.get("__class_version__", 0) while class_version < cls.__class_version__: obj_state = cls.__upcast__(obj_state, class_version) class_version += 1 - obj_state['__class_version__'] = class_version + obj_state["__class_version__"] = class_version return obj_state @classmethod diff --git a/eventsourcing/example/application.py b/eventsourcing/example/application.py index 0f99fa469..522a187ae 100644 --- a/eventsourcing/example/application.py +++ b/eventsourcing/example/application.py @@ -83,7 +83,7 @@ def construct_event_store( event_sequence_id_attr=event_sequence_id_attr, event_position_attr=event_position_attr, cipher=cipher, - compressor=zlib if cipher else None + compressor=zlib if cipher else None, ) event_store = EventStore( record_manager=record_manager, event_mapper=sequenced_item_mapper @@ -108,7 +108,7 @@ def construct_sequenced_item_mapper( json_encoder_class=json_encoder_class, json_decoder_class=json_decoder_class, cipher=cipher, - compressor=compressor + compressor=compressor, ) def close(self): diff --git a/eventsourcing/example/domainmodel.py b/eventsourcing/example/domainmodel.py index 9601edaad..bdf961df7 100644 --- a/eventsourcing/example/domainmodel.py +++ b/eventsourcing/example/domainmodel.py @@ -2,7 +2,8 @@ from eventsourcing.domain.model.entity import ( EntityWithHashchain, TimestampedVersionedEntity, - TDomainEntity) + TDomainEntity, +) from eventsourcing.domain.model.repository import AbstractEntityRepository @@ -12,7 +13,8 @@ class Example(EntityWithHashchain, TimestampedVersionedEntity): """ class Event( - EntityWithHashchain.Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity] + EntityWithHashchain.Event[TDomainEntity], + TimestampedVersionedEntity.Event[TDomainEntity], ): """Supertype for events of example entities.""" @@ -28,10 +30,14 @@ class AttributeChanged( ): """Published when an Example is created.""" - class Discarded(Event[TDomainEntity], TimestampedVersionedEntity.Discarded[TDomainEntity]): + class Discarded( + Event[TDomainEntity], TimestampedVersionedEntity.Discarded[TDomainEntity] + ): """Published when an Example is discarded.""" - class Heartbeat(Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity]): + class Heartbeat( + Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity] + ): """Published when a heartbeat in the entity occurs (see below).""" def mutate(self, obj: "Example") -> None: diff --git a/eventsourcing/infrastructure/axonserver/manager.py b/eventsourcing/infrastructure/axonserver/manager.py index 03ba70db8..78a777998 100644 --- a/eventsourcing/infrastructure/axonserver/manager.py +++ b/eventsourcing/infrastructure/axonserver/manager.py @@ -33,7 +33,7 @@ def __init__(self, axon_client: AxonClient, *args: Any, **kwargs: Any): super(AxonRecordManager, self).__init__(*args, **kwargs) self.axon_client = axon_client self.contiguous_record_ids = True - self.notification_id_name = 'id' + self.notification_id_name = "id" def all_sequence_ids(self): return [] @@ -55,8 +55,7 @@ def get_notification_records( stop = stop or INT32_MAX number_of_permits = min(stop - start, INT32_MAX) events = self.axon_client.iter_events( - tracking_token=start, - number_of_permits=number_of_permits + tracking_token=start, number_of_permits=number_of_permits ) for tracking_token, event in events: yield AxonNotification(tracking_token, axon_event=event) @@ -120,18 +119,18 @@ def to_axon_event(self, item): aggregate_sequence_number = 
getattr(item, self.field_names.position) payload_type = getattr(item, self.field_names.topic) payload_data = getattr(item, self.field_names.state) - is_snapshot = payload_type.endswith('#Snapshot') + is_snapshot = payload_type.endswith("#Snapshot") return AxonEvent( message_identifier=message_identifier, aggregate_identifier=sequence_id, aggregate_sequence_number=aggregate_sequence_number, - aggregate_type='AggregateRoot', + aggregate_type="AggregateRoot", timestamp=0, payload_type=payload_type, - payload_revision='1', + payload_revision="1", payload_data=payload_data, snapshot=is_snapshot, - meta_data={} + meta_data={}, ) def write_records(self, records): diff --git a/eventsourcing/infrastructure/base.py b/eventsourcing/infrastructure/base.py index b3bb2c809..b3cd62b76 100644 --- a/eventsourcing/infrastructure/base.py +++ b/eventsourcing/infrastructure/base.py @@ -127,7 +127,7 @@ def __init__( record_class: type, sequenced_item_class: Type[NamedTuple] = SequencedItem, # type: ignore contiguous_record_ids: bool = False, - application_name: str = '', + application_name: str = "", pipeline_id: int = DEFAULT_PIPELINE_ID, **kwargs: Any ): @@ -294,9 +294,7 @@ def get_notifications( yield self.create_notification_from_record(record) def create_notification_from_record(self, record): - notification = { - "id": getattr(record, self.notification_id_name) - } + notification = {"id": getattr(record, self.notification_id_name)} for field_name in self.field_names: notification[field_name] = getattr(record, field_name) if hasattr(record, "causal_dependencies"): @@ -534,9 +532,7 @@ class AbstractEventStore(ABC, Generic[TEvent, TRecordManager]): """ def __init__( - self, - record_manager: TRecordManager, - event_mapper: AbstractSequencedItemMapper, + self, record_manager: TRecordManager, event_mapper: AbstractSequencedItemMapper, ): """ Initialises event store object. diff --git a/eventsourcing/infrastructure/iterators.py b/eventsourcing/infrastructure/iterators.py index 5ffbe0819..77429d52e 100644 --- a/eventsourcing/infrastructure/iterators.py +++ b/eventsourcing/infrastructure/iterators.py @@ -34,9 +34,7 @@ def __init__( :param limit: Limit to the number of items returned. :param is_ascending: Whether or not to iterate in ascending order. """ - assert isinstance(record_manager, BaseRecordManager), type( - record_manager - ) + assert isinstance(record_manager, BaseRecordManager), type(record_manager) assert isinstance(page_size, (int, type(None))) assert isinstance(limit, (int, type(None))) self.record_manager = record_manager @@ -255,9 +253,7 @@ def __init__( **kwargs: Any ): super(GetEntityEventsThread, self).__init__(*args, **kwargs) - assert isinstance(record_manager, BaseRecordManager), type( - record_manager - ) + assert isinstance(record_manager, BaseRecordManager), type(record_manager) self.record_manager = record_manager self.stored_entity_id = sequence_id self.gt = gt diff --git a/eventsourcing/infrastructure/popo/manager.py b/eventsourcing/infrastructure/popo/manager.py index 19723fdb6..82d169453 100644 --- a/eventsourcing/infrastructure/popo/manager.py +++ b/eventsourcing/infrastructure/popo/manager.py @@ -326,6 +326,7 @@ class PopoStoredEventRecord(object): Allows other attributes to be set, such as notification ID. 
""" + def __init__(self, sequenced_item: NamedTuple): self.sequenced_item = sequenced_item diff --git a/eventsourcing/system/ray.py b/eventsourcing/system/ray.py index d0998127a..5919782e3 100644 --- a/eventsourcing/system/ray.py +++ b/eventsourcing/system/ray.py @@ -8,11 +8,13 @@ import ray -from eventsourcing.application.process import ( - ProcessApplication, +from eventsourcing.application.process import ProcessApplication +from eventsourcing.application.simple import ( + ApplicationWithConcreteInfrastructure, + Prompt, + is_prompt_to_pull, + PromptToPull, ) -from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure, \ - Prompt, is_prompt_to_pull, PromptToPull from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( @@ -305,7 +307,7 @@ def prompt(self, prompt: RayPrompt) -> None: if PROMPT_WITH_NOTIFICATION_OBJS: for notification in prompt.notifications: self._prompted_notifications[ - (upstream_name, notification['id']) + (upstream_name, notification["id"]) ] = notification if latest_head is not None: with self.heads_lock: diff --git a/eventsourcing/system/thespian.py b/eventsourcing/system/thespian.py index b1936d7b0..dc85e11c6 100644 --- a/eventsourcing/system/thespian.py +++ b/eventsourcing/system/thespian.py @@ -4,9 +4,7 @@ from thespian.actors import Actor, ActorExitRequest, ActorSystem from eventsourcing.application.notificationlog import RecordManagerNotificationLog -from eventsourcing.application.process import ( - ProcessApplication, -) +from eventsourcing.application.process import ProcessApplication from eventsourcing.application.simple import is_prompt_to_pull, PromptToPull from eventsourcing.system.definition import System, AbstractSystemRunner from eventsourcing.domain.model.events import subscribe, unsubscribe diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py index 5e0e3472e..dbac6d35d 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py @@ -142,7 +142,9 @@ def test_multiprocessing(self): # Start running operating system processes. with runner: # Get local application object. - paxosapplication0 = runner.get(runner.system.process_classes["paxosapplication0"]) + paxosapplication0 = runner.get( + runner.system.process_classes["paxosapplication0"] + ) assert isinstance(paxosapplication0, PaxosApplication) # Start proposing values on the different system pipelines. @@ -159,8 +161,12 @@ def test_multiprocessing(self): paxosapplication0.propose_value(key3, value3) # Check all the process applications have expected final values. 
- paxosapplication1 = runner.get(runner.system.process_classes["paxosapplication1"]) - paxosapplication2 = runner.get(runner.system.process_classes["paxosapplication2"]) + paxosapplication1 = runner.get( + runner.system.process_classes["paxosapplication1"] + ) + paxosapplication2 = runner.get( + runner.system.process_classes["paxosapplication2"] + ) assert isinstance(paxosapplication1, PaxosApplication) paxosapplication0.repository.use_cache = False @@ -188,7 +194,7 @@ def test_multiprocessing(self): @notquick @skipIf( sys.version_info[:2] == (3, 6), - "RayRunner not working with Python36 (pickle issue)" + "RayRunner not working with Python36 (pickle issue)", ) def test_ray_performance(self): @@ -239,9 +245,10 @@ def assert_final_value(process_name, pipeline_id, id, expected): print( "Resolved {} paxoses with ray in {:.4f}s " "({:.1f} values/s, {:.4f}s each)".format( - num_proposals, duration, + num_proposals, + duration, num_proposals / duration, - duration / num_proposals + duration / num_proposals, ) ) @@ -268,7 +275,9 @@ def test_multiprocessing_performance(self): sleep(1) # Construct an application instance in this process. - paxosapplication0 = runner.get(runner.system.process_classes["paxosapplication0"]) + paxosapplication0 = runner.get( + runner.system.process_classes["paxosapplication0"] + ) assert isinstance(paxosapplication0, PaxosApplication) diff --git a/eventsourcing/tests/core_tests/test_aggregate_root.py b/eventsourcing/tests/core_tests/test_aggregate_root.py index 501b187e6..9e8429a0e 100644 --- a/eventsourcing/tests/core_tests/test_aggregate_root.py +++ b/eventsourcing/tests/core_tests/test_aggregate_root.py @@ -77,31 +77,47 @@ def test_example_aggregate_event_classes(self): self.assertIn("AttributeChanged", ExampleAggregateRoot.__dict__) self.assertEqual(ExampleAggregateRoot.Event.__name__, "Event") - self.assertEqual(ExampleAggregateRoot.Event.__qualname__, "ExampleAggregateRoot.Event") + self.assertEqual( + ExampleAggregateRoot.Event.__qualname__, "ExampleAggregateRoot.Event" + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Event" self.assertEqual(get_topic(ExampleAggregateRoot.Event), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Event) self.assertEqual(ExampleAggregateRoot.Created.__name__, "Created") - self.assertEqual(ExampleAggregateRoot.Created.__qualname__, "ExampleAggregateRoot.Created") + self.assertEqual( + ExampleAggregateRoot.Created.__qualname__, "ExampleAggregateRoot.Created" + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Created" self.assertEqual(get_topic(ExampleAggregateRoot.Created), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Created) - self.assertTrue(issubclass(ExampleAggregateRoot.Created, ExampleAggregateRoot.Event)) + self.assertTrue( + issubclass(ExampleAggregateRoot.Created, ExampleAggregateRoot.Event) + ) self.assertEqual(ExampleAggregateRoot.Discarded.__name__, "Discarded") - self.assertEqual(ExampleAggregateRoot.Discarded.__qualname__, "ExampleAggregateRoot.Discarded") + self.assertEqual( + ExampleAggregateRoot.Discarded.__qualname__, + "ExampleAggregateRoot.Discarded", + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Discarded" self.assertEqual(get_topic(ExampleAggregateRoot.Discarded), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Discarded) - self.assertTrue(issubclass(ExampleAggregateRoot.Discarded, ExampleAggregateRoot.Event)) + self.assertTrue( + 
issubclass(ExampleAggregateRoot.Discarded, ExampleAggregateRoot.Event) + ) self.assertEqual(ExampleAggregateRoot.ExampleCreated.__name__, "ExampleCreated") - self.assertEqual(ExampleAggregateRoot.ExampleCreated.__qualname__, "ExampleAggregateRoot.ExampleCreated") + self.assertEqual( + ExampleAggregateRoot.ExampleCreated.__qualname__, + "ExampleAggregateRoot.ExampleCreated", + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.ExampleCreated" self.assertEqual(get_topic(ExampleAggregateRoot.ExampleCreated), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.ExampleCreated) - self.assertTrue(issubclass(ExampleAggregateRoot.ExampleCreated, ExampleAggregateRoot.Event)) + self.assertTrue( + issubclass(ExampleAggregateRoot.ExampleCreated, ExampleAggregateRoot.Event) + ) def test_aggregate1_event_classes(self): self.assertIn("Event", Aggregate1.__dict__) @@ -433,14 +449,14 @@ def create_aggregate1(self) -> Aggregate1: a: Aggregate1 = Aggregate1.__create__() return a - def create_aggregate2(self)-> Aggregate2: + def create_aggregate2(self) -> Aggregate2: """ Factory method, creates and returns a new aggregate1 root entity. """ a: Aggregate2 = Aggregate2.__create__() return a - def create_aggregate3(self)-> Aggregate3: + def create_aggregate3(self) -> Aggregate3: """ Factory method, creates and returns a new aggregate1 root entity. """ diff --git a/eventsourcing/tests/core_tests/test_decorators.py b/eventsourcing/tests/core_tests/test_decorators.py index d814a57c3..6c1a35a17 100644 --- a/eventsourcing/tests/core_tests/test_decorators.py +++ b/eventsourcing/tests/core_tests/test_decorators.py @@ -212,7 +212,6 @@ def test_applicationpolicy_decorator(self): self.seen_int = False class Application(object): - def __init__(self, test_case): self.test_case = test_case @@ -224,11 +223,10 @@ def policy(self, repository, event): def _(self, repository, event): self.test_case.seen_int = True - app = Application(self) self.assertFalse(self.seen_default) self.assertFalse(self.seen_int) - app.policy(None, '') + app.policy(None, "") self.assertTrue(self.seen_default) self.assertFalse(self.seen_int) app.policy(None, 1) @@ -252,7 +250,6 @@ class C(B): pass class Application(object): - def __init__(self, test_case): self.test_case = test_case @@ -272,11 +269,10 @@ def _(self, repository, event): def _(self, repository, event): self.test_case.seen_b = True - app = Application(self) self.assertFalse(self.seen_default) self.assertFalse(self.seen_int) - app.policy(None, '') + app.policy(None, "") self.assertTrue(self.seen_default) self.assertFalse(self.seen_int) app.policy(None, 1) @@ -284,4 +280,3 @@ def _(self, repository, event): self.assertTrue(self.seen_int) app.policy(None, C()) self.assertTrue(self.seen_b) - diff --git a/eventsourcing/tests/core_tests/test_entity.py b/eventsourcing/tests/core_tests/test_entity.py index 16821c16a..556245b22 100644 --- a/eventsourcing/tests/core_tests/test_entity.py +++ b/eventsourcing/tests/core_tests/test_entity.py @@ -215,10 +215,7 @@ def a(self): def receive(x): published_events.extend(x) - subscription = ( - lambda x: True, - receive - ) + subscription = (lambda x: True, receive) subscribe(*subscription) entity_id = uuid4() try: diff --git a/eventsourcing/tests/core_tests/test_event_store.py b/eventsourcing/tests/core_tests/test_event_store.py index ab659aae0..88ad689cd 100644 --- a/eventsourcing/tests/core_tests/test_event_store.py +++ b/eventsourcing/tests/core_tests/test_event_store.py @@ -61,9 +61,7 @@ def 
test_list_events(self): self.assertEqual(0, len(entity_events)) # Check there are zero events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(0, len(entity_events)) # Store a domain event. @@ -77,9 +75,7 @@ def test_list_events(self): self.assertEqual(1, len(entity_events)) # Check there are two events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(1, len(entity_events)) # Store another domain event. @@ -93,9 +89,7 @@ def test_list_events(self): self.assertEqual(2, len(entity_events)) # Check there are two events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(2, len(entity_events)) def test_get_most_recent_event(self): diff --git a/eventsourcing/tests/core_tests/test_event_versioning.py b/eventsourcing/tests/core_tests/test_event_versioning.py index f753c126d..56d3fb3bf 100644 --- a/eventsourcing/tests/core_tests/test_event_versioning.py +++ b/eventsourcing/tests/core_tests/test_event_versioning.py @@ -114,13 +114,12 @@ def __upcast__(cls, obj_state: Dict, class_version: int): class TestUpcastingActiveWhenStoringAndRetrievingEvents(TestCase): - def tearDown(self) -> None: # Reset class versions (in case running in suite). MultiVersionAggregateFixture.Triggered.__class_version__ = 0 MultiVersionAggregateFixture.__class_version__ = 0 - del(MultiVersionAggregateFixture.Triggered.__upcast__) - del(MultiVersionAggregateFixture.Triggered.mutate) + del MultiVersionAggregateFixture.Triggered.__upcast__ + del MultiVersionAggregateFixture.Triggered.mutate def test_aggregate_state_after_versioning_events(self): """ @@ -225,7 +224,6 @@ def test_snapshot_state_after_versioning_events(self): """ app = SnapshottingApplication.mixin(SQLAlchemyApplication)( persist_event_type=BaseAggregateRoot.Event, - ) with app: my_aggregate = MultiVersionAggregateFixture.__create__() @@ -291,7 +289,7 @@ def test_snapshot_state_after_versioning_events(self): copy = app.repository[my_aggregate.id] assert isinstance(copy, MultiVersionAggregateFixture) self.assertEqual(copy.value, 10) - self.assertEqual(copy.units, '') # gets default + self.assertEqual(copy.units, "") # gets default class MultiVersionAggregateFixture(BaseAggregateRoot): @@ -301,9 +299,9 @@ class MultiVersionAggregateFixture(BaseAggregateRoot): @classmethod def __upcast__(cls, obj_state: Dict, class_version: int) -> Dict: if class_version == 0: - obj_state['value'] = cls.DEFAULT_VALUE + obj_state["value"] = cls.DEFAULT_VALUE elif class_version == 1: - obj_state['units'] = cls.DEFAULT_UNITS + obj_state["units"] = cls.DEFAULT_UNITS return obj_state def __init__(self, **kwargs): diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py index 3e45409d4..3ad613713 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py @@ -76,7 +76,9 @@ def _test(self): 
self.assertEqual(entity1.b, "b") # Check there is a stored event. - all_records = list(app.event_store.record_manager.get_notification_records()) + all_records = list( + app.event_store.record_manager.get_notification_records() + ) self.assertEqual(1, len(all_records)) stored_event = all_records[0] self.assertEqual(stored_event.originator_id, entity1.id) diff --git a/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py b/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py index 8ae727d59..829aa9e19 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py @@ -132,7 +132,9 @@ def test(self): self.assertEqual(entity1.b, "b") # Check there is a stored event. - all_records = list(app.event_store.record_manager.get_notification_records()) + all_records = list( + app.event_store.record_manager.get_notification_records() + ) self.assertEqual(len(all_records), 1) record = all_records[0] self.assertEqual(record.sequence_id, entity1.id) diff --git a/eventsourcing/tests/datastore_tests/base.py b/eventsourcing/tests/datastore_tests/base.py index ba33ce677..457014f51 100644 --- a/eventsourcing/tests/datastore_tests/base.py +++ b/eventsourcing/tests/datastore_tests/base.py @@ -3,7 +3,8 @@ from eventsourcing.infrastructure.datastore import ( DatastoreTableError, - AbstractDatastore) + AbstractDatastore, +) from eventsourcing.infrastructure.factory import InfrastructureFactory from eventsourcing.tests.base import AbstractTestCase diff --git a/eventsourcing/tests/djangoproject/djangoproject/settings.py b/eventsourcing/tests/djangoproject/djangoproject/settings.py index 74c84a1eb..05797a34c 100644 --- a/eventsourcing/tests/djangoproject/djangoproject/settings.py +++ b/eventsourcing/tests/djangoproject/djangoproject/settings.py @@ -102,7 +102,9 @@ # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ - {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" + }, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, diff --git a/eventsourcing/tests/sequenced_item_tests/base.py b/eventsourcing/tests/sequenced_item_tests/base.py index d428cc7c9..2f0b7e45c 100644 --- a/eventsourcing/tests/sequenced_item_tests/base.py +++ b/eventsourcing/tests/sequenced_item_tests/base.py @@ -205,7 +205,9 @@ def test(self): # Get items less then or equal to a position. if self.record_manager.can_lt_lte_get_records: - retrieved_items = self.record_manager.list_items(sequence_id1, lte=position2) + retrieved_items = self.record_manager.list_items( + sequence_id1, lte=position2 + ) self.assertEqual(len(retrieved_items), 2) self.assertEqual(retrieved_items[0].state, item1.state) self.assertEqual(retrieved_items[1].state, item4.state) @@ -319,8 +321,9 @@ def test(self): ) # Resume from after the first event. 
- retrieved_items = self.record_manager.get_notification_records(start=1 + len_old, - stop=3 + len_old) + retrieved_items = self.record_manager.get_notification_records( + start=1 + len_old, stop=3 + len_old + ) retrieved_items = list(retrieved_items) self.assertEqual(len(retrieved_items), 2) self.assertEqual(retrieved_items[0].id, 2 + len_old) diff --git a/eventsourcing/tests/system_test_fixtures.py b/eventsourcing/tests/system_test_fixtures.py index 164ab19f3..3bdd9c051 100644 --- a/eventsourcing/tests/system_test_fixtures.py +++ b/eventsourcing/tests/system_test_fixtures.py @@ -42,7 +42,7 @@ def mutate(self, order): @property def reservation_id(self): - return self.__dict__['reservation_id'] + return self.__dict__["reservation_id"] def set_is_paid(self, payment_id): self.__trigger_event__(self.Paid, payment_id=payment_id) @@ -55,7 +55,7 @@ def mutate(self, order): @property def payment_id(self): - return self.__dict__['payment_id'] + return self.__dict__["payment_id"] class Reservation(BaseAggregateRoot): @@ -139,7 +139,7 @@ def is_order_paid(self, order_id): order = self.get_order(order_id) return order is not None and order.is_paid - def get_order(self, order_id, repository: WrappedRepository=None): + def get_order(self, order_id, repository: WrappedRepository = None): try: return (repository or self.repository)[order_id] except KeyError: diff --git a/eventsourcing/tests/test_docs.py b/eventsourcing/tests/test_docs.py index 3bbb5ba7c..2ea379a21 100644 --- a/eventsourcing/tests/test_docs.py +++ b/eventsourcing/tests/test_docs.py @@ -74,18 +74,18 @@ def test_docs(self): if name in skipped: continue if name.endswith(".rst"): - # if name.endswith('domainmodel.rst'): - # if name.endswith('quick_start.rst'): - # if name.endswith('aggregates_in_ddd.rst'): - # if name.endswith('example_application.rst'): - # if name.endswith('everything.rst'): - # if name.endswith('infrastructure.rst'): - # if name.endswith('application.rst'): - # if name.endswith('snapshotting.rst'): - # if name.endswith('notifications.rst'): - # if name.endswith('projections.rst'): - # if name.endswith('deployment.rst'): - # if name.endswith('process.rst'): + # if name.endswith('domainmodel.rst'): + # if name.endswith('quick_start.rst'): + # if name.endswith('aggregates_in_ddd.rst'): + # if name.endswith('example_application.rst'): + # if name.endswith('everything.rst'): + # if name.endswith('infrastructure.rst'): + # if name.endswith('application.rst'): + # if name.endswith('snapshotting.rst'): + # if name.endswith('notifications.rst'): + # if name.endswith('projections.rst'): + # if name.endswith('deployment.rst'): + # if name.endswith('process.rst'): file_paths.append(os.path.join(docs_path, dirpath, name)) file_paths = sorted(file_paths) diff --git a/eventsourcing/tests/test_flask.py b/eventsourcing/tests/test_flask.py index f184dc54d..fe4e61d22 100644 --- a/eventsourcing/tests/test_flask.py +++ b/eventsourcing/tests/test_flask.py @@ -116,7 +116,7 @@ def start_app(self): # if not os.path.exists(path_to_uwsgi): # raise AssertionError("Can't find uwsgi: %s" % path_to_uwsgi) # # Todo: Maybe use shutil.which, after dropping support for Python 2.7. 
- path_to_uwsgi = shutil.which('uwsgi') + path_to_uwsgi = shutil.which("uwsgi") if not os.path.exists(path_to_uwsgi): raise AssertionError("Can't find uwsgi: %s" % path_to_uwsgi) cmd = [path_to_uwsgi] diff --git a/eventsourcing/tests/test_notificationlog.py b/eventsourcing/tests/test_notificationlog.py index e521c90d3..d69ca206a 100644 --- a/eventsourcing/tests/test_notificationlog.py +++ b/eventsourcing/tests/test_notificationlog.py @@ -309,7 +309,7 @@ def simple_app(environ, start_response): notification_log = RemoteNotificationLog(base_url) # Just before we start, test the deserialise_section_size exceptions. - notification_log.deserialize_section_size('1') + notification_log.deserialize_section_size("1") with self.assertRaises(ValueError): notification_log.deserialize_section_size('"1') with self.assertRaises(TypeError): diff --git a/eventsourcing/tests/test_performance.py b/eventsourcing/tests/test_performance.py index c27fd0e95..7e89c61c9 100644 --- a/eventsourcing/tests/test_performance.py +++ b/eventsourcing/tests/test_performance.py @@ -122,8 +122,7 @@ def last_n(n): assert isinstance(app.example_repository.event_store, EventStore) start_last_n = time.time() events = app.example_repository.event_store.list_events( - originator_id=example.id, - gt=num_beats - n, + originator_id=example.id, gt=num_beats - n, ) assert len(events) == n, "Asked for %s but got %s" % ( n, @@ -232,7 +231,10 @@ def test_log_performance(self): total_num_reads = 0 while True: start_read = time.time() - page_of_events, next_position = self.get_message_logged_events_and_next_position( + ( + page_of_events, + next_position, + ) = self.get_message_logged_events_and_next_position( log_reader, position, page_size ) time_to_read = time.time() - start_read @@ -254,7 +256,10 @@ def test_log_performance(self): position = None while True: start_read = time.time() - page_of_events, next_position = self.get_message_logged_events_and_next_position( + ( + page_of_events, + next_position, + ) = self.get_message_logged_events_and_next_position( log_reader, position, page_size, is_ascending=True ) time_to_read = time.time() - start_read diff --git a/eventsourcing/tests/test_process.py b/eventsourcing/tests/test_process.py index 329bf622e..9f7ea2720 100644 --- a/eventsourcing/tests/test_process.py +++ b/eventsourcing/tests/test_process.py @@ -23,8 +23,11 @@ unsubscribe, ) from eventsourcing.domain.model.snapshot import Snapshot -from eventsourcing.exceptions import CausalDependencyFailed, PromptFailed, \ - ProgrammingError +from eventsourcing.exceptions import ( + CausalDependencyFailed, + PromptFailed, + ProgrammingError, +) from eventsourcing.infrastructure.sqlalchemy.records import Base from eventsourcing.whitehead import TEntity from eventsourcing.utils.topic import resolve_topic @@ -200,30 +203,48 @@ def test_causal_dependencies(self): downstream2.run() self.assertEqual( - 0, len(list(downstream1.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 0, len(list(downstream2.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) # Try to process pipeline 1, should work. 
downstream1.run() self.assertEqual( - 1, len(list(downstream1.event_store.record_manager.get_notification_records())) + 1, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 0, len(list(downstream2.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) # Try again to process pipeline 2, should work this time. downstream2.run() self.assertEqual( - 1, len(list(downstream1.event_store.record_manager.get_notification_records())) + 1, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 2, len(list(downstream2.event_store.record_manager.get_notification_records())) + 2, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) core1.close() @@ -354,7 +375,6 @@ def test_clear_cache_after_exception_recording_event(self): self.assertNotIn(aggregate.id, process.repository._cache) def test_policy_returns_sequence_of_new_aggregates(self): - def policy(repository, event): first = AggregateRoot.__create__(originator_id=(uuid4())) second = AggregateRoot.__create__(originator_id=(uuid4())) @@ -386,8 +406,6 @@ def policy(repository, event): ids = process.repository.event_store.record_manager.all_sequence_ids() self.assertEqual(len(list(ids)), 3) - - def define_projection_record_class(self): class ProjectionRecord(Base): __tablename__ = "projections" diff --git a/eventsourcing/tests/test_ray_runner_with_django.py b/eventsourcing/tests/test_ray_runner_with_django.py index 1ea282f5b..771b72981 100644 --- a/eventsourcing/tests/test_ray_runner_with_django.py +++ b/eventsourcing/tests/test_ray_runner_with_django.py @@ -13,4 +13,4 @@ class DontTestRayRunnerWithDjango(DjangoTestCase, TestRayRunner): # Don't let this be found here. 
-del(TestRayRunner) +del TestRayRunner diff --git a/eventsourcing/tests/test_system.py b/eventsourcing/tests/test_system.py index 4307dc14a..7abee672d 100644 --- a/eventsourcing/tests/test_system.py +++ b/eventsourcing/tests/test_system.py @@ -76,9 +76,7 @@ def test_bind(self): def test_singlethreaded_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with system as runner: orders = runner.get(Orders) @@ -88,9 +86,7 @@ def test_singlethreaded_runner_with_single_application_class(self): def test_can_run_if_already_running(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with system: with self.assertRaises(ProgrammingError): @@ -104,9 +100,7 @@ def test_can_run_if_already_running(self): def test_multithreaded_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with MultiThreadedRunner(system) as runner: app = runner.get(Orders) @@ -116,9 +110,7 @@ def test_multithreaded_runner_with_single_application_class(self): def test_multiprocess_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) self.set_db_uri() @@ -164,11 +156,13 @@ def test_multithreaded_runner_with_single_pipe(self): patience = 10 while True: try: - self.assertEqual(reservations_repo[reservation_id].order_id, order_id) + self.assertEqual( + reservations_repo[reservation_id].order_id, order_id + ) except (RepositoryKeyError, AssertionError): if patience: patience -= 1 - sleep(.1) + sleep(0.1) else: raise else: diff --git a/eventsourcing/utils/transcoding_v1.py b/eventsourcing/utils/transcoding_v1.py index 28ff846fb..c94aa0e64 100644 --- a/eventsourcing/utils/transcoding_v1.py +++ b/eventsourcing/utils/transcoding_v1.py @@ -89,7 +89,7 @@ def decorator(decoder_func): class ObjectJSONEncoder(JSONEncoder): def encode(self, o): - return super(ObjectJSONEncoder, self).encode(o).encode('utf8') + return super(ObjectJSONEncoder, self).encode(o).encode("utf8") def iterencode(self, o, _one_shot=False): if isinstance(o, tuple): From 53cfd70288051457999074479ddbfbaecb052f10 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Thu, 30 Apr 2020 02:11:24 +0100 Subject: [PATCH 19/50] Sorted imports (and blackened again). 
--- eventsourcing/application/decorators.py | 2 +- eventsourcing/application/notificationlog.py | 2 +- eventsourcing/application/policies.py | 7 ++---- eventsourcing/application/process.py | 10 ++------- eventsourcing/application/simple.py | 2 +- eventsourcing/application/snapshotting.py | 5 +---- .../contrib/cargo_shipping_example.py | 7 ++---- eventsourcing/contrib/paxos/application.py | 7 ++---- eventsourcing/domain/model/__init__.py | 1 - eventsourcing/domain/model/array.py | 2 +- eventsourcing/domain/model/collection.py | 4 ++-- eventsourcing/domain/model/decorators.py | 2 +- eventsourcing/domain/model/snapshot.py | 6 ++--- eventsourcing/domain/model/timebucketedlog.py | 4 ++-- eventsourcing/example/application.py | 4 +--- eventsourcing/example/domainmodel.py | 2 +- eventsourcing/example/interface/flaskapp.py | 1 + eventsourcing/example/interface/flaskwsgi.py | 1 - .../infrastructure/axonserver/datastore.py | 4 ++-- eventsourcing/infrastructure/base.py | 1 - .../infrastructure/cassandra/manager.py | 1 + .../infrastructure/cassandra/records.py | 2 +- eventsourcing/infrastructure/datastore.py | 2 +- .../infrastructure/django/manager.py | 2 +- .../infrastructure/eventsourcedrepository.py | 9 +++----- eventsourcing/infrastructure/eventstore.py | 1 - eventsourcing/infrastructure/factory.py | 2 +- .../integersequencegenerators/redisincr.py | 4 ++-- eventsourcing/infrastructure/iterators.py | 2 +- eventsourcing/infrastructure/popo/mapper.py | 4 ++-- .../infrastructure/sequenceditemmapper.py | 2 +- eventsourcing/infrastructure/snapshotting.py | 7 ++---- .../infrastructure/sqlalchemy/datastore.py | 4 ++-- .../infrastructure/sqlalchemy/factory.py | 2 +- .../infrastructure/sqlalchemy/records.py | 2 +- eventsourcing/system/definition.py | 2 +- eventsourcing/system/multiprocess.py | 22 +++++-------------- eventsourcing/system/ray.py | 5 ++--- eventsourcing/system/runner.py | 7 ++---- eventsourcing/system/thespian.py | 4 ++-- .../paxos_tests/test_paxos_system.py | 2 +- .../test_paxos_system_with_django.py | 4 ++-- .../test_paxos_system_with_popo.py | 4 ++-- .../tests/core_tests/test_aggregate_root.py | 4 ++-- .../tests/core_tests/test_decorators.py | 2 +- eventsourcing/tests/core_tests/test_entity.py | 6 +---- .../core_tests/test_persistence_policy.py | 7 ++---- .../core_tests/test_simple_application.py | 13 +++++------ eventsourcing/tests/core_tests/test_utils.py | 7 +++--- .../test_customise_with_alternative_cql.py | 5 ++--- ...mise_with_alternative_domain_event_type.py | 5 ++--- ...se_with_alternative_sequenced_item_type.py | 5 ++--- .../test_customized_projections_old.py | 1 - eventsourcing/tests/datastore_tests/base.py | 4 ++-- .../tests/datastore_tests/test_axonserver.py | 2 +- .../tests/datastore_tests/test_cassandra.py | 8 +++---- .../tests/example_application_tests/base.py | 3 +-- ...est_example_application_with_encryption.py | 2 +- eventsourcing/tests/system_test_fixtures.py | 2 +- eventsourcing/tests/test_notificationlog.py | 2 +- eventsourcing/tests/test_process.py | 4 ++-- .../tests/test_process_with_django.py | 2 +- eventsourcing/tests/test_process_with_popo.py | 1 - eventsourcing/tests/test_ray_runner.py | 5 +---- .../tests/test_ray_runner_with_django.py | 2 +- eventsourcing/tests/test_system.py | 10 ++++----- .../tests/test_system_with_django.py | 3 +-- eventsourcing/tests/test_system_with_popo.py | 13 +++++------ eventsourcing/tests/test_thespian_runner.py | 14 ++++++------ .../tests/test_thespian_runner_with_django.py | 2 +- eventsourcing/tests/test_transcoding.py | 15 
++++++------- eventsourcing/tests/test_transcoding_v1.py | 16 +++++--------- eventsourcing/utils/cipher/aes.py | 5 ----- eventsourcing/utils/random.py | 3 +-- eventsourcing/utils/transcoding.py | 6 ++--- 75 files changed, 137 insertions(+), 212 deletions(-) diff --git a/eventsourcing/application/decorators.py b/eventsourcing/application/decorators.py index 7a7574af2..2580c3ec1 100644 --- a/eventsourcing/application/decorators.py +++ b/eventsourcing/application/decorators.py @@ -1,4 +1,4 @@ -from functools import singledispatch, wraps, _find_impl +from functools import _find_impl, singledispatch, wraps from inspect import isfunction from typing import Callable, no_type_check diff --git a/eventsourcing/application/notificationlog.py b/eventsourcing/application/notificationlog.py index 6e4be7973..cd33e7a64 100644 --- a/eventsourcing/application/notificationlog.py +++ b/eventsourcing/application/notificationlog.py @@ -3,8 +3,8 @@ from eventsourcing.domain.model.array import BigArray from eventsourcing.infrastructure.base import ( - RecordManagerWithNotifications, AbstractRecordManager, + RecordManagerWithNotifications, ) DEFAULT_SECTION_SIZE = 20 diff --git a/eventsourcing/application/policies.py b/eventsourcing/application/policies.py index f67cd74d3..40c96188c 100644 --- a/eventsourcing/application/policies.py +++ b/eventsourcing/application/policies.py @@ -2,16 +2,13 @@ from eventsourcing.domain.model.entity import VersionedEntity from eventsourcing.domain.model.events import ( + AbstractSnapshot, EventWithOriginatorVersion, subscribe, unsubscribe, - AbstractSnapshot, -) -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, ) from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.whitehead import IterableOfEvents, TEvent diff --git a/eventsourcing/application/process.py b/eventsourcing/application/process.py index c48dee612..7250e96ba 100644 --- a/eventsourcing/application/process.py +++ b/eventsourcing/application/process.py @@ -36,14 +36,8 @@ TAggregateEvent, ) from eventsourcing.domain.model.events import subscribe, unsubscribe -from eventsourcing.exceptions import ( - CausalDependencyFailed, - ProgrammingError, -) -from eventsourcing.infrastructure.base import ( - RecordManagerWithTracking, - TrackingKwargs, -) +from eventsourcing.exceptions import CausalDependencyFailed, ProgrammingError +from eventsourcing.infrastructure.base import RecordManagerWithTracking, TrackingKwargs from eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository from eventsourcing.whitehead import IterableOfEvents diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index dfc6c1370..75a543c87 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -30,10 +30,10 @@ from eventsourcing.domain.model.events import DomainEvent, publish from eventsourcing.exceptions import ProgrammingError, PromptFailed from eventsourcing.infrastructure.base import ( + DEFAULT_PIPELINE_ID, AbstractEventStore, AbstractRecordManager, BaseRecordManager, - DEFAULT_PIPELINE_ID, RecordManagerWithTracking, TrackingKwargs, ) diff --git a/eventsourcing/application/snapshotting.py b/eventsourcing/application/snapshotting.py index faaadbf4f..d69b65e00 100644 --- a/eventsourcing/application/snapshotting.py +++ b/eventsourcing/application/snapshotting.py @@ -3,11 +3,8 @@ from 
eventsourcing.application.policies import SnapshottingPolicy from eventsourcing.application.simple import SimpleApplication from eventsourcing.domain.model.entity import TVersionedEntity, TVersionedEvent -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.snapshotting import EventSourcedSnapshotStrategy diff --git a/eventsourcing/contrib/cargo_shipping_example.py b/eventsourcing/contrib/cargo_shipping_example.py index 9027ec83d..75e6e0b23 100644 --- a/eventsourcing/contrib/cargo_shipping_example.py +++ b/eventsourcing/contrib/cargo_shipping_example.py @@ -24,16 +24,13 @@ from eventsourcing.application.process import ProcessApplication from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.runner import ( - InProcessRunner, - SingleThreadedRunner, -) -from eventsourcing.system.definition import System from eventsourcing.domain.model.aggregate import ( AggregateRoot, TAggregate, TAggregateEvent, ) +from eventsourcing.system.definition import System +from eventsourcing.system.runner import InProcessRunner, SingleThreadedRunner # Locations in the world. diff --git a/eventsourcing/contrib/paxos/application.py b/eventsourcing/contrib/paxos/application.py index 8ca701da3..9d39c7e2d 100644 --- a/eventsourcing/contrib/paxos/application.py +++ b/eventsourcing/contrib/paxos/application.py @@ -3,11 +3,7 @@ from typing import Any, Dict, Optional, Sequence, Set, Union from uuid import UUID -from eventsourcing.application.process import ( - ProcessApplication, - WrappedRepository, -) -from eventsourcing.system.definition import System +from eventsourcing.application.process import ProcessApplication, WrappedRepository from eventsourcing.contrib.paxos.composable import ( PaxosInstance, PaxosMessage, @@ -21,6 +17,7 @@ RecordConflictError, RepositoryKeyError, ) +from eventsourcing.system.definition import System class PaxosAggregate(BaseAggregateRoot): diff --git a/eventsourcing/domain/model/__init__.py b/eventsourcing/domain/model/__init__.py index 8b1378917..e69de29bb 100644 --- a/eventsourcing/domain/model/__init__.py +++ b/eventsourcing/domain/model/__init__.py @@ -1 +0,0 @@ - diff --git a/eventsourcing/domain/model/array.py b/eventsourcing/domain/model/array.py index 89d1aec07..86f9717a9 100644 --- a/eventsourcing/domain/model/array.py +++ b/eventsourcing/domain/model/array.py @@ -6,8 +6,8 @@ from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.entity import TimestampedVersionedEntity from eventsourcing.domain.model.events import publish -from eventsourcing.exceptions import ArrayIndexError, ConcurrencyError from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import ArrayIndexError, ConcurrencyError class ItemAssigned(TimestampedVersionedEntity.Event): diff --git a/eventsourcing/domain/model/collection.py b/eventsourcing/domain/model/collection.py index 1d25aea21..eca16aa37 100644 --- a/eventsourcing/domain/model/collection.py +++ b/eventsourcing/domain/model/collection.py @@ -1,11 +1,11 @@ from __future__ import absolute_import, division, print_function, unicode_literals -from typing import Any, Set, Iterator, Optional +from typing import Any, Iterator, Optional, Set from uuid 
import UUID from eventsourcing.domain.model.entity import ( - TTimestampedVersionedEntity, TimestampedVersionedEntity, + TTimestampedVersionedEntity, ) from eventsourcing.domain.model.repository import AbstractEntityRepository diff --git a/eventsourcing/domain/model/decorators.py b/eventsourcing/domain/model/decorators.py index e89b19593..ca55a1be4 100644 --- a/eventsourcing/domain/model/decorators.py +++ b/eventsourcing/domain/model/decorators.py @@ -2,7 +2,7 @@ from functools import singledispatch, wraps from inspect import isfunction from time import sleep -from typing import Dict, Type, Callable, no_type_check, Union, Optional, Sequence +from typing import Callable, Dict, Optional, Sequence, Type, Union, no_type_check from eventsourcing.domain.model.events import DomainEvent, subscribe from eventsourcing.exceptions import ProgrammingError diff --git a/eventsourcing/domain/model/snapshot.py b/eventsourcing/domain/model/snapshot.py index c085bb9ce..edc2eb74b 100644 --- a/eventsourcing/domain/model/snapshot.py +++ b/eventsourcing/domain/model/snapshot.py @@ -1,13 +1,13 @@ -from typing import Dict, Optional, Any +from typing import Any, Dict, Optional from uuid import UUID from eventsourcing.domain.model.events import ( + AbstractSnapshot, EventWithOriginatorID, EventWithOriginatorVersion, EventWithTimestamp, - AbstractSnapshot, ) -from eventsourcing.utils.topic import resolve_topic, reconstruct_object +from eventsourcing.utils.topic import reconstruct_object, resolve_topic from eventsourcing.whitehead import TEntity diff --git a/eventsourcing/domain/model/timebucketedlog.py b/eventsourcing/domain/model/timebucketedlog.py index d4d34e2a4..32548ce91 100644 --- a/eventsourcing/domain/model/timebucketedlog.py +++ b/eventsourcing/domain/model/timebucketedlog.py @@ -1,6 +1,6 @@ import datetime from decimal import Decimal -from typing import Optional, Any, Union +from typing import Any, Optional, Union from uuid import UUID, uuid5 from dateutil.relativedelta import relativedelta @@ -12,8 +12,8 @@ LoggedEvent, publish, ) -from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.utils.times import ( datetime_from_timestamp, decimaltimestamp, diff --git a/eventsourcing/example/application.py b/eventsourcing/example/application.py index 522a187ae..6e9763dc7 100644 --- a/eventsourcing/example/application.py +++ b/eventsourcing/example/application.py @@ -1,6 +1,5 @@ -from abc import ABC - import zlib +from abc import ABC from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.entity import VersionedEntity @@ -13,7 +12,6 @@ from eventsourcing.infrastructure.snapshotting import EventSourcedSnapshotStrategy from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder - # Please note, the code is this module is basically old-fashioned, and will # be removed when the tests that depend on it are rewritten to use the new # application classes in package eventsourcing.application. 
These were the diff --git a/eventsourcing/example/domainmodel.py b/eventsourcing/example/domainmodel.py index bdf961df7..0a0754768 100644 --- a/eventsourcing/example/domainmodel.py +++ b/eventsourcing/example/domainmodel.py @@ -1,8 +1,8 @@ from eventsourcing.domain.model.decorators import attribute from eventsourcing.domain.model.entity import ( EntityWithHashchain, - TimestampedVersionedEntity, TDomainEntity, + TimestampedVersionedEntity, ) from eventsourcing.domain.model.repository import AbstractEntityRepository diff --git a/eventsourcing/example/interface/flaskapp.py b/eventsourcing/example/interface/flaskapp.py index dc1d3c517..210406b5c 100644 --- a/eventsourcing/example/interface/flaskapp.py +++ b/eventsourcing/example/interface/flaskapp.py @@ -1,4 +1,5 @@ import os + from flask import Flask from flask_sqlalchemy import SQLAlchemy from sqlalchemy_utils.types.uuid import UUIDType diff --git a/eventsourcing/example/interface/flaskwsgi.py b/eventsourcing/example/interface/flaskwsgi.py index 808e2a65d..887ec4142 100644 --- a/eventsourcing/example/interface/flaskwsgi.py +++ b/eventsourcing/example/interface/flaskwsgi.py @@ -1,6 +1,5 @@ import eventsourcing.example.interface.flaskapp - # Todo: Investigate why removing this file breaks python3.3. # For some reason, this file is needed to run flaskapp.py # with uWSGI and python3.3. diff --git a/eventsourcing/infrastructure/axonserver/datastore.py b/eventsourcing/infrastructure/axonserver/datastore.py index b324ffdd1..9ede8ccf5 100644 --- a/eventsourcing/infrastructure/axonserver/datastore.py +++ b/eventsourcing/infrastructure/axonserver/datastore.py @@ -1,7 +1,7 @@ import os -from typing import Optional, Any +from typing import Any, Optional -from axonclient.client import AxonClient, DEFAULT_LOCAL_AXONSERVER_URI +from axonclient.client import DEFAULT_LOCAL_AXONSERVER_URI, AxonClient from eventsourcing.infrastructure.datastore import AbstractDatastore, DatastoreSettings diff --git a/eventsourcing/infrastructure/base.py b/eventsourcing/infrastructure/base.py index b3cd62b76..753994f15 100644 --- a/eventsourcing/infrastructure/base.py +++ b/eventsourcing/infrastructure/base.py @@ -24,7 +24,6 @@ from eventsourcing.infrastructure.sequenceditemmapper import AbstractSequencedItemMapper from eventsourcing.whitehead import TEvent - DEFAULT_PIPELINE_ID = 0 TrackingKwargs = Dict[str, Union[str, int]] diff --git a/eventsourcing/infrastructure/cassandra/manager.py b/eventsourcing/infrastructure/cassandra/manager.py index d9c62c20f..5cd7790d7 100644 --- a/eventsourcing/infrastructure/cassandra/manager.py +++ b/eventsourcing/infrastructure/cassandra/manager.py @@ -1,4 +1,5 @@ import os + from cassandra import InvalidRequest from cassandra.cqlengine.functions import Token from cassandra.cqlengine.query import BatchQuery, LWTException diff --git a/eventsourcing/infrastructure/cassandra/records.py b/eventsourcing/infrastructure/cassandra/records.py index 5a16c2c1b..25cbd904c 100644 --- a/eventsourcing/infrastructure/cassandra/records.py +++ b/eventsourcing/infrastructure/cassandra/records.py @@ -1,4 +1,4 @@ -from cassandra.cqlengine.models import columns, Model +from cassandra.cqlengine.models import Model, columns class IntegerSequencedRecord(Model): diff --git a/eventsourcing/infrastructure/datastore.py b/eventsourcing/infrastructure/datastore.py index d6d7f775a..2ae573b3e 100644 --- a/eventsourcing/infrastructure/datastore.py +++ b/eventsourcing/infrastructure/datastore.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Optional, 
Any, TypeVar, Generic +from typing import Any, Generic, Optional, TypeVar class DatastoreSettings(object): diff --git a/eventsourcing/infrastructure/django/manager.py b/eventsourcing/infrastructure/django/manager.py index 7b545355e..5b58a8304 100644 --- a/eventsourcing/infrastructure/django/manager.py +++ b/eventsourcing/infrastructure/django/manager.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Dict +from typing import Any, Dict, Iterable, Optional, Sequence from uuid import UUID from django.db import IntegrityError, ProgrammingError, connection, transaction diff --git a/eventsourcing/infrastructure/eventsourcedrepository.py b/eventsourcing/infrastructure/eventsourcedrepository.py index 54ce4de10..7dbd311ca 100644 --- a/eventsourcing/infrastructure/eventsourcedrepository.py +++ b/eventsourcing/infrastructure/eventsourcedrepository.py @@ -4,13 +4,10 @@ from uuid import UUID from eventsourcing.domain.model.entity import TVersionedEntity, TVersionedEvent -from eventsourcing.exceptions import RepositoryKeyError -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) -from eventsourcing.domain.model.repository import AbstractEntityRepository from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import RepositoryKeyError +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.infrastructure.snapshotting import AbstractSnapshotStrategy from eventsourcing.whitehead import SEntity diff --git a/eventsourcing/infrastructure/eventstore.py b/eventsourcing/infrastructure/eventstore.py index 84fba8cef..0499e36db 100644 --- a/eventsourcing/infrastructure/eventstore.py +++ b/eventsourcing/infrastructure/eventstore.py @@ -6,7 +6,6 @@ from eventsourcing.infrastructure.iterators import SequencedItemIterator from eventsourcing.whitehead import TEvent - # Todo: Unify iterators in EventStore and in NotificationLog, # by pushing behaviour down to record manager? 
diff --git a/eventsourcing/infrastructure/factory.py b/eventsourcing/infrastructure/factory.py index 74cbbdb4f..4e221d037 100644 --- a/eventsourcing/infrastructure/factory.py +++ b/eventsourcing/infrastructure/factory.py @@ -2,9 +2,9 @@ from typing import Any, Generic, NamedTuple, Optional, Type from eventsourcing.infrastructure.base import ( + DEFAULT_PIPELINE_ID, AbstractEventStore, AbstractRecordManager, - DEFAULT_PIPELINE_ID, ) from eventsourcing.infrastructure.datastore import AbstractDatastore from eventsourcing.infrastructure.eventstore import EventStore diff --git a/eventsourcing/infrastructure/integersequencegenerators/redisincr.py b/eventsourcing/infrastructure/integersequencegenerators/redisincr.py index d5f6da71f..9e4a3896e 100644 --- a/eventsourcing/infrastructure/integersequencegenerators/redisincr.py +++ b/eventsourcing/infrastructure/integersequencegenerators/redisincr.py @@ -1,8 +1,8 @@ +import os from typing import Optional from uuid import uuid4 -import os -from redis import StrictRedis, Redis +from redis import Redis, StrictRedis from eventsourcing.infrastructure.integersequencegenerators.base import ( AbstractIntegerSequenceGenerator, diff --git a/eventsourcing/infrastructure/iterators.py b/eventsourcing/infrastructure/iterators.py index 77429d52e..05d40eec8 100644 --- a/eventsourcing/infrastructure/iterators.py +++ b/eventsourcing/infrastructure/iterators.py @@ -1,6 +1,6 @@ from abc import abstractmethod from threading import Thread -from typing import Iterable, Optional, NamedTuple, Iterator, Any, List +from typing import Any, Iterable, Iterator, List, NamedTuple, Optional from uuid import UUID from eventsourcing.infrastructure.base import AbstractRecordManager, BaseRecordManager diff --git a/eventsourcing/infrastructure/popo/mapper.py b/eventsourcing/infrastructure/popo/mapper.py index f95f9ae00..e6fe7cf2d 100644 --- a/eventsourcing/infrastructure/popo/mapper.py +++ b/eventsourcing/infrastructure/popo/mapper.py @@ -1,7 +1,7 @@ -from typing import Tuple, Type, Dict, Any +from typing import Any, Dict, Tuple, Type from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper -from eventsourcing.utils.topic import resolve_topic, get_topic +from eventsourcing.utils.topic import get_topic, resolve_topic from eventsourcing.whitehead import TEvent diff --git a/eventsourcing/infrastructure/sequenceditemmapper.py b/eventsourcing/infrastructure/sequenceditemmapper.py index ff884eaab..aac900db8 100644 --- a/eventsourcing/infrastructure/sequenceditemmapper.py +++ b/eventsourcing/infrastructure/sequenceditemmapper.py @@ -7,7 +7,7 @@ SequencedItemFieldNames, ) from eventsourcing.utils.cipher.aes import AESCipher -from eventsourcing.utils.topic import get_topic, resolve_topic, reconstruct_object +from eventsourcing.utils.topic import get_topic, reconstruct_object, resolve_topic from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder from eventsourcing.whitehead import TEvent diff --git a/eventsourcing/infrastructure/snapshotting.py b/eventsourcing/infrastructure/snapshotting.py index 300d9bf65..496bfda2f 100644 --- a/eventsourcing/infrastructure/snapshotting.py +++ b/eventsourcing/infrastructure/snapshotting.py @@ -3,12 +3,9 @@ from typing import Optional from uuid import UUID -from eventsourcing.domain.model.snapshot import Snapshot -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.domain.model.snapshot 
import Snapshot +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.utils.topic import get_topic diff --git a/eventsourcing/infrastructure/sqlalchemy/datastore.py b/eventsourcing/infrastructure/sqlalchemy/datastore.py index 41e5a1175..d2def5b3a 100644 --- a/eventsourcing/infrastructure/sqlalchemy/datastore.py +++ b/eventsourcing/infrastructure/sqlalchemy/datastore.py @@ -1,11 +1,11 @@ import os -from typing import Optional, Union, Sequence, Any, Dict +from typing import Any, Dict, Optional, Sequence, Union from sqlalchemy import create_engine from sqlalchemy.engine import Engine from sqlalchemy.exc import InternalError from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.orm import scoped_session, sessionmaker, Session +from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool from eventsourcing.infrastructure.datastore import AbstractDatastore, DatastoreSettings diff --git a/eventsourcing/infrastructure/sqlalchemy/factory.py b/eventsourcing/infrastructure/sqlalchemy/factory.py index cae4fa3b2..74c1f7728 100644 --- a/eventsourcing/infrastructure/sqlalchemy/factory.py +++ b/eventsourcing/infrastructure/sqlalchemy/factory.py @@ -1,7 +1,7 @@ from typing import Any, NamedTuple, Optional, Type from eventsourcing.domain.model.events import DomainEvent -from eventsourcing.infrastructure.base import AbstractRecordManager, DEFAULT_PIPELINE_ID +from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID, AbstractRecordManager from eventsourcing.infrastructure.datastore import AbstractDatastore from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.factory import InfrastructureFactory diff --git a/eventsourcing/infrastructure/sqlalchemy/records.py b/eventsourcing/infrastructure/sqlalchemy/records.py index e9addadd8..0aa856d48 100644 --- a/eventsourcing/infrastructure/sqlalchemy/records.py +++ b/eventsourcing/infrastructure/sqlalchemy/records.py @@ -1,4 +1,4 @@ -from sqlalchemy import DECIMAL, String, LargeBinary +from sqlalchemy import DECIMAL, LargeBinary, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.sql.schema import Column, Index from sqlalchemy.sql.sqltypes import BigInteger, Integer, Text diff --git a/eventsourcing/system/definition.py b/eventsourcing/system/definition.py index 67f557fc6..a68c40241 100644 --- a/eventsourcing/system/definition.py +++ b/eventsourcing/system/definition.py @@ -1,10 +1,10 @@ import weakref -from _weakref import ReferenceType from abc import ABC, abstractmethod from collections import OrderedDict from copy import deepcopy from typing import Any, Dict, List, Optional, Type, TypeVar +from _weakref import ReferenceType from eventsourcing.application.popo import PopoApplication from eventsourcing.application.process import ProcessApplication from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure diff --git a/eventsourcing/system/multiprocess.py b/eventsourcing/system/multiprocess.py index 5f5402bba..b98e3f348 100644 --- a/eventsourcing/system/multiprocess.py +++ b/eventsourcing/system/multiprocess.py @@ -1,35 +1,23 @@ import multiprocessing from multiprocessing import Manager -from queue import Queue, Empty +from queue import Empty, Queue from time import sleep -from typing import ( - Sequence, - Optional, - Any, - List, - TYPE_CHECKING, - Dict, - Tuple, - Type, -) +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, 
Type from eventsourcing.application.notificationlog import RecordManagerNotificationLog -from eventsourcing.application.process import ( - PromptToQuit, - ProcessApplication, -) +from eventsourcing.application.process import ProcessApplication, PromptToQuit from eventsourcing.application.simple import ( ApplicationWithConcreteInfrastructure, Prompt, - is_prompt_to_pull, PromptToPull, + is_prompt_to_pull, ) from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( - ProgrammingError, CausalDependencyFailed, OperationalError, + ProgrammingError, RecordConflictError, ) from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID diff --git a/eventsourcing/system/ray.py b/eventsourcing/system/ray.py index 5919782e3..2f9e4dc4d 100644 --- a/eventsourcing/system/ray.py +++ b/eventsourcing/system/ray.py @@ -12,8 +12,8 @@ from eventsourcing.application.simple import ( ApplicationWithConcreteInfrastructure, Prompt, - is_prompt_to_pull, PromptToPull, + is_prompt_to_pull, ) from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe @@ -28,9 +28,8 @@ ) from eventsourcing.system.definition import AbstractSystemRunner, System from eventsourcing.system.rayhelpers import RayDbJob, RayPrompt -from eventsourcing.system.runner import DEFAULT_POLL_INTERVAL - from eventsourcing.system.raysettings import ray_init_kwargs +from eventsourcing.system.runner import DEFAULT_POLL_INTERVAL ray.init(**ray_init_kwargs) diff --git a/eventsourcing/system/runner.py b/eventsourcing/system/runner.py index cbc5430c6..bd4b4014f 100644 --- a/eventsourcing/system/runner.py +++ b/eventsourcing/system/runner.py @@ -5,6 +5,7 @@ from threading import Barrier, BrokenBarrierError, Event, Lock, Thread, Timer from time import sleep from typing import ( + TYPE_CHECKING, Any, Callable, Deque, @@ -12,17 +13,13 @@ Generic, List, Optional, - TYPE_CHECKING, Type, Union, no_type_check, ) from eventsourcing.application.notificationlog import NotificationLogReader -from eventsourcing.application.process import ( - ProcessApplication, - PromptToQuit, -) +from eventsourcing.application.process import ProcessApplication, PromptToQuit from eventsourcing.application.simple import ( ApplicationWithConcreteInfrastructure, Prompt, diff --git a/eventsourcing/system/thespian.py b/eventsourcing/system/thespian.py index dc85e11c6..1588e89ee 100644 --- a/eventsourcing/system/thespian.py +++ b/eventsourcing/system/thespian.py @@ -5,11 +5,11 @@ from eventsourcing.application.notificationlog import RecordManagerNotificationLog from eventsourcing.application.process import ProcessApplication -from eventsourcing.application.simple import is_prompt_to_pull, PromptToPull -from eventsourcing.system.definition import System, AbstractSystemRunner +from eventsourcing.application.simple import PromptToPull, is_prompt_to_pull from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import RecordConflictError from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID +from eventsourcing.system.definition import AbstractSystemRunner, System logger = logging.getLogger() diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py index dbac6d35d..ea1205e5c 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py +++ 
b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py @@ -307,7 +307,7 @@ def test_multiprocessing_performance(self): "each)".format(num_proposals, duration, duration / num_proposals) ) - @retry((KeyError, AssertionError), max_attempts=100, wait=0.05, stall=0) + @retry((KeyError, AssertionError), max_attempts=100, wait=0.5, stall=0) def assert_final_value(self, process, id, value): self.assertEqual(process.repository[id].final_value, value) diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py index e839494c7..38a305619 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py @@ -1,10 +1,10 @@ from unittest import skip +from eventsourcing.application.django import DjangoApplication +from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system -from eventsourcing.application.django import DjangoApplication class TestPaxosSystemWithDjango(DjangoTestCase, test_paxos_system.TestPaxosSystem): diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py index 61056ca3f..d15ea66cd 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py @@ -1,11 +1,11 @@ import sys from unittest import skip, skipIf +from eventsourcing.application.popo import PopoApplication +from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) -from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system -from eventsourcing.application.popo import PopoApplication class TestPaxosSystemWithPopo(PopoTestCase, test_paxos_system.TestPaxosSystem): diff --git a/eventsourcing/tests/core_tests/test_aggregate_root.py b/eventsourcing/tests/core_tests/test_aggregate_root.py index 9e8429a0e..2bb1e7e22 100644 --- a/eventsourcing/tests/core_tests/test_aggregate_root.py +++ b/eventsourcing/tests/core_tests/test_aggregate_root.py @@ -1,5 +1,5 @@ import uuid -from typing import Dict, Optional, cast, Generic +from typing import Dict, Generic, Optional, cast from unittest.case import TestCase from uuid import UUID @@ -16,8 +16,8 @@ from eventsourcing.tests.sequenced_item_tests.test_sqlalchemy_record_manager import ( SQLAlchemyRecordManagerTestCase, ) -from eventsourcing.whitehead import TEntity from eventsourcing.utils.topic import get_topic, resolve_topic +from eventsourcing.whitehead import TEntity class TestAggregateRootEvent(TestCase): diff --git a/eventsourcing/tests/core_tests/test_decorators.py b/eventsourcing/tests/core_tests/test_decorators.py index 6c1a35a17..d18c205ab 100644 --- a/eventsourcing/tests/core_tests/test_decorators.py +++ b/eventsourcing/tests/core_tests/test_decorators.py @@ -8,8 +8,8 @@ from eventsourcing.domain.model.events import ( EventHandlersNotEmptyError, assert_event_handlers_empty, - publish, clear_event_handlers, + publish, ) from eventsourcing.example.domainmodel import Example from eventsourcing.utils.topic import get_topic 
diff --git a/eventsourcing/tests/core_tests/test_entity.py b/eventsourcing/tests/core_tests/test_entity.py index 556245b22..49c5866ee 100644 --- a/eventsourcing/tests/core_tests/test_entity.py +++ b/eventsourcing/tests/core_tests/test_entity.py @@ -7,11 +7,7 @@ TimestampedVersionedEntity, VersionedEntity, ) -from eventsourcing.domain.model.events import ( - publish, - subscribe, - unsubscribe, -) +from eventsourcing.domain.model.events import publish, subscribe, unsubscribe from eventsourcing.example.domainmodel import Example, create_new_example from eventsourcing.example.infrastructure import ExampleRepository from eventsourcing.exceptions import ( diff --git a/eventsourcing/tests/core_tests/test_persistence_policy.py b/eventsourcing/tests/core_tests/test_persistence_policy.py index 6170d192a..3e6cc6b29 100644 --- a/eventsourcing/tests/core_tests/test_persistence_policy.py +++ b/eventsourcing/tests/core_tests/test_persistence_policy.py @@ -2,13 +2,10 @@ from uuid import uuid4 from eventsourcing.application.policies import PersistencePolicy, SnapshottingPolicy -from eventsourcing.domain.model.entity import ( - VersionedEntity, - TimestampedEntity, -) +from eventsourcing.domain.model.entity import TimestampedEntity, VersionedEntity from eventsourcing.domain.model.events import publish -from eventsourcing.infrastructure.base import AbstractEventStore from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.infrastructure.base import AbstractEventStore try: from unittest import mock diff --git a/eventsourcing/tests/core_tests/test_simple_application.py b/eventsourcing/tests/core_tests/test_simple_application.py index 5caf7a2fe..865ac16cb 100644 --- a/eventsourcing/tests/core_tests/test_simple_application.py +++ b/eventsourcing/tests/core_tests/test_simple_application.py @@ -1,19 +1,18 @@ from unittest import TestCase from eventsourcing.application.axon import AxonApplication +from eventsourcing.application.django import DjangoApplication +from eventsourcing.application.notificationlog import NotificationLogReader from eventsourcing.application.popo import PopoApplication from eventsourcing.application.simple import SimpleApplication +from eventsourcing.application.snapshotting import SnapshottingApplication +from eventsourcing.application.sqlalchemy import SQLAlchemyApplication +from eventsourcing.domain.model.events import DomainEvent, assert_event_handlers_empty from eventsourcing.exceptions import ProgrammingError +from eventsourcing.tests.core_tests.test_aggregate_root import ExampleAggregateRoot from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) - -from eventsourcing.application.django import DjangoApplication -from eventsourcing.application.notificationlog import NotificationLogReader -from eventsourcing.application.snapshotting import SnapshottingApplication -from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.domain.model.events import assert_event_handlers_empty, DomainEvent -from eventsourcing.tests.core_tests.test_aggregate_root import ExampleAggregateRoot from eventsourcing.utils.random import encoded_random_bytes diff --git a/eventsourcing/tests/core_tests/test_utils.py b/eventsourcing/tests/core_tests/test_utils.py index 592114c2d..3a1af4f44 100644 --- a/eventsourcing/tests/core_tests/test_utils.py +++ b/eventsourcing/tests/core_tests/test_utils.py @@ -1,17 +1,16 @@ import os +import sys from datetime import datetime, timedelta from decimal import 
Decimal from time import sleep from unittest.case import TestCase from uuid import uuid1 -import sys - -from eventsourcing.utils.random import encoded_random_bytes, decode_bytes +from eventsourcing.utils.random import decode_bytes, encoded_random_bytes from eventsourcing.utils.times import ( + decimaltimestamp, decimaltimestamp_from_uuid, utc_timezone, - decimaltimestamp, ) diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py index acd12ff86..5bbb6b5dc 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py @@ -4,18 +4,17 @@ from eventsourcing.domain.model.events import DomainEvent from eventsourcing.example.domainmodel import create_new_example from eventsourcing.example.infrastructure import ExampleRepository -from eventsourcing.infrastructure.cassandra.records import StoredEventRecord -from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager from eventsourcing.infrastructure.cassandra.datastore import ( CassandraDatastore, CassandraSettings, ) +from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager +from eventsourcing.infrastructure.cassandra.records import StoredEventRecord from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import StoredEvent from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper from eventsourcing.tests.datastore_tests.base import AbstractDatastoreTestCase - # In this test the default SequencedItem class is replaced with a "stored event" class. # How easy is it to customize the infrastructure to support that? We just need # to define the new sequenced item class, define a suitable record class, diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py index 3423b5641..acd264b26 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py @@ -2,13 +2,13 @@ from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.entity import TimeuuidedEntity -from eventsourcing.domain.model.events import EventWithTimeuuid, DomainEvent +from eventsourcing.domain.model.events import DomainEvent, EventWithTimeuuid from eventsourcing.infrastructure.cassandra.datastore import ( CassandraDatastore, CassandraSettings, ) -from eventsourcing.infrastructure.cassandra.records import TimeuuidSequencedRecord from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager +from eventsourcing.infrastructure.cassandra.records import TimeuuidSequencedRecord from eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import SequencedItem @@ -19,7 +19,6 @@ ) from eventsourcing.utils.times import decimaltimestamp_from_uuid - # This test has events with TimeUUID value as the 'event ID'. How easy is it to customize # the infrastructure to support that? 
We just need to make a model that uses these events, # define a suitable database table, and configure the other components. It's easy. diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py index 3ad613713..676463af7 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py @@ -6,15 +6,14 @@ from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import StoredEvent from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper -from eventsourcing.infrastructure.sqlalchemy.records import StoredEventRecord, Base -from eventsourcing.infrastructure.sqlalchemy.manager import SQLAlchemyRecordManager from eventsourcing.infrastructure.sqlalchemy.datastore import ( SQLAlchemyDatastore, SQLAlchemySettings, ) +from eventsourcing.infrastructure.sqlalchemy.manager import SQLAlchemyRecordManager +from eventsourcing.infrastructure.sqlalchemy.records import Base, StoredEventRecord from eventsourcing.tests.datastore_tests.base import AbstractDatastoreTestCase - # This test replaces the default SequencedItem class with a StoredEvent class. # How easy is it to customize the infrastructure to support that? We just need # to define the new sequenced item class, define a suitable record class, diff --git a/eventsourcing/tests/customization_tests/test_customized_projections_old.py b/eventsourcing/tests/customization_tests/test_customized_projections_old.py index 019edf181..d1e81e8c8 100644 --- a/eventsourcing/tests/customization_tests/test_customized_projections_old.py +++ b/eventsourcing/tests/customization_tests/test_customized_projections_old.py @@ -5,7 +5,6 @@ SQLAlchemyRecordManagerTestCase, ) - # ## Old stuff... 
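The comments in the customization tests above describe swapping the default SequencedItem class for a "stored event" class by defining a new sequenced item class and a matching record class. As a rough, self-contained illustration of what such a sequenced item amounts to (this sketch re-declares the fields locally with made-up topic and state values, rather than importing the library's own StoredEvent class), a stored event is just a named tuple whose first two fields locate the item in its sequence and whose last two fields carry the event topic and the serialised event state:

.. code:: python

    from collections import namedtuple
    from uuid import uuid4

    # Illustrative stand-in for a "stored event" sequenced item.
    MyStoredEvent = namedtuple(
        "MyStoredEvent",
        ["originator_id", "originator_version", "topic", "state"],
    )

    stored_event = MyStoredEvent(
        originator_id=uuid4(),        # which sequence (aggregate) the item belongs to
        originator_version=0,         # position of the item in that sequence
        topic="mypackage.domainmodel#Example.Created",  # made-up topic string
        state='{"foo": "bar"}',       # made-up serialised event state
    )
    assert stored_event.originator_version == 0

A record class for this shape would simply have one column per field, which is what the customised record classes in these tests provide.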
diff --git a/eventsourcing/tests/datastore_tests/base.py b/eventsourcing/tests/datastore_tests/base.py index 457014f51..4f0f2e2d8 100644 --- a/eventsourcing/tests/datastore_tests/base.py +++ b/eventsourcing/tests/datastore_tests/base.py @@ -1,9 +1,9 @@ from abc import abstractmethod -from typing import Type, Optional +from typing import Optional, Type from eventsourcing.infrastructure.datastore import ( - DatastoreTableError, AbstractDatastore, + DatastoreTableError, ) from eventsourcing.infrastructure.factory import InfrastructureFactory from eventsourcing.tests.base import AbstractTestCase diff --git a/eventsourcing/tests/datastore_tests/test_axonserver.py b/eventsourcing/tests/datastore_tests/test_axonserver.py index e1523a4f3..0b0431d52 100644 --- a/eventsourcing/tests/datastore_tests/test_axonserver.py +++ b/eventsourcing/tests/datastore_tests/test_axonserver.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from axonclient.client import AxonClient, AxonEvent, DEFAULT_LOCAL_AXONSERVER_URI +from axonclient.client import DEFAULT_LOCAL_AXONSERVER_URI, AxonClient, AxonEvent from eventsourcing.infrastructure.axonserver.datastore import ( AxonDatastore, diff --git a/eventsourcing/tests/datastore_tests/test_cassandra.py b/eventsourcing/tests/datastore_tests/test_cassandra.py index f0742ba10..3503f095f 100644 --- a/eventsourcing/tests/datastore_tests/test_cassandra.py +++ b/eventsourcing/tests/datastore_tests/test_cassandra.py @@ -6,6 +6,10 @@ from cassandra.cqlengine import CQLEngineException from eventsourcing.exceptions import DatasourceSettingsError +from eventsourcing.infrastructure.cassandra.datastore import ( + CassandraDatastore, + CassandraSettings, +) from eventsourcing.infrastructure.cassandra.factory import ( CassandraInfrastructureFactory, ) @@ -14,10 +18,6 @@ SnapshotRecord, TimestampSequencedRecord, ) -from eventsourcing.infrastructure.cassandra.datastore import ( - CassandraDatastore, - CassandraSettings, -) from eventsourcing.infrastructure.datastore import ( DatastoreConnectionError, DatastoreTableError, diff --git a/eventsourcing/tests/example_application_tests/base.py b/eventsourcing/tests/example_application_tests/base.py index 7848dd678..61dcddeb5 100644 --- a/eventsourcing/tests/example_application_tests/base.py +++ b/eventsourcing/tests/example_application_tests/base.py @@ -1,6 +1,5 @@ -from uuid import uuid4 - from time import sleep +from uuid import uuid4 from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.snapshot import Snapshot diff --git a/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py b/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py index 7a654ddfe..41bff77e5 100644 --- a/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py +++ b/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py @@ -1,4 +1,3 @@ -from eventsourcing.utils.cipher.aes import AESCipher from eventsourcing.tests.example_application_tests import base from eventsourcing.tests.example_application_tests.test_example_application_with_cassandra import ( TestExampleApplicationWithCassandra, @@ -6,6 +5,7 @@ from eventsourcing.tests.example_application_tests.test_example_application_with_sqlalchemy import ( TestExampleApplicationWithSQLAlchemy, ) +from eventsourcing.utils.cipher.aes import AESCipher class WithEncryption(base.WithExampleApplication): diff --git a/eventsourcing/tests/system_test_fixtures.py 
b/eventsourcing/tests/system_test_fixtures.py index 3bdd9c051..adec0c846 100644 --- a/eventsourcing/tests/system_test_fixtures.py +++ b/eventsourcing/tests/system_test_fixtures.py @@ -1,6 +1,6 @@ import logging import os -from uuid import uuid5, NAMESPACE_OID +from uuid import NAMESPACE_OID, uuid5 from eventsourcing.application.process import ProcessApplication, WrappedRepository from eventsourcing.domain.model.aggregate import BaseAggregateRoot diff --git a/eventsourcing/tests/test_notificationlog.py b/eventsourcing/tests/test_notificationlog.py index d69ca206a..f53f29bef 100644 --- a/eventsourcing/tests/test_notificationlog.py +++ b/eventsourcing/tests/test_notificationlog.py @@ -3,9 +3,9 @@ from uuid import uuid4 from eventsourcing.application.notificationlog import ( - RecordManagerNotificationLog, BigArrayNotificationLog, NotificationLogReader, + RecordManagerNotificationLog, ) from eventsourcing.domain.model.events import DomainEvent from eventsourcing.infrastructure.repositories.array import BigArrayRepository diff --git a/eventsourcing/tests/test_process.py b/eventsourcing/tests/test_process.py index 9f7ea2720..241fc3385 100644 --- a/eventsourcing/tests/test_process.py +++ b/eventsourcing/tests/test_process.py @@ -25,13 +25,13 @@ from eventsourcing.domain.model.snapshot import Snapshot from eventsourcing.exceptions import ( CausalDependencyFailed, - PromptFailed, ProgrammingError, + PromptFailed, ) from eventsourcing.infrastructure.sqlalchemy.records import Base -from eventsourcing.whitehead import TEntity from eventsourcing.utils.topic import resolve_topic from eventsourcing.utils.transcoding import ObjectJSONDecoder +from eventsourcing.whitehead import TEntity class TestProcessApplication(TestCase): diff --git a/eventsourcing/tests/test_process_with_django.py b/eventsourcing/tests/test_process_with_django.py index f374f3844..2212cd1fa 100644 --- a/eventsourcing/tests/test_process_with_django.py +++ b/eventsourcing/tests/test_process_with_django.py @@ -2,10 +2,10 @@ from django.db import models from django.db.backends.base.schema import BaseDatabaseSchemaEditor +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_process import TestProcessApplication diff --git a/eventsourcing/tests/test_process_with_popo.py b/eventsourcing/tests/test_process_with_popo.py index 5edf1d4c0..7d880b918 100644 --- a/eventsourcing/tests/test_process_with_popo.py +++ b/eventsourcing/tests/test_process_with_popo.py @@ -1,7 +1,6 @@ from unittest import skip from eventsourcing.application.popo import PopoApplication - from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) diff --git a/eventsourcing/tests/test_ray_runner.py b/eventsourcing/tests/test_ray_runner.py index d59961077..5c6ffb920 100644 --- a/eventsourcing/tests/test_ray_runner.py +++ b/eventsourcing/tests/test_ray_runner.py @@ -16,10 +16,7 @@ clear_event_handlers, ) from eventsourcing.system.definition import System -from eventsourcing.system.ray import ( - RayProcess, - RayRunner, -) +from eventsourcing.system.ray import RayProcess, RayRunner from eventsourcing.system.rayhelpers import RayPrompt from eventsourcing.tests.system_test_fixtures import ( Orders, diff --git a/eventsourcing/tests/test_ray_runner_with_django.py b/eventsourcing/tests/test_ray_runner_with_django.py index 771b72981..63776d31c 
100644 --- a/eventsourcing/tests/test_ray_runner_with_django.py +++ b/eventsourcing/tests/test_ray_runner_with_django.py @@ -1,9 +1,9 @@ from unittest import skip +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_ray_runner import TestRayRunner diff --git a/eventsourcing/tests/test_system.py b/eventsourcing/tests/test_system.py index 7abee672d..a7e2101a8 100644 --- a/eventsourcing/tests/test_system.py +++ b/eventsourcing/tests/test_system.py @@ -3,16 +3,15 @@ from unittest import TestCase from uuid import uuid4 -from eventsourcing.system.multiprocess import MultiprocessRunner from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.runner import MultiThreadedRunner -from eventsourcing.system.definition import System from eventsourcing.domain.model.events import ( assert_event_handlers_empty, clear_event_handlers, ) -from eventsourcing.exceptions import RepositoryKeyError, ProgrammingError -from eventsourcing.tests.test_process import ExampleAggregate +from eventsourcing.exceptions import ProgrammingError, RepositoryKeyError +from eventsourcing.system.definition import System +from eventsourcing.system.multiprocess import MultiprocessRunner +from eventsourcing.system.runner import MultiThreadedRunner from eventsourcing.tests.system_test_fixtures import ( Examples, Order, @@ -24,6 +23,7 @@ create_new_order, set_db_uri, ) +from eventsourcing.tests.test_process import ExampleAggregate class TestSystem(TestCase): diff --git a/eventsourcing/tests/test_system_with_django.py b/eventsourcing/tests/test_system_with_django.py index da5d2f664..cc1ea54f7 100644 --- a/eventsourcing/tests/test_system_with_django.py +++ b/eventsourcing/tests/test_system_with_django.py @@ -1,8 +1,7 @@ +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) - -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_system import TestSystem diff --git a/eventsourcing/tests/test_system_with_popo.py b/eventsourcing/tests/test_system_with_popo.py index e2af22174..876d102d2 100644 --- a/eventsourcing/tests/test_system_with_popo.py +++ b/eventsourcing/tests/test_system_with_popo.py @@ -1,23 +1,22 @@ -from time import sleep, time, process_time +from time import process_time, sleep, time from unittest import skip +from eventsourcing.application.popo import PopoApplication +from eventsourcing.system.definition import System from eventsourcing.system.runner import ( SteppingMultiThreadedRunner, SteppingSingleThreadedRunner, ) -from eventsourcing.system.definition import System from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) - -from eventsourcing.application.popo import PopoApplication -from eventsourcing.tests.test_system import TestSystem from eventsourcing.tests.system_test_fixtures import ( - create_new_order, Orders, - Reservations, Payments, + Reservations, + create_new_order, ) +from eventsourcing.tests.test_system import TestSystem class TestSystemWithPopo(PopoTestCase, TestSystem): diff --git a/eventsourcing/tests/test_thespian_runner.py b/eventsourcing/tests/test_thespian_runner.py index 9d81c6ee7..21e4655b8 100644 --- a/eventsourcing/tests/test_thespian_runner.py +++ 
b/eventsourcing/tests/test_thespian_runner.py @@ -4,18 +4,18 @@ import unittest from unittest import skip -from eventsourcing.system.thespian import ( - ThespianRunner, - shutdown_thespian_system, - start_thespian_system, - start_multiproc_tcp_base_system, -) from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.definition import System from eventsourcing.domain.model.events import ( assert_event_handlers_empty, clear_event_handlers, ) +from eventsourcing.system.definition import System +from eventsourcing.system.thespian import ( + ThespianRunner, + shutdown_thespian_system, + start_multiproc_tcp_base_system, + start_thespian_system, +) from eventsourcing.tests.system_test_fixtures import ( Orders, Payments, diff --git a/eventsourcing/tests/test_thespian_runner_with_django.py b/eventsourcing/tests/test_thespian_runner_with_django.py index d58bb32f1..1df290ee8 100644 --- a/eventsourcing/tests/test_thespian_runner_with_django.py +++ b/eventsourcing/tests/test_thespian_runner_with_django.py @@ -1,7 +1,7 @@ +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_thespian_runner import TestThespianRunner diff --git a/eventsourcing/tests/test_transcoding.py b/eventsourcing/tests/test_transcoding.py index a0b6ec317..24e84a86b 100644 --- a/eventsourcing/tests/test_transcoding.py +++ b/eventsourcing/tests/test_transcoding.py @@ -1,20 +1,19 @@ import datetime -from collections import deque, namedtuple, OrderedDict, ChainMap +from collections import ChainMap, OrderedDict, deque, namedtuple +from decimal import Decimal from enum import Enum from fractions import Fraction - -try: - from dataclasses import make_dataclass -except: - make_dataclass = None - -from decimal import Decimal from unittest import TestCase, skipIf from uuid import NAMESPACE_URL, UUID from eventsourcing.utils.times import utc_timezone from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder +try: + from dataclasses import make_dataclass +except: + make_dataclass = None + class TestTranscoding(TestCase): def test_str(self): diff --git a/eventsourcing/tests/test_transcoding_v1.py b/eventsourcing/tests/test_transcoding_v1.py index c6e899d76..45eef212b 100644 --- a/eventsourcing/tests/test_transcoding_v1.py +++ b/eventsourcing/tests/test_transcoding_v1.py @@ -1,23 +1,19 @@ import datetime from collections import deque, namedtuple +from decimal import Decimal from enum import Enum from fractions import Fraction +from unittest import TestCase, skipIf +from uuid import NAMESPACE_URL, UUID + +from eventsourcing.utils.times import utc_timezone +from eventsourcing.utils.transcoding_v1 import ObjectJSONDecoder, ObjectJSONEncoder try: from dataclasses import make_dataclass except: make_dataclass = None -from decimal import Decimal -from unittest import TestCase, skipIf -from uuid import NAMESPACE_URL, UUID - -from eventsourcing.utils.times import utc_timezone -from eventsourcing.utils.transcoding_v1 import ( - ObjectJSONDecoder, - ObjectJSONEncoder, -) - class TestTranscoding(TestCase): def test_str(self): diff --git a/eventsourcing/utils/cipher/aes.py b/eventsourcing/utils/cipher/aes.py index 2fe401b5d..0089b2bc7 100644 --- a/eventsourcing/utils/cipher/aes.py +++ b/eventsourcing/utils/cipher/aes.py @@ -1,8 +1,3 @@ -import zlib - -import binascii -from base64 import b64decode, 
b64encode - from Crypto.Cipher import AES from eventsourcing.exceptions import DataIntegrityError diff --git a/eventsourcing/utils/random.py b/eventsourcing/utils/random.py index d420c2ba0..6f2ffbbc5 100644 --- a/eventsourcing/utils/random.py +++ b/eventsourcing/utils/random.py @@ -1,6 +1,5 @@ -from base64 import b64encode, b64decode - import os +from base64 import b64decode, b64encode def encoded_random_bytes(num_bytes: int) -> str: diff --git a/eventsourcing/utils/transcoding.py b/eventsourcing/utils/transcoding.py index 690c531c3..ec6ef2ee3 100644 --- a/eventsourcing/utils/transcoding.py +++ b/eventsourcing/utils/transcoding.py @@ -11,16 +11,16 @@ from typing import Optional from uuid import UUID +import dateutil.parser + from eventsourcing.exceptions import EncoderTypeError +from eventsourcing.utils.topic import get_topic, resolve_topic try: import orjson except ImportError: orjson: Optional[Module] = None # type: ignore -import dateutil.parser - -from eventsourcing.utils.topic import get_topic, resolve_topic JSON_SEPARATORS = (",", ":") From 5f25c59ce8d894b82681920b30177938782be603 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Thu, 30 Apr 2020 02:20:34 +0100 Subject: [PATCH 20/50] Removed old __future__ imports, and flaskwsgi.py (needed for py3.3). --- .../domain/model/generalizedsuffixtree.py | 3 --- .../suffixtrees/domain/model/suffixtree.py | 27 ++++++++----------- eventsourcing/domain/model/collection.py | 2 -- eventsourcing/example/interface/flaskwsgi.py | 7 ----- .../suffixtrees_tests/test_suffix_tree.py | 3 --- .../test_suffix_tree_generalized.py | 3 --- eventsourcing/tests/test_collection.py | 2 -- eventsourcing/tests/test_flask.py | 3 +-- .../unit_test_fixtures_suffix_tree_text.py | 3 --- 9 files changed, 12 insertions(+), 41 deletions(-) delete mode 100644 eventsourcing/example/interface/flaskwsgi.py diff --git a/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py b/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py index 336c2baab..b57adce65 100644 --- a/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py +++ b/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # from collections import OrderedDict # from time import sleep # from uuid import uuid4 diff --git a/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py b/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py index 6edfd4379..faaee24ff 100644 --- a/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py +++ b/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py @@ -1,17 +1,12 @@ -# coding=utf-8 -from __future__ import unicode_literals - import datetime from uuid import uuid4 from eventsourcing.domain.model.decorators import attribute from eventsourcing.domain.model.entity import ( - AttributeChanged, - Created, - Discarded, TimestampedVersionedEntity, ) -from eventsourcing.domain.model.events import publish +from eventsourcing.domain.model.events import AttributeChangedEvent, CreatedEvent, \ + DiscardedEvent, publish from eventsourcing.example.application import ExampleApplication from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository @@ -22,13 +17,13 @@ class SuffixTree(TimestampedVersionedEntity): for construction. 
""" - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__(self, root_node_id, case_insensitive=False, **kwargs): @@ -203,13 +198,13 @@ class Node(TimestampedVersionedEntity): """A node in the suffix tree. """ - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__(self, suffix_node_id=None, *args, **kwargs): @@ -232,13 +227,13 @@ class Edge(TimestampedVersionedEntity): """An edge in the suffix tree. """ - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__( diff --git a/eventsourcing/domain/model/collection.py b/eventsourcing/domain/model/collection.py index eca16aa37..8f4521830 100644 --- a/eventsourcing/domain/model/collection.py +++ b/eventsourcing/domain/model/collection.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - from typing import Any, Iterator, Optional, Set from uuid import UUID diff --git a/eventsourcing/example/interface/flaskwsgi.py b/eventsourcing/example/interface/flaskwsgi.py deleted file mode 100644 index 887ec4142..000000000 --- a/eventsourcing/example/interface/flaskwsgi.py +++ /dev/null @@ -1,7 +0,0 @@ -import eventsourcing.example.interface.flaskapp - -# Todo: Investigate why removing this file breaks python3.3. -# For some reason, this file is needed to run flaskapp.py -# with uWSGI and python3.3. 
-# -application = eventsourcing.example.interface.flaskapp.application diff --git a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py index 5a04ea183..1f14070c0 100644 --- a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py +++ b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # import os # # from eventsourcing.contrib.suffixtrees.domain.model.suffixtree import SuffixTree, SuffixTreeApplication, \ diff --git a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py index 550e13a7c..06dd492a3 100644 --- a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py +++ b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # import datetime # import traceback # import uuid diff --git a/eventsourcing/tests/test_collection.py b/eventsourcing/tests/test_collection.py index b425dbc2c..01d9f4f27 100644 --- a/eventsourcing/tests/test_collection.py +++ b/eventsourcing/tests/test_collection.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - from unittest.case import TestCase from uuid import uuid4 diff --git a/eventsourcing/tests/test_flask.py b/eventsourcing/tests/test_flask.py index fe4e61d22..6622c0c2a 100644 --- a/eventsourcing/tests/test_flask.py +++ b/eventsourcing/tests/test_flask.py @@ -29,7 +29,6 @@ path_to_eventsourcing = dirname(dirname(abspath(eventsourcing.__file__))) path_to_interface_module = dirname(abspath(eventsourcing.example.interface.__file__)) path_to_flaskapp = join(path_to_interface_module, "flaskapp.py") -path_to_flaskwsgi = join(dirname(path_to_flaskapp), "flaskwsgi.py") @notquick @@ -125,7 +124,7 @@ def start_app(self): cmd += ["--master"] cmd += ["--processes", "4"] cmd += ["--threads", "2"] - cmd += ["--wsgi-file", path_to_flaskwsgi] + cmd += ["--wsgi-file", path_to_flaskapp] cmd += ["--http", ":{}".format(self.port)] pythonpath = ":".join( os.getenv("PYTHONPATH", "").split(":") + [path_to_eventsourcing] diff --git a/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py b/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py index 1b941e5b9..1027e7bb0 100644 --- a/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py +++ b/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # LONG_TEXT_CONT = """ # This article's use of external links may not follow Wikipedia's policies or guidelines. Please improve this # article by removing excessive and inappropriate external links. (August 2010) From 42dcba344f79f91e75308f756d0ace19ce89fceb Mon Sep 17 00:00:00 2001 From: Alexandre Fedossov Date: Sat, 2 May 2020 17:38:19 +0300 Subject: [PATCH 21/50] Add linting command to Makefile - Updated setup.cfg and setup.py - Updated the contributing.rst documentation Add .git-blame-ignore-revs to preserve repository history That is useful to have clean history after significant refactoring, i.e. import sorting or black formatting (any style guide changes). 
To apply this file, run locally: git config --local blame.ignoreRevsFile .git-blame-ignore-revs --- .git-blame-ignore-revs | 5 +++ Makefile | 40 ++++++++++++++++++++ docs/topics/contributing.rst | 73 ++++++++++++++++++++++++++++++++++++ setup.cfg | 20 ++++++++++ setup.py | 2 +- 5 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..8f9d52e9c --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,5 @@ +# applied imports sorting and black formatting +1190c2fe4cf3f9b233b61f8f9a857c1007d11345 +6f5bf49327b66055f1ff865285543d070856c602 +53cfd70288051457999074479ddbfbaecb052f10 +6d9ec698a6310b701ffbcdf987ff5a3675f6e943 diff --git a/Makefile b/Makefile index baeb267d1..1926d8fd4 100644 --- a/Makefile +++ b/Makefile @@ -12,6 +12,10 @@ install: docker-pull: @docker-compose pull +.PHONY: docker-build +docker-build: + @docker-compose build + .PHONY: docker-up docker-up: @docker-compose up -d @@ -30,6 +34,42 @@ docker-logs: @docker-compose logs --follow --tail=1000 +.PHONY: lint-black +lint-black: + @black --check --diff . + +.PHONY: lint-flake8 +lint-flake8: + @flake8 eventsourcing + +.PHONY: lint-isort +lint-isort: + @isort --check-only --diff --recursive . + +.PHONY: lint-mypy +lint-mypy: + @mypy --ignore-missing-imports eventsourcing + +.PHONY: lint-dockerfile +lint-dockerfile: + @docker run --rm -i replicated/dockerfilelint:ad65813 < ./dev/Dockerfile_eventsourcing_requirements + +.PHONY: lint +lint: lint-black lint-flake8 lint-isort lint-mypy lint-dockerfile + + +.PHONY: fmt-isort +fmt-isort: + @isort -y --recursive . + +.PHONY: fmt-black +fmt-black: + @black . + +.PHONY: fmt +fmt: fmt-black fmt-isort + + .PHONY: test test: @coverage run \ diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 3623e2b64..32148412b 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -65,6 +65,25 @@ required for contribution:: make install +Setup `git` to ignore specific revs with `blame`. + +This project is old, and several times in its history a massive changes were performed. +One such change is moving towards `isort/flake8` utility, another] bulk update is +`black` formatter. While these changes are inevitable, they clutter the history, +especially if you use `git blame` or _Annotate_ option in PyCharm. +But in newer versions of git (>= 2.23), this can be mitigated: new options `--ignore-rev`_ +and `--ignore-revs-file`_ were added. There is a file in this repository called +`.git-blame-ignore-revs` which contains all such major reformattings. In order to pick it up by +`git blame` and PyCharm, add a special config line:: + + git config --local blame.ignoreRevsFile .git-blame-ignore-revs + +More info can be found here_. + +.. _--ignore-rev: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revltrevgt +.. _--ignore-revs-file: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt +.. _here: https://www.moxio.com/blog/43/ignoring-bulk-change-commits-with-git-blame + .. _docker-containers: @@ -76,6 +95,10 @@ To pull docker images:: make docker-pull +To build docker images:: + + make docker-build + To up and keep running containers in detached mode:: make docker-up @@ -128,3 +151,53 @@ into static HTML files at `./docs/_build/html`:: make docs .. 
_Sphinx: https://www.sphinx-doc.org/en/master/ + + +Linting your code +================= + +For now, linting your changes is completely optional - we do not have any checks on CI for it. + +Run isort_ to check imports sorting:: + + make lint-isort + +We are using Black_ as a tool for style guide enforcement:: + + make lint-black + +We are using Flake8_ (and it's `Flake8 BugBear plugin`_) to check the code for PEP8_ compatibility:: + + make lint-flake8 + +Mypy_ is a static type checker for Python 3 and Python 2.7. Run mypy to check code for accurate typing annotations:: + + make lint-mypy + +Dockerfilelint_ is an `npm` module that analyzes a Dockerfile and looks for +common traps, mistakes and helps enforce best practices:: + + make lint-dockerfile + +... and finally, to run all the checks from above, use:: + + make lint + +.. _isort: https://github.com/timothycrosley/isort +.. _Black: https://black.readthedocs.io/en/stable/ +.. _Dockerfilelint: https://hub.docker.com/r/replicated/dockerfilelint +.. _Flake8: https://flake8.pycqa.org/en/latest/ +.. _Flake8 BugBear plugin: https://github.com/PyCQA/flake8-bugbear +.. _PEP8: https://www.python.org/dev/peps/pep-0008/ +.. _Mypy: https://mypy.readthedocs.io/en/stable/ + + +Automatic formatting +--------------------------------- + +.. note:: + In order to keep your Pull Request clean, please, do not apply it for all project but your specific changes. + +To apply automatic formatting by using isort_ and Black_, run:: + + make fmt diff --git a/setup.cfg b/setup.cfg index 1e0eac59c..56675775b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,3 +41,23 @@ strict_optional = True no_implicit_optional = True disallow_untyped_defs = True ;disallow_any_generics = True + +[isort] +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +line_length = 88 +combine_as_imports = true +default_section = LOCALFOLDER +known_first_party = eventsourcing +known_standard_library = dataclasses +known_third_party = django +not_skip = __init__.py +sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER +skip = .eggs,.pip-cache,venv,.venv + +[flake8] +max-line-length = 80 +select = C,E,F,W,B,B950 +ignore = E203, E501, W503 diff --git a/setup.py b/setup.py index ebe2264d4..72d197bcf 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ "sphinx-autobuild", ] -dev_requires = docs_requires + ["black", "mypy"] +dev_requires = docs_requires + ["black", "mypy", "flake8", "flake8-bugbear", "isort"] long_description = """ A library for event sourcing in Python. From 5268b02eef9ce1148dc54b5ffae717cc22d663e6 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 2 May 2020 21:51:56 +0100 Subject: [PATCH 22/50] Improved contributing.rst (added MacOS section, adjusted other things). --- docs/topics/contributing.rst | 152 ++++++++++++++++++++++++----------- 1 file changed, 107 insertions(+), 45 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 32148412b..d7dd5d99a 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -26,20 +26,27 @@ are several ways you can help the library’s development: - Join the Slack_ channel and share your ideas for how to improve the library. We’re always open to suggestions. - Submit patches or pull requests for new and/or fixed behavior. -- Improve the documentation or write unit tests. +- Improve the documentation or improve the unit test suites. 
Local development ================= -This library using `GNU make`_ utility and multiple python packages for code linting. You can read more -about it at `this article`_. The actual code of the provided commands below can be easily found and read in -`Makefile` at the root of this project. But first, you need to set up your local development environment. +To make changes to the library, you want to set up a local environment. + +To get set up, create a virtual Python environment, install Python dependencies, +and install the databases that are used by the test suite. This library is using the +`GNU make`_ utility. You can read more about it in `this article`_. + +There are commands to install Python dependencies into a virtual Python environment, +to start and stop databases, to run the test suite, to build the docs, and for code +linting. The actual code of the commands described below can be easily found in +``Makefile`` at the root of this project. But first, you need to set up your local +development environment. .. _GNU make: https://www.gnu.org/software/make/ .. _this article: https://opensource.com/article/18/8/what-how-makefile - .. _development-environment: Configure the development environment @@ -54,29 +61,40 @@ But also, you may use pyenv_. Consider to use virtualenv for development, choose for your flavor: +- virtualenv_ - virtualenvwrapper_ - pyenv-virtualenv_ for pyenv_ +.. _virtualenv: https://pypi.org/project/virtualenv/ .. _virtualenvwrapper: https://virtualenvwrapper.readthedocs.io/en/latest/ .. _pyenv-virtualenv: https://github.com/pyenv/pyenv-virtualenv -Use the following command inside your activated virtualenv to install all project dependencies +For example, create and activate a virtual Python environment using ``virtualenv`` directly:: + + $ virtualenv ~/.virtualenvs/eventsourcing-py3 + $ source ~/.virtualenvs/eventsourcing-py3/bin/activate + +Inside your activated virtualenv, use the following command to install all project dependencies required for contribution:: - make install + $ make install + + +Git blame +--------- -Setup `git` to ignore specific revs with `blame`. +Setup ``git`` to ignore specific revs with ``blame``. This project is old, and several times in its history a massive changes were performed. -One such change is moving towards `isort/flake8` utility, another] bulk update is -`black` formatter. While these changes are inevitable, they clutter the history, -especially if you use `git blame` or _Annotate_ option in PyCharm. -But in newer versions of git (>= 2.23), this can be mitigated: new options `--ignore-rev`_ -and `--ignore-revs-file`_ were added. There is a file in this repository called -`.git-blame-ignore-revs` which contains all such major reformattings. In order to pick it up by -`git blame` and PyCharm, add a special config line:: +One such change is moving towards use of ``isort`` and ``flake8`` and ``black``. While +these changes are inevitable, they clutter the history, especially if you use ``git blame`` +or _Annotate_ option in PyCharm. But in newer versions of git (>= 2.23), this can be +mitigated: new options `--ignore-rev`_ and `--ignore-revs-file`_ were added. There is +a file in this repository called ``.git-blame-ignore-revs`` which contains all such +major reformattings. In order to pick it up by ``git blame`` and PyCharm, add a special +config line:: - git config --local blame.ignoreRevsFile .git-blame-ignore-revs + $ git config --local blame.ignoreRevsFile .git-blame-ignore-revs More info can be found here_. 
@@ -85,59 +103,103 @@ More info can be found here_. .. _here: https://www.moxio.com/blog/43/ignoring-bulk-change-commits-with-git-blame +Run databases +============= + .. _docker-containers: -Manage Docker containers -======================== +It's easier to run the databases in Docker containers, but it's faster to run them directly. + +Run databases in Docker containers +---------------------------------- + +You can run the databases in Docker containers. -You need to manage Docker containers with third-party services to have a better experience with local development. To pull docker images:: - make docker-pull + $ make docker-pull To build docker images:: - make docker-build + $ make docker-build To up and keep running containers in detached mode:: - make docker-up + $ make docker-up To stop the containers:: - make docker-stop + $ make docker-stop To tear down the containers removing volumes and “orphans”:: - make docker-down + $ make docker-down To attach to the latest containers output:: - make docker-logs + $ make docker-logs -All of the commands using predefined “COMPOSE_FILE “and “COMPOSE_PROJECT_NAME “to keep +All of the commands using predefined “COMPOSE_FILE“ and “COMPOSE_PROJECT_NAME“ to keep your containers in a more organized and straightforward way. **COMPOSE_FILE** is used by *docker-compose* utility to pick development services -configuration. The valid format of this value is: ``dev/docker-compose.yaml “. +configuration. The valid format of this value is: ``dev/docker-compose.yaml``. **COMPOSE_PROJECT_NAME** sets the project name. This value used to prepend the -containers on startup along with the service name. “eventsourcing “is a great default value for it. +containers on startup along with the service name. ``eventsourcing`` is a great +default value for it. + +.. _macos-databases: + +Run databases on MacOS +---------------------- + +To install the databases on MacOS, you can run the following commands:: + $ brew install mysql + $ brew install posgresql + $ brew install redis + $ brew install cassandra + $ ./dev/download_axon_server.sh -Testing -======= +To start the services, you can run run:: + + $ brew_services_start + +To stop the services, you can run run:: + + $ brew_services_stop + +Before running the tests for the first time, create a database in MySQL, and configure user access:: + + $ mysql -u root + mysql> CREATE DATABASE EVENTSOURCING; + mysql> CREATE USER 'eventsourcing'@'localhost' IDENTIFIED BY 'eventsourcing'; + mysql> GRANT ALL PRIVILEGES ON eventsourcing.* TO 'eventsourcing'@'localhost'; + +You will also need to create a database in PostgreSQL:: + + $ createdb eventsourcing + + +Run tests +========= Ensure that you’ve set up your development environment (see :ref:`development-environment`) and -and required services are up and running (see :ref:`docker-containers`). +and required services are up and running (see :ref:`docker-containers`, or :ref:`macos-databases`). + +Running tests from an IDE such as PyCharm allows easy navigation to code files. + +You can run the full test suite using ``make``:: + + $ make test -Just run this:: +You can also run the test suite, but skip the slower tests:: - make test + $ make quicktest .. note:: - Not all tests doing teardown in the right way: sometimes tests failed because of the - data left at DBs, so it requires ``make docker-down`` for a fresh start. + To re-run tests, sometimes it requires ``make docker-down`` for a fresh start. ... or push the code to trigger TravisCI checks. 
@@ -146,9 +208,9 @@ Building documentation ====================== This project using Sphinx_ documentation builder tool. Run this command to compile documentation -into static HTML files at `./docs/_build/html`:: +into static HTML files at ``./docs/_build/html``:: - make docs + $ make docs .. _Sphinx: https://www.sphinx-doc.org/en/master/ @@ -160,28 +222,28 @@ For now, linting your changes is completely optional - we do not have any checks Run isort_ to check imports sorting:: - make lint-isort + $ make lint-isort We are using Black_ as a tool for style guide enforcement:: - make lint-black + $ make lint-black We are using Flake8_ (and it's `Flake8 BugBear plugin`_) to check the code for PEP8_ compatibility:: - make lint-flake8 + $ make lint-flake8 Mypy_ is a static type checker for Python 3 and Python 2.7. Run mypy to check code for accurate typing annotations:: - make lint-mypy + $ make lint-mypy -Dockerfilelint_ is an `npm` module that analyzes a Dockerfile and looks for +Dockerfilelint_ is an ``npm`` module that analyzes a Dockerfile and looks for common traps, mistakes and helps enforce best practices:: - make lint-dockerfile + $ make lint-dockerfile ... and finally, to run all the checks from above, use:: - make lint + $ make lint .. _isort: https://github.com/timothycrosley/isort .. _Black: https://black.readthedocs.io/en/stable/ @@ -200,4 +262,4 @@ Automatic formatting To apply automatic formatting by using isort_ and Black_, run:: - make fmt + $ make fmt From eee00255ada89f592349f530f1ef7e618814f725 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 00:21:50 +0100 Subject: [PATCH 23/50] Improved contributing.rst (better structure, better wording). --- docs/topics/contributing.rst | 74 +++++++++++++++++++----------------- 1 file changed, 40 insertions(+), 34 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index d7dd5d99a..bde70b7b0 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -5,6 +5,9 @@ Contributing This library depends on its community. As interest keeps growing, we always need more people to help others. As soon as you learn the library, you can contribute in many ways. +Community development +===================== + - Join the Slack_ channel and answer questions. The library has a growing audience. Help to create and maintain a friendly and helpful atmosphere. @@ -29,28 +32,37 @@ are several ways you can help the library’s development: - Improve the documentation or improve the unit test suites. -Local development -================= +Making changes +============== + +To make changes to the library, you will want to set up a local environment. +To get set up, fork the repository on GitHub, clone your fork using Git, and +then checkout the ``develop`` branch. -To make changes to the library, you want to set up a local environment. +Create a virtual Python environment, install Python dependencies, and install +and start the databases that are used by the test suite. Then run the tests. +The library test suite depends on several databases. It's much easier to run +databases in Docker containers, but it's slightly faster to run databases +without containers. -To get set up, create a virtual Python environment, install Python dependencies, -and install the databases that are used by the test suite. This library is using the -`GNU make`_ utility. You can read more about it in `this article`_. 
+Once you have the tests running, you can make changes, run the tests again, +push changes to your fork, and then maybe create a pull request to the project's +develop branch. -There are commands to install Python dependencies into a virtual Python environment, -to start and stop databases, to run the test suite, to build the docs, and for code -linting. The actual code of the commands described below can be easily found in -``Makefile`` at the root of this project. But first, you need to set up your local -development environment. +This library has a `Makefile` to help with development. You can read more about +the `GNU make`_ utility in `this article`_. There are commands to install Python +dependencies into a virtual Python environment, to start and stop databases, to +run the test suite, to build the docs, and for code linting. The actual code of +the commands described below can be easily found in ``Makefile`` at the root of +this project. .. _GNU make: https://www.gnu.org/software/make/ .. _this article: https://opensource.com/article/18/8/what-how-makefile .. _development-environment: -Configure the development environment -===================================== +Virtual Python environment +-------------------------- This project runs on Python 3.6+, and you need to have it installed on your system. The recommended way for all platforms is to use the `official download page`_. @@ -80,8 +92,8 @@ required for contribution:: $ make install -Git blame ---------- +Git blame (optional) +-------------------- Setup ``git`` to ignore specific revs with ``blame``. @@ -103,15 +115,10 @@ More info can be found here_. .. _here: https://www.moxio.com/blog/43/ignoring-bulk-change-commits-with-git-blame -Run databases -============= - .. _docker-containers: -It's easier to run the databases in Docker containers, but it's faster to run them directly. - -Run databases in Docker containers ----------------------------------- +Run databases with Docker +------------------------- You can run the databases in Docker containers. @@ -154,7 +161,8 @@ default value for it. Run databases on MacOS ---------------------- -To install the databases on MacOS, you can run the following commands:: +If you happen to be using a Mac, you can also install the +databases directly on MacOS:: $ brew install mysql $ brew install posgresql @@ -162,7 +170,7 @@ To install the databases on MacOS, you can run the following commands:: $ brew install cassandra $ ./dev/download_axon_server.sh -To start the services, you can run run:: +To start the databases, you can run run:: $ brew_services_start @@ -183,7 +191,7 @@ You will also need to create a database in PostgreSQL:: Run tests -========= +--------- Ensure that you’ve set up your development environment (see :ref:`development-environment`) and and required services are up and running (see :ref:`docker-containers`, or :ref:`macos-databases`). @@ -201,13 +209,11 @@ You can also run the test suite, but skip the slower tests:: .. note:: To re-run tests, sometimes it requires ``make docker-down`` for a fresh start. -... or push the code to trigger TravisCI checks. - Building documentation -====================== +---------------------- -This project using Sphinx_ documentation builder tool. Run this command to compile documentation +This project is using Sphinx_ documentation builder tool. 
Run this command to compile documentation into static HTML files at ``./docs/_build/html``:: $ make docs @@ -216,7 +222,7 @@ into static HTML files at ``./docs/_build/html``:: Linting your code -================= +----------------- For now, linting your changes is completely optional - we do not have any checks on CI for it. @@ -255,11 +261,11 @@ common traps, mistakes and helps enforce best practices:: Automatic formatting ---------------------------------- - -.. note:: - In order to keep your Pull Request clean, please, do not apply it for all project but your specific changes. +-------------------- To apply automatic formatting by using isort_ and Black_, run:: $ make fmt + +.. note:: + In order to keep your Pull Request clean, please, do not apply it for all project but your specific changes. From 01d4cc6d91a4cc7060ef6321d93b4af6514676ae Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:11:13 +0100 Subject: [PATCH 24/50] Fixed typo (test suite, not test suites). --- docs/topics/contributing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index bde70b7b0..a4275d8ba 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -29,7 +29,7 @@ are several ways you can help the library’s development: - Join the Slack_ channel and share your ideas for how to improve the library. We’re always open to suggestions. - Submit patches or pull requests for new and/or fixed behavior. -- Improve the documentation or improve the unit test suites. +- Improve the documentation or improve the unit test suite. Making changes From e0ff774c83e3d36cbc70e69e93ef1635ae1ea71f Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:12:19 +0100 Subject: [PATCH 25/50] Improved grammar in contributing.rst. --- docs/topics/contributing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index a4275d8ba..d52a4a3ce 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -39,8 +39,8 @@ To make changes to the library, you will want to set up a local environment. To get set up, fork the repository on GitHub, clone your fork using Git, and then checkout the ``develop`` branch. -Create a virtual Python environment, install Python dependencies, and install -and start the databases that are used by the test suite. Then run the tests. +Create a virtual Python environment, install Python dependencies, install +and start the databases that are used by the test suite. and then run the tests. The library test suite depends on several databases. It's much easier to run databases in Docker containers, but it's slightly faster to run databases without containers. From c4ee6b6518aa4e9d4a35947314034fccb9be479f Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:18:25 +0100 Subject: [PATCH 26/50] Improved wording in contributing.rst. --- docs/topics/contributing.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index d52a4a3ce..84d35177a 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -51,10 +51,10 @@ develop branch. This library has a `Makefile` to help with development. You can read more about the `GNU make`_ utility in `this article`_. 
There are commands to install Python -dependencies into a virtual Python environment, to start and stop databases, to -run the test suite, to build the docs, and for code linting. The actual code of -the commands described below can be easily found in ``Makefile`` at the root of -this project. +dependencies into a virtual Python environment, to build containers for the databases, +to start and stop databases, to run the test suite, to build the docs, to reformat code, +and for static type checking and linting. The actual code of the commands described +below can be easily found in ``Makefile`` at the root of the project repository. .. _GNU make: https://www.gnu.org/software/make/ .. _this article: https://opensource.com/article/18/8/what-how-makefile @@ -71,7 +71,7 @@ But also, you may use pyenv_. .. _official download page: https://www.python.org/downloads/ .. _pyenv: https://github.com/pyenv/pyenv -Consider to use virtualenv for development, choose for your flavor: +You can use virtualenv to create a virtual Python environment. Choose for your flavour: - virtualenv_ - virtualenvwrapper_ @@ -81,7 +81,7 @@ Consider to use virtualenv for development, choose for your flavor: .. _virtualenvwrapper: https://virtualenvwrapper.readthedocs.io/en/latest/ .. _pyenv-virtualenv: https://github.com/pyenv/pyenv-virtualenv -For example, create and activate a virtual Python environment using ``virtualenv`` directly:: +For example, you can create and activate a virtual Python environment using ``virtualenv`` directly:: $ virtualenv ~/.virtualenvs/eventsourcing-py3 $ source ~/.virtualenvs/eventsourcing-py3/bin/activate @@ -170,11 +170,11 @@ databases directly on MacOS:: $ brew install cassandra $ ./dev/download_axon_server.sh -To start the databases, you can run run:: +To start the databases, you can run:: $ brew_services_start -To stop the services, you can run run:: +To stop the services, you can run:: $ brew_services_stop From cb6dc724f2a83a46016a680cff95330ab8f5e063 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:20:18 +0100 Subject: [PATCH 27/50] Improved wording in contributing.rst. --- docs/topics/contributing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 84d35177a..08ccc7bd8 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -198,11 +198,11 @@ and required services are up and running (see :ref:`docker-containers`, or :ref: Running tests from an IDE such as PyCharm allows easy navigation to code files. -You can run the full test suite using ``make``:: +You can run the full test suite using ``make test``:: $ make test -You can also run the test suite, but skip the slower tests:: +You can skip the slower tests when running the test suite with ``make quicktest``:: $ make quicktest From f0ad0232ae168a1627ac9b8ce91297fd819ab44b Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:21:52 +0100 Subject: [PATCH 28/50] Improved wording in contributing.rst. --- docs/topics/contributing.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 08ccc7bd8..0a10f199b 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -208,6 +208,9 @@ You can skip the slower tests when running the test suite with ``make quicktest` .. note:: To re-run tests, sometimes it requires ``make docker-down`` for a fresh start. 
+ At the moment, Axon Server sometimes doesn't return everything that is expected + when listing all the events of an application. But restarting Axon Server seems + to clear this up. Building documentation From 59df7bdcf89f3e9dbf1901cb849d9b7b1fffc447 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:26:14 +0100 Subject: [PATCH 29/50] Improved wording in contributing.rst. --- docs/topics/contributing.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 0a10f199b..a465c4173 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -271,4 +271,8 @@ To apply automatic formatting by using isort_ and Black_, run:: $ make fmt .. note:: - In order to keep your Pull Request clean, please, do not apply it for all project but your specific changes. + In order to keep your Pull Request clean, please, do not apply it for all project + but your specific changes. The project is now well formatted, but static typing + and and strict compliance with PEP8 is still a working in progress. If you want + to help improve the type hints and formatting, please do so in a dedicated PR + so things aren't mixed with other changes (it's just easier to review this way). From e1e5290fa9253a0f42ed8fe5f79ab1d3cf32038c Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:38:41 +0100 Subject: [PATCH 30/50] Changed MacOS to macOS. --- docs/topics/contributing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index a465c4173..2205cb7fe 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -158,11 +158,11 @@ default value for it. .. _macos-databases: -Run databases on MacOS +Run databases on macOS ---------------------- If you happen to be using a Mac, you can also install the -databases directly on MacOS:: +databases directly on macOS:: $ brew install mysql $ brew install posgresql From 57c6f0a637911ebc5bf4cbe1f9f56af2be153082 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sun, 3 May 2020 02:40:08 +0100 Subject: [PATCH 31/50] Added missing 'make' part of command. --- docs/topics/contributing.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 2205cb7fe..328a236e3 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -161,8 +161,8 @@ default value for it. Run databases on macOS ---------------------- -If you happen to be using a Mac, you can also install the -databases directly on macOS:: +If you happen to be using a Mac, you can install the databases directly on macOS +using the following commands:: $ brew install mysql $ brew install posgresql @@ -172,11 +172,11 @@ databases directly on macOS:: To start the databases, you can run:: - $ brew_services_start + $ make brew_services_start To stop the services, you can run:: - $ brew_services_stop + $ make brew_services_stop Before running the tests for the first time, create a database in MySQL, and configure user access:: From e35389b9c588eb29cab08dd57cb16262e4fa755e Mon Sep 17 00:00:00 2001 From: John Bywater Date: Fri, 15 May 2020 23:58:44 +0100 Subject: [PATCH 32/50] Updated copyright year. 
--- LICENSE | 2 +- dev/RELEASE_SCRIPT.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 636c10eb3..fc2c56be6 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2018, John Bywater +Copyright (c) 2020, John Bywater All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/dev/RELEASE_SCRIPT.md b/dev/RELEASE_SCRIPT.md index 97a810be8..fa6c12b9c 100644 --- a/dev/RELEASE_SCRIPT.md +++ b/dev/RELEASE_SCRIPT.md @@ -8,6 +8,7 @@ Steps to make a new release. 1. Push branch to GitHub and start a PR to master. 1. Review versions of all dependencies. 1. Update release notes to describe what's new in this release. +1. Update copyright year in LICENSE file. 1. Run 'prepare-distribution' script. 1. Increase version number to 'rc1', 'rc2' in case of failure. 1. Try to fix and push changes to GitHub. From 11b8c64dc6272fb9da520e67604a069d869514ab Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 00:54:30 +0100 Subject: [PATCH 33/50] Added missing docstrings to SimpleApplication methods. --- eventsourcing/application/simple.py | 49 +++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index 75a543c87..cc58cc84f 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -319,7 +319,7 @@ def construct_infrastructure_factory( def construct_datastore(self) -> None: """ - Constructs datastore object (which helps by creating and dropping tables). + Constructs datastore object (used to create and drop database tables). """ assert self.infrastructure_factory self._datastore = self.infrastructure_factory.construct_datastore() @@ -384,6 +384,12 @@ def change_pipeline(self, pipeline_id: int) -> None: self.event_store.record_manager.pipeline_id = pipeline_id def close(self) -> None: + """ + Closes the application. + + The persistence policy is closed, and the application's + connection to the database is closed. + """ # Close the persistence policy. if self._persistence_policy is not None: self._persistence_policy.close() @@ -417,7 +423,21 @@ def mixin(cls: T, infrastructure_class: type) -> T: def save( self, aggregates=(), orm_objects_pending_save=(), orm_objects_pending_delete=(), - ): + ) -> None: + """ + Saves the state of aggregates and ORM objects. + + All of the pending events of the aggregates, along with the + ORM objects, are recorded atomically as a process event. + + Then a "prompt to pull" is published, and, if the repository cache + is in use, the aggregates are put in the cache. + + :param aggregates: One or many aggregates. + :param orm_objects_pending_save: Sequence of ORM objects to be saved. + :param orm_objects_pending_delete: Sequence of ORM objects to be deleted. + """ + # Collect pending events from the aggregates. new_events = [] if isinstance(aggregates, BaseAggregateRoot): aggregates = [aggregates] @@ -453,6 +473,17 @@ def save( self.repository.put_entity_in_cache(aggregate.id, aggregate) def record_process_event(self, process_event: ProcessEvent) -> List: + """ + Records a process event. + + Converts the domain events of the process event to event record + objects, and writes the event records and the ORM objects to + the database using the application's event store's record manager.
+ + :param process_event: An instance of + :class:`~eventsourcing.application.simple.ProcessEvent` + :return: A list of event records. + """ # Construct event records. event_records = self.construct_event_records( process_event.domain_events, process_event.causal_dependencies @@ -474,6 +505,13 @@ def construct_event_records( pending_events: Iterable[TAggregateEvent], causal_dependencies: Optional[ListOfCausalDependencies], ) -> List: + """ + Constructs event records from domain events. + + :param pending_events: An iterable of domain events. + :param causal_dependencies: A list of causal dependencies. + :return: A list of event records. + """ # Convert to event records. sequenced_items = self.event_store.items_from_events(pending_events) record_manager = self.event_store.record_manager @@ -514,6 +552,13 @@ def construct_event_records( return event_records def publish_prompt(self, head_notification_id=None): + """ + Publishes a "prompt to pull" (instance of + :class:`~eventsourcing.application.simple.PromptToPull`). + + :param head_notification_id: Maximum notification ID of event records + to be pulled. + """ prompt = PromptToPull(self.name, self.pipeline_id, head_notification_id) try: publish(prompt) From b01fee2f69681211d1cd06112e23f86a6d7b2ffa Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 01:43:37 +0100 Subject: [PATCH 34/50] Prompt to pull from "save" only when record manager writes notifications. --- eventsourcing/application/simple.py | 34 +++++++++++++++-------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index cc58cc84f..13d95514e 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -34,6 +34,7 @@ AbstractEventStore, AbstractRecordManager, BaseRecordManager, + RecordManagerWithNotifications, RecordManagerWithTracking, TrackingKwargs, ) @@ -449,21 +450,22 @@ def save( orm_objs_pending_delete=orm_objects_pending_delete, ) new_records = self.record_process_event(process_event) - # Find the head notification ID. - notifiable_events = [e for e in new_events if e.__notifiable__] - head_notification_id = None - if len(notifiable_events): - record_manager = self.event_store.record_manager - notification_id_name = record_manager.notification_id_name - notifications = [] - for record in new_records: - if not hasattr(record, notification_id_name): - continue - if not isinstance(getattr(record, notification_id_name), int): - continue - notifications.append( - record_manager.create_notification_from_record(record) - ) + record_manager = self.event_store.record_manager + if isinstance(record_manager, RecordManagerWithNotifications): + # Find the head notification ID. 
+ notifiable_events = [e for e in new_events if e.__notifiable__] + head_notification_id = None + if len(notifiable_events): + notification_id_name = record_manager.notification_id_name + notifications = [] + for record in new_records: + if not hasattr(record, notification_id_name): + continue + if not isinstance(getattr(record, notification_id_name), int): + continue + notifications.append( + record_manager.create_notification_from_record(record) + ) if len(notifications): head_notification_id = notifications[-1]["id"] @@ -537,7 +539,7 @@ def construct_event_records( else: if any((not e.__notifiable__ for e in pending_events)): raise Exception( - "Can't set __notifiable__=False withut " + "Can't set __notifiable__=False without " "set_notification_ids=True " ) From a8e60252314101ac9b67571a30d8a72639c1bcc0 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 01:44:06 +0100 Subject: [PATCH 35/50] Added a "todo" to check writing events from more than one aggregate is supported by the record manager's database. --- eventsourcing/application/simple.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index 13d95514e..e9fbec6d2 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -442,6 +442,11 @@ def save( new_events = [] if isinstance(aggregates, BaseAggregateRoot): aggregates = [aggregates] + else: + pass + # Todo: Make sure record manager supports writing events from more than + # one aggregate atomically (e.g. Cassandra and EventStore don't). + for aggregate in aggregates: new_events += aggregate.__batch_pending_events__() process_event = ProcessEvent( From 72ce8afe45ac98da7095dc2a0a5452dcc4f4833e Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 01:44:54 +0100 Subject: [PATCH 36/50] Put conditional outside loop, so it's only done once. --- eventsourcing/application/simple.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index e9fbec6d2..b17bacd9c 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -472,11 +472,11 @@ def save( record_manager.create_notification_from_record(record) ) - if len(notifications): - head_notification_id = notifications[-1]["id"] - self.publish_prompt(head_notification_id) - for aggregate in aggregates: - if self.repository.use_cache: + if len(notifications): + head_notification_id = notifications[-1]["id"] + self.publish_prompt(head_notification_id) + if self.repository.use_cache: + for aggregate in aggregates: self.repository.put_entity_in_cache(aggregate.id, aggregate) def record_process_event(self, process_event: ProcessEvent) -> List: From 916c22c68b0d58175d4230f30ee028840aa8e30f Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 01:45:37 +0100 Subject: [PATCH 37/50] Fixed setting of environment variable (wasn't effective). 
--- Makefile | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 1926d8fd4..f0b250d6e 100644 --- a/Makefile +++ b/Makefile @@ -83,11 +83,7 @@ test: .PHONY: quicktest quicktest: - QUICK_TESTS_ONLY=1 - @coverage run -m unittest discover eventsourcing.tests -vv - @coverage combine - @coverage report - @coverage html + QUICK_TESTS_ONLY=1 python -m unittest discover eventsourcing.tests -vv .PHONY: docs From 4255a38c0d1cf8f6e8e520c57dd25369740b82ad Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 02:14:53 +0100 Subject: [PATCH 38/50] Added docstrings to ProcessApplication and class. Also to ProcessApplicationWithSnapshotting class. --- eventsourcing/application/process.py | 61 ++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/eventsourcing/application/process.py b/eventsourcing/application/process.py index 7250e96ba..2aacbb882 100644 --- a/eventsourcing/application/process.py +++ b/eventsourcing/application/process.py @@ -180,6 +180,13 @@ def publish_prompt_for_events(self, _: Optional[IterableOfEvents] = None) -> Non def follow( self, upstream_application_name: str, notification_log: AbstractNotificationLog ) -> None: + """ + Sets up process application to follow the given notification log of an + upstream application. + + :param upstream_application_name: Name of the upstream application. + :param notification_log: Notification log that will be processed. + """ if ( upstream_application_name == self.name and self.apply_policy_to_generated_events @@ -199,6 +206,14 @@ def follow( def run( self, prompt: Optional[Prompt] = None, advance_by: Optional[int] = None ) -> int: + """ + Pulls event notifications from notification logs being followed by + this process application, and processes the contained domain events. + + :param prompt: Optional prompt, specifying a particular notification log. + :param advance_by: Maximum event notifications to process. + :return: Returns number of events that have been processed. + """ if prompt: assert isinstance(prompt, PromptToPull) @@ -265,6 +280,13 @@ def run( return notification_count def check_causal_dependencies(self, upstream_name, causal_dependencies_json): + """ + Checks the causal dependencies are satisfied (have already been processed). + + :param upstream_name: Name of the upstream application being processed. + :param causal_dependencies_json: Pipelines and positions in notification logs. + :raises CausalDependencyFailed: If causal dependencies are not satisfied. + """ # Decode causal dependencies of the domain event notification. if causal_dependencies_json: causal_dependencies = self.event_store.event_mapper.json_loads( @@ -307,6 +329,18 @@ def check_causal_dependencies(self, upstream_name, causal_dependencies_json): def process_upstream_event( self, domain_event: TAggregateEvent, notification_id: int, upstream_name: str ) -> Tuple[ListOfAggregateEvents, List]: + """ + Processes given domain event from an upstream notification log. + + Calls the process application policy, and then records a process event, + hence recording atomically all new domain events created by the call + to the policy along with any ORM objects that may result. + + :param domain_event: Domain event to be processed. + :param notification_id: Position in notification log. + :param upstream_name: Name of upstream application. + :return: Returns a list of new domain events. 
+ """ cycle_started: Optional[float] = None if self.tick_interval is not None: cycle_started = time.process_time() @@ -376,6 +410,12 @@ def process_upstream_event( def get_event_from_notification( self, notification: Dict[str, Any] ) -> TAggregateEvent: + """ + Extracts the domain event of an event notification. + + :param notification: The event notification. + :return: A domain event. + """ return self.event_store.event_mapper.event_from_topic_and_state( topic=notification[self.notification_topic_key], state=notification[self.notification_state_key], @@ -425,6 +465,13 @@ def get_recorded_position(self, upstream_name): def call_policy( self, domain_event: TAggregateEvent ) -> Tuple[ListOfAggregateEvents, ListOfCausalDependencies, List[Any], List[Any]]: + """ + Calls the process application policy with the given domain event. + + :param domain_event: Domain event that will be given to the policy. + + :return: Returns a list of domain events, and a list of causal dependencies. + """ # Get the application policy. policy = self.policy_func or self.policy @@ -522,6 +569,12 @@ def policy( def collect_pending_events( self, aggregates: Sequence[TAggregate] ) -> ListOfAggregateEvents: + """ + Collects all the pending events from the given sequence of aggregates. + + :param aggregates: Sequence of aggregates. + :return: Returns a list of domain events. + """ pending_events: ListOfAggregateEvents = [] num_changed_aggregates = 0 # This doesn't necessarily obtain events in causal order... @@ -586,7 +639,15 @@ def drop_table(self) -> None: class ProcessApplicationWithSnapshotting(SnapshottingApplication, ProcessApplication): + """ + Supplements process applications that will use snapshotting. + """ def take_snapshots(self, new_events: Sequence[TAggregateEvent]) -> None: + """ + Takes snapshot of aggregates, according to the policy. + + :param new_events: Domain events used to detect if a snapshot is to be taken. + """ assert self.snapshotting_policy for event in new_events: if self.snapshotting_policy.condition([event]): From 28a82255e8cf171647a3a58d919737b0b090a19d Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 02:15:55 +0100 Subject: [PATCH 39/50] Included application decorators module in modules reference. --- docs/ref/application.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/ref/application.rst b/docs/ref/application.rst index 942328954..b9f34e5b4 100644 --- a/docs/ref/application.rst +++ b/docs/ref/application.rst @@ -84,6 +84,17 @@ process :exclude-members: __weakref__, __dict__ +decorators +---------- + +.. automodule:: eventsourcing.application.decorators + :show-inheritance: + :member-order: bysource + :members: + :special-members: + :exclude-members: __weakref__, __dict__ + + command ------- From c565da34bda3a2c479b4c334879ba53ef659bd4a Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 11:44:34 +0100 Subject: [PATCH 40/50] Increased version number to 8.2.2rc0. --- eventsourcing/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/__init__.py b/eventsourcing/__init__.py index 129aeba2a..d575422c9 100644 --- a/eventsourcing/__init__.py +++ b/eventsourcing/__init__.py @@ -1 +1 @@ -__version__ = "8.2.2dev0" +__version__ = "8.2.2rc0" From 83b11cd622e525cf9a9e193296bc40dc9e29c516 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 11:50:28 +0100 Subject: [PATCH 41/50] Increased versions of dependencies. 
--- setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 72d197bcf..1d7f6542b 100644 --- a/setup.py +++ b/setup.py @@ -24,9 +24,9 @@ ray_requires = ["ray<=0.8.99999", "psutil", "setproctitle"] -thespian_requires = ["thespian<=3.9.99999"] +thespian_requires = ["thespian<=3.10.99999"] -cassandra_requires = ["cassandra-driver<=3.22.99999"] +cassandra_requires = ["cassandra-driver<=3.23.99999"] django_requires = ["django<=3.0.99999"] @@ -42,7 +42,7 @@ "flask<=1.1.99999", "flask_sqlalchemy<=2.4.99", "uwsgi<=2.0.99999", - "redis<=3.4.99999", + "redis<=3.5.99999", "celery<=4.4.99999", "pymysql<=0.9.99999", "mysql-connector-python-rf<=2.2.99999", From 68fee0f285046e59fd4687d6afabb3030edd44c6 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 13:28:19 +0100 Subject: [PATCH 42/50] Adjusted Makefile rule names (to have consistent hyphenation). --- Makefile | 29 +++++++++++++++++++++++------ dev/MACOS_SETUP_NOTES.md | 4 ++-- docs/topics/contributing.rst | 8 ++++---- setup.py | 2 +- 4 files changed, 30 insertions(+), 13 deletions(-) diff --git a/Makefile b/Makefile index f0b250d6e..3d6d58a95 100644 --- a/Makefile +++ b/Makefile @@ -29,6 +29,7 @@ docker-stop: docker-down: @docker-compose down -v --remove-orphans + .PHONY: docker-logs docker-logs: @docker-compose logs --follow --tail=1000 @@ -81,8 +82,8 @@ test: @coverage html -.PHONY: quicktest -quicktest: +.PHONY: quick-test +quick-test: QUICK_TESTS_ONLY=1 python -m unittest discover eventsourcing.tests -vv @@ -91,17 +92,33 @@ docs: cd docs && make html -.PHONY: brew_services_start -brew_services_start: +.PHONY: brew-services-start +brew-services-start: brew services start mysql brew services start postgresql brew services start redis ~/axonserver/axonserver.jar & cassandra -f & -.PHONY: brew_services_stop -brew_services_stop: + +.PHONY: brew-services-stop +brew-services-stop: brew services stop mysql brew services stop postgresql brew services stop redis pkill -15 java + + +.PHONY: prepare-distribution +prepare-distribution: + cd dev && python ./prepare-distribution.py + + +.PHONY: release-distribution +release-distribution: + cd dev && python ./release-distribution.py + + +.PHONY: test-distribution +test-distribution: + cd dev && python ./test-released-distribution.py diff --git a/dev/MACOS_SETUP_NOTES.md b/dev/MACOS_SETUP_NOTES.md index b34e9e1a9..51a3af007 100644 --- a/dev/MACOS_SETUP_NOTES.md +++ b/dev/MACOS_SETUP_NOTES.md @@ -44,9 +44,9 @@ $ ./axonserver/axonserver.jar After this, the databases can be stopped with: -$ make brew_services_stop +$ make brew-services-stop The database can be started with: -$ make brew_services_start +$ make brew-services-start diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index 328a236e3..82c16f142 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -172,11 +172,11 @@ using the following commands:: To start the databases, you can run:: - $ make brew_services_start + $ make brew-services-start To stop the services, you can run:: - $ make brew_services_stop + $ make brew-services-stop Before running the tests for the first time, create a database in MySQL, and configure user access:: @@ -202,9 +202,9 @@ You can run the full test suite using ``make test``:: $ make test -You can skip the slower tests when running the test suite with ``make quicktest``:: +You can skip the slower tests when running the test suite with ``make quick-test``:: - $ make quicktest + $ make quick-test .. 
note:: To re-run tests, sometimes it requires ``make docker-down`` for a fresh start. diff --git a/setup.py b/setup.py index 1d7f6542b..e5adb0f06 100644 --- a/setup.py +++ b/setup.py @@ -42,7 +42,7 @@ "flask<=1.1.99999", "flask_sqlalchemy<=2.4.99", "uwsgi<=2.0.99999", - "redis<=3.5.99999", + # "redis<=3.5.99999", "celery<=4.4.99999", "pymysql<=0.9.99999", "mysql-connector-python-rf<=2.2.99999", From a43e8ea73676254ba84dd429b1b8843fca412d22 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:01:45 +0100 Subject: [PATCH 43/50] Reworked prepare-distribution.py script. --- dev/prepare-distribution.py | 130 +++++++++++++++++------------------- 1 file changed, 60 insertions(+), 70 deletions(-) diff --git a/dev/prepare-distribution.py b/dev/prepare-distribution.py index fcd07fa89..aa513c025 100755 --- a/dev/prepare-distribution.py +++ b/dev/prepare-distribution.py @@ -5,58 +5,50 @@ from subprocess import CalledProcessError from time import sleep -try: - del os.environ["PYTHONPATH"] -except KeyError: - pass -os.environ["CASS_DRIVER_NO_CYTHON"] = "1" -sys.path.insert(0, "../") -from eventsourcing import __version__ - - -def build_and_test(cwd): - # Declare temporary working directory variable. - # tmpcwd27 = os.path.join(cwd, 'tmpve2.7') - tmpcwd37 = os.path.join(cwd, "tmpve3.7") - - # Build distribution. - subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=cwd) - subprocess.check_call([sys.executable, "setup.py", "sdist"], cwd=cwd) - is_uploaded_testpypi = False - - targets = [ - # (tmpcwd27, 'python2.7'), - (tmpcwd37, "python") - ] - for (tmpcwd, python_executable) in targets: - - check_locally = False - if check_locally: - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) - - # Install from dist folder. - tar_path = "../dist/eventsourcing-{}.tar.gz[testing]".format(__version__) - subprocess.check_call(["bin/pip", "install", "-U", tar_path], cwd=tmpcwd) - - # Check installed tests all pass. - test_installation(tmpcwd) - - # Build and upload to Test PyPI. - if not is_uploaded_testpypi: - subprocess.check_call( - [sys.executable, "setup.py", "sdist", "upload", "-r", "pypitest"], - cwd=cwd, - ) - is_uploaded_testpypi = True - - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) +def main(): + proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if "A library for event sourcing in Python" in open(readme_path).read(): + print("Project root dir: %s" % proj_path) + else: + raise Exception("Project README file not found: %s" % readme_path) + + try: + del os.environ["PYTHONPATH"] + except KeyError: + pass + + os.environ["CASS_DRIVER_NO_CYTHON"] = "1" + from eventsourcing import __version__ + + # Build and upload to Test PyPI. + subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path) + try: + subprocess.check_call( + [sys.executable, "setup.py", "sdist", "upload", "-r", "pypitest"], + cwd=proj_path, + ) + except CalledProcessError: + sys.exit(1) + + # Test distribution for various targets. + targets = [(os.path.join(proj_path, "tmpve3.7"), "python")] + for (venv_path, python_bin) in targets: + + # Rebuild virtualenv. + if os.path.exists(venv_path): + remove_virtualenv(proj_path, venv_path) + subprocess.check_call( + ["virtualenv", "-p", python_bin, venv_path], cwd=proj_path + ) + subprocess.check_call( + ["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path + ) # Install from Test PyPI. 
- cmd = [ - "bin/pip", + pip_install_from_testpypi = [ + os.path.join(venv_path, "bin/pip"), "install", "-U", "eventsourcing[testing]==" + __version__, @@ -67,37 +59,35 @@ def build_and_test(cwd): ] patience = 10 - while patience: + is_test_pass = False + sleep(1) + while True: try: - subprocess.check_call(cmd, cwd=tmpcwd) + subprocess.check_call(pip_install_from_testpypi, cwd=venv_path) + is_test_pass = True break except CalledProcessError: patience -= 1 + if patience < 0: + break print("Patience:", patience) - sleep(6) + sleep(1) + if not is_test_pass: + print("Failed to install from testpypi.") + sys.exit(1) # Check installed tests all pass. - test_installation(tmpcwd) - - remove_virtualenv(cwd, tmpcwd) - - -def test_installation(tmpcwd): - subprocess.check_call( - ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], cwd=tmpcwd - ) - + subprocess.check_call( + ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], + cwd=venv_path, + ) -def rebuild_virtualenv(cwd, venv_path, python_executable): - remove_virtualenv(cwd, venv_path) - subprocess.check_call(["virtualenv", "-p", python_executable, venv_path], cwd=cwd) - subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path) + remove_virtualenv(proj_path, venv_path) -def remove_virtualenv(cwd, venv_path): - subprocess.check_call(["rm", "-rf", venv_path], cwd=cwd) +def remove_virtualenv(proj_path, venv_path): + subprocess.check_call(["rm", "-r", venv_path], cwd=proj_path) if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - build_and_test(cwd) + main() From 813e0f34f84b6fefb3112e36d93d8126afa4d417 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:12:09 +0100 Subject: [PATCH 44/50] Adjusted prepare-distribution.py script. --- dev/prepare-distribution.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/dev/prepare-distribution.py b/dev/prepare-distribution.py index aa513c025..95334bffe 100755 --- a/dev/prepare-distribution.py +++ b/dev/prepare-distribution.py @@ -9,19 +9,16 @@ def main(): proj_path = os.path.abspath(".") readme_path = os.path.join(proj_path, "README.md") - if "A library for event sourcing in Python" in open(readme_path).read(): - print("Project root dir: %s" % proj_path) + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() else: - raise Exception("Project README file not found: %s" % readme_path) + raise Exception("Couldn't find project README.md") try: del os.environ["PYTHONPATH"] except KeyError: pass - os.environ["CASS_DRIVER_NO_CYTHON"] = "1" - from eventsourcing import __version__ - # Build and upload to Test PyPI. subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path) try: @@ -34,6 +31,8 @@ def main(): # Test distribution for various targets. targets = [(os.path.join(proj_path, "tmpve3.7"), "python")] + os.environ["CASS_DRIVER_NO_CYTHON"] = "1" + from eventsourcing import __version__ for (venv_path, python_bin) in targets: # Rebuild virtualenv. From 63d9fc4948d5bd1881dea3194ba4717c73f68c15 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:20:56 +0100 Subject: [PATCH 45/50] Adjusted dev scripts. 
--- dev/prepare-distribution.py | 1 + dev/release-distribution.py | 13 +++++-- dev/test-released-distribution.py | 57 +++++++++++++++---------------- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/dev/prepare-distribution.py b/dev/prepare-distribution.py index 95334bffe..dc4c9b517 100755 --- a/dev/prepare-distribution.py +++ b/dev/prepare-distribution.py @@ -7,6 +7,7 @@ def main(): + # Validate current working dir (should be project root). proj_path = os.path.abspath(".") readme_path = os.path.join(proj_path, "README.md") if os.path.exists(readme_path): diff --git a/dev/release-distribution.py b/dev/release-distribution.py index 966fddc14..c6a6b65c3 100755 --- a/dev/release-distribution.py +++ b/dev/release-distribution.py @@ -4,7 +4,15 @@ import sys -def build_and_release(cwd): +def main(): + # Validate current working dir (should be project root). + proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() + else: + raise Exception("Couldn't find project README.md") + # Build and upload to PyPI. subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=cwd) subprocess.check_call( @@ -13,5 +21,4 @@ def build_and_release(cwd): if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - build_and_release(cwd) + main() diff --git a/dev/test-released-distribution.py b/dev/test-released-distribution.py index 6e81514b6..668493014 100755 --- a/dev/test-released-distribution.py +++ b/dev/test-released-distribution.py @@ -3,49 +3,46 @@ import subprocess -if "PYTHONPATH" in os.environ: - del os.environ["PYTHONPATH"] +def main(): + # Validate current working dir (should be project root). + proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() + else: + raise Exception("Couldn't find project README.md") + + try: + del os.environ["PYTHONPATH"] + except KeyError: + pass - -def test_released_distribution(cwd): # Declare temporary working directory variable. - tmpcwd27 = os.path.join(cwd, "tmpve2.7") - tmpcwd36 = os.path.join(cwd, "tmpve3.6") - tmpcwd37 = os.path.join(cwd, "tmpve3.7") - build_targets = [ - # (tmpcwd27, 'python2.7'), - # (tmpcwd36, 'python3.6'), - (tmpcwd37, "python") + (os.path.join(proj_path, "tmpve3.7"), "python") ] - for (tmpcwd, python_executable) in build_targets: + for (venv_path, python_bin) in build_targets: - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) + # Rebuild virtualenv. + subprocess.check_call(["rm", "-r", venv_path], cwd=proj_path) + subprocess.check_call(["virtualenv", "-p", python_bin, venv_path], + cwd=proj_path) + subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], + cwd=venv_path) # Install from PyPI. os.environ["CASS_DRIVER_NO_CYTHON"] = "1" subprocess.check_call( ["bin/pip", "install", "--no-cache-dir", "eventsourcing[testing]"], - cwd=tmpcwd, + cwd=venv_path, ) # Check installed tests all pass. 
- test_installation(tmpcwd) - - -def test_installation(tmpcwd): - subprocess.check_call( - ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], cwd=tmpcwd - ) - - -def rebuild_virtualenv(cwd, venv_path, python_executable): - subprocess.check_call(["rm", "-rf", venv_path], cwd=cwd) - subprocess.check_call(["virtualenv", "-p", python_executable, venv_path], cwd=cwd) - subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path) + subprocess.check_call( + ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], + cwd=venv_path + ) if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - test_released_distribution(cwd) + main() From aab3e61a8ddb87a797f928fb5eff8734c1a21594 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:21:48 +0100 Subject: [PATCH 46/50] Updated release notes. --- docs/topics/release_notes.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/topics/release_notes.rst b/docs/topics/release_notes.rst index eebf271ca..f163c45e6 100644 --- a/docs/topics/release_notes.rst +++ b/docs/topics/release_notes.rst @@ -22,6 +22,15 @@ systems of application, previously in the "application" package, has been moved to a new "system" package. +Version 8.2.2 (released 16 May 2020) +-------------------------------------- + +Improved documentation. Updated dockerization for local +development. Added Makefile, to setup development environment, +to build and run docker containers, to run the test suite, to +format the code, and to build the docs. Reformatted the code. + + Version 8.2.1 (released 11 March 2020) -------------------------------------- From b9fc5b583daf3340ab5d3bc7988a85fbfb3e1cb9 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:22:12 +0100 Subject: [PATCH 47/50] Updated RELEASE_SCRIPT.md. --- dev/RELEASE_SCRIPT.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/RELEASE_SCRIPT.md b/dev/RELEASE_SCRIPT.md index fa6c12b9c..82af7ebd9 100644 --- a/dev/RELEASE_SCRIPT.md +++ b/dev/RELEASE_SCRIPT.md @@ -9,7 +9,7 @@ Steps to make a new release. 1. Review versions of all dependencies. 1. Update release notes to describe what's new in this release. 1. Update copyright year in LICENSE file. -1. Run 'prepare-distribution' script. +1. Run 'make prepare-distribution'. 1. Increase version number to 'rc1', 'rc2' in case of failure. 1. Try to fix and push changes to GitHub. 1. Make changes until built distribution is working. @@ -17,7 +17,7 @@ Steps to make a new release. 1. Finish release (merge into master and develop). Tag master 'vX.Y.Z'. 1. Push all changes to GitHub. 1. Checkout master branch (at the tag). -1. Run './dev/release-distribution' script (from project root directory). +1. Run 'make release-distribution'. 1. Run './dev/test-released-distribution' script (from project root directory). 1. Manually check documentation has been built and installed. 1. Manually check PyPI. From 4c8fb77fd3675ec7a069820c0e37d0b00a1d79f5 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:23:07 +0100 Subject: [PATCH 48/50] Fixed Makefile (run dev scripts from project root dir). 
--- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 3d6d58a95..a06102e32 100644 --- a/Makefile +++ b/Makefile @@ -111,14 +111,14 @@ brew-services-stop: .PHONY: prepare-distribution prepare-distribution: - cd dev && python ./prepare-distribution.py + python ./dev/prepare-distribution.py .PHONY: release-distribution release-distribution: - cd dev && python ./release-distribution.py + python ./dev/release-distribution.py .PHONY: test-distribution test-distribution: - cd dev && python ./test-released-distribution.py + python ./dev/test-released-distribution.py From 6ae4ad76531afbae1b50baf3e7282bf9bf960132 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:23:53 +0100 Subject: [PATCH 49/50] Increased version number to 8.2.2. --- eventsourcing/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/__init__.py b/eventsourcing/__init__.py index d575422c9..50bb51d17 100644 --- a/eventsourcing/__init__.py +++ b/eventsourcing/__init__.py @@ -1 +1 @@ -__version__ = "8.2.2rc0" +__version__ = "8.2.2" From ba3d4db028de65afd607d3637e59a4c4a0456f49 Mon Sep 17 00:00:00 2001 From: John Bywater Date: Sat, 16 May 2020 18:26:47 +0100 Subject: [PATCH 50/50] Fixed release-distribution.py script. --- dev/release-distribution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev/release-distribution.py b/dev/release-distribution.py index c6a6b65c3..656dad00b 100755 --- a/dev/release-distribution.py +++ b/dev/release-distribution.py @@ -14,9 +14,9 @@ def main(): raise Exception("Couldn't find project README.md") # Build and upload to PyPI. - subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=cwd) + subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path) subprocess.check_call( - [sys.executable, "setup.py", "sdist", "upload", "-r", "pypi"], cwd=cwd + [sys.executable, "setup.py", "sdist", "upload", "-r", "pypi"], cwd=proj_path )