diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..3ae3c3fdd --- /dev/null +++ b/.dockerignore @@ -0,0 +1,73 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.coveragerc +.cache +nosetests.xml +coverage.xml +*,cover + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Other ignores +.github/ +.idea/ +.mypy_cache/ +.pytest_cache/ +.editorconfig +.env +.travis.yml +AUTHORS +CODE_OF_CONDUCT.md +dev +eventsourcing/tests/djangoproject/db.sqlite3 +README.md +README_example_with_axon.md diff --git a/.editorconfig b/.editorconfig index 64e66ea60..fd8e278cc 100644 --- a/.editorconfig +++ b/.editorconfig @@ -39,3 +39,6 @@ indent_style = tab # Batch files use tabs for indentation [*.bat] indent_style = tab + +[*.{yml, yaml}] +indent_size = 2 diff --git a/.env b/.env new file mode 100644 index 000000000..d831f5ae9 --- /dev/null +++ b/.env @@ -0,0 +1,16 @@ +COMPOSE_FILE=dev/docker-compose.yaml +COMPOSE_PROJECT_NAME=eventsourcing + +CASSANDRA_HOSTS=127.0.0.1 + +MYSQL_HOST=127.0.0.1 +MYSQL_DATABASE=eventsourcing +MYSQL_PASSWORD=eventsourcing +MYSQL_ROOT_PASSWORD=eventsourcing_root +MYSQL_USER=eventsourcing + +POSTGRES_HOST=127.0.0.1 +POSTGRES_PASSWORD=eventsourcing +POSTGRES_USER=eventsourcing + +REDIS_HOST=127.0.0.1 diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..8f9d52e9c --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,5 @@ +# applied imports sorting and black formatting +1190c2fe4cf3f9b233b61f8f9a857c1007d11345 +6f5bf49327b66055f1ff865285543d070856c602 +53cfd70288051457999074479ddbfbaecb052f10 +6d9ec698a6310b701ffbcdf987ff5a3675f6e943 diff --git a/.readthedocs.yml b/.readthedocs.yml index f614f8699..b752bb3d5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,5 +1,5 @@ python: - version: 3 - pip_install: true - extra_requirements: - - docs + version: 3 + pip_install: true + extra_requirements: + - docs diff --git a/.travis.yml b/.travis.yml index 68c6a9b31..3f4a639b7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,9 +20,9 @@ before_install: - sudo apt-get -qq update - sudo apt-get install -y openjdk-8-jdk - sudo apt-get install -y icedtea-8-plugin -# - sudo update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64 + # - sudo update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64 - sudo update-java-alternatives -v --set java-1.8.0-openjdk-amd64 -# - source /etc/environment + # - source /etc/environment - export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64 - java -version @@ -56,28 +56,26 @@ install: - python --version - pip install -U pip wheel - CASS_DRIVER_NO_CYTHON=1 pip install -e .[testing] - - pip install pymysql - - pip install mysql-connector-python-rf - - pip install python-coveralls - - pip install -U "coverage<5.0.0" # v5 is incompatible ATM. 
env: global: - CASSANDRA_HOSTS=127.0.0.1 - MYSQL_USER=travis + - MYSQL_PASSWORD= - POSTGRES_USER=postgres + - POSTGRES_PASSWORD= script: -- if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then coverage run --concurrency=multiprocessing -m unittest discover eventsourcing.tests -v; - fi + fi -- if [[ $TRAVIS_PYTHON_VERSION == pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION == pypy* ]]; then python -m unittest discover eventsourcing.tests -v; - fi + fi after_success: -- if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then + - if [[ $TRAVIS_PYTHON_VERSION != pypy* ]]; then coverage combine; coveralls; - fi + fi diff --git a/LICENSE b/LICENSE index 636c10eb3..fc2c56be6 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2018, John Bywater +Copyright (c) 2020, John Bywater All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..a06102e32 --- /dev/null +++ b/Makefile @@ -0,0 +1,124 @@ +.EXPORT_ALL_VARIABLES: +DOTENV_FILE ?= .env + +-include $(DOTENV_FILE) + +.PHONY: install +install: + CASS_DRIVER_NO_CYTHON=1 + @pip install -e ".[cassandra,sqlalchemy,axonserver,axon,ray,django,testing,dev,docs]" + +.PHONY: docker-pull +docker-pull: + @docker-compose pull + +.PHONY: docker-build +docker-build: + @docker-compose build + +.PHONY: docker-up +docker-up: + @docker-compose up -d + @docker-compose ps + +.PHONY: docker-stop +docker-stop: + @docker-compose stop + +.PHONY: docker-down +docker-down: + @docker-compose down -v --remove-orphans + + +.PHONY: docker-logs +docker-logs: + @docker-compose logs --follow --tail=1000 + + +.PHONY: lint-black +lint-black: + @black --check --diff . + +.PHONY: lint-flake8 +lint-flake8: + @flake8 eventsourcing + +.PHONY: lint-isort +lint-isort: + @isort --check-only --diff --recursive . + +.PHONY: lint-mypy +lint-mypy: + @mypy --ignore-missing-imports eventsourcing + +.PHONY: lint-dockerfile +lint-dockerfile: + @docker run --rm -i replicated/dockerfilelint:ad65813 < ./dev/Dockerfile_eventsourcing_requirements + +.PHONY: lint +lint: lint-black lint-flake8 lint-isort lint-mypy lint-dockerfile + + +.PHONY: fmt-isort +fmt-isort: + @isort -y --recursive . + +.PHONY: fmt-black +fmt-black: + @black . 
+ +.PHONY: fmt +fmt: fmt-black fmt-isort + + +.PHONY: test +test: + @coverage run \ + --concurrency=multiprocessing \ + -m unittest discover \ + eventsourcing.tests -vv --failfast + @coverage combine + @coverage report + @coverage html + + +.PHONY: quick-test +quick-test: + QUICK_TESTS_ONLY=1 python -m unittest discover eventsourcing.tests -vv + + +.PHONY: docs +docs: + cd docs && make html + + +.PHONY: brew-services-start +brew-services-start: + brew services start mysql + brew services start postgresql + brew services start redis + ~/axonserver/axonserver.jar & + cassandra -f & + + +.PHONY: brew-services-stop +brew-services-stop: + brew services stop mysql + brew services stop postgresql + brew services stop redis + pkill -15 java + + +.PHONY: prepare-distribution +prepare-distribution: + python ./dev/prepare-distribution.py + + +.PHONY: release-distribution +release-distribution: + python ./dev/release-distribution.py + + +.PHONY: test-distribution +test-distribution: + python ./dev/test-released-distribution.py diff --git a/README_example_with_axon.md b/README_example_with_axon.md index 8f41052b0..37f10e509 100644 --- a/README_example_with_axon.md +++ b/README_example_with_axon.md @@ -117,7 +117,7 @@ import os os.environ['CIPHER_KEY'] = cipher_key # AxonServer database connection string. -os.environ['DB_URI'] = "localhost:8124" +os.environ['DB_URI'] = "{}:8124".format(os.environ.get('AXON_HOST', 'localhost')) ``` Run the code. Construct application and use as context manager. diff --git a/dev/.env b/dev/.env new file mode 120000 index 000000000..4a82335f5 --- /dev/null +++ b/dev/.env @@ -0,0 +1 @@ +../.env \ No newline at end of file diff --git a/dev/Dockerfile_eventsourcing_requirements b/dev/Dockerfile_eventsourcing_requirements index 48762c9a2..881b95ae8 100644 --- a/dev/Dockerfile_eventsourcing_requirements +++ b/dev/Dockerfile_eventsourcing_requirements @@ -1,8 +1,3 @@ -# To use this Docker file in PyCharm, just add a new Docker project interpreter, -# and set an image name such as "eventsourcing_requirements:latest". It will -# take a little while to download and build everything, but then tests which -# do not depend on other services such as MySQL and Cassandra should pass. -# To run containers needed to pass the full test suite, see docker-compose.yaml. 
FROM python:3.7 WORKDIR /app diff --git a/dev/MACOS_SETUP_NOTES.md b/dev/MACOS_SETUP_NOTES.md new file mode 100644 index 000000000..51a3af007 --- /dev/null +++ b/dev/MACOS_SETUP_NOTES.md @@ -0,0 +1,52 @@ +This document describes how to setup MacOS with databases needed to run the test suite: + +- MySQL +- PostgreSQL +- Redis +- Cassandra +- Axon Server + + +To setup MySQL: + +$ brew install mysql +$ brew services start mysql +$ mysql -u root +mysql> CREATE DATABASE EVENTSOURCING; +mysql> CREATE USER 'eventsourcing'@'localhost' IDENTIFIED BY 'eventsourcing'; +mysql> GRANT ALL PRIVILEGES ON eventsourcing.* TO 'eventsourcing'@'localhost'; + +To setup PostgreSQL: + +$ brew install postgresql +$ brew services start postgresql + + +To setup Redis: + +$ brew install redis +$ brew services start redis + + +To setup Cassandra: + +$ brew install cassandra +$ brew services start cassandra + +If that doesn't actually start Cassandra, then try this in a terminal: +$ cassandra -f + + +To setup Axon: +$ ./dev/download_axon_server.sh +$ ./axonserver/axonserver.jar + + +After this, the databases can be stopped with: + +$ make brew-services-stop + + +The database can be started with: + +$ make brew-services-start diff --git a/dev/RELEASE_SCRIPT.md b/dev/RELEASE_SCRIPT.md index 97a810be8..82af7ebd9 100644 --- a/dev/RELEASE_SCRIPT.md +++ b/dev/RELEASE_SCRIPT.md @@ -8,7 +8,8 @@ Steps to make a new release. 1. Push branch to GitHub and start a PR to master. 1. Review versions of all dependencies. 1. Update release notes to describe what's new in this release. -1. Run 'prepare-distribution' script. +1. Update copyright year in LICENSE file. +1. Run 'make prepare-distribution'. 1. Increase version number to 'rc1', 'rc2' in case of failure. 1. Try to fix and push changes to GitHub. 1. Make changes until built distribution is working. @@ -16,7 +17,7 @@ Steps to make a new release. 1. Finish release (merge into master and develop). Tag master 'vX.Y.Z'. 1. Push all changes to GitHub. 1. Checkout master branch (at the tag). -1. Run './dev/release-distribution' script (from project root directory). +1. Run 'make release-distribution'. 1. Run './dev/test-released-distribution' script (from project root directory). 1. Manually check documentation has been built and installed. 1. Manually check PyPI. diff --git a/dev/docker-compose.yaml b/dev/docker-compose.yaml index 529a62dbb..a0011827e 100644 --- a/dev/docker-compose.yaml +++ b/dev/docker-compose.yaml @@ -1,49 +1,75 @@ -# To use this Docker Compose file in PyCharm, just add a new Docker Compose -# project interpreter, and pick "all_eventsourcing_requirements" as the service. It -# will take a little while to download and build everything, then all tests should pass. -# Please note, both MySQL and Cassandra containers need a little while to get started -# first time the containers are run, so connections might not work immediately. version: '2' + services: - eventsourcing_requirements: - build: - context: .. - dockerfile: ./dev/Dockerfile_eventsourcing_requirements - image: "eventsourcing_requirements:latest" - volumes: - - .:/app - links: - - service_cassandra - - service_mysql - - service_postgres - - service_redis - environment: - CASSANDRA_HOSTS: service_cassandra - MYSQL_HOST: service_mysql - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - POSTGRES_HOST: service_postgres - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - REDIS_HOST: service_redis + eventsourcing_requirements: + build: + context: .. 
+ dockerfile: ./dev/Dockerfile_eventsourcing_requirements + image: "eventsourcing_requirements:latest" + volumes: + - .:/app + links: + - cassandra + - mysql + - postgres + - redis + - axon + environment: + CASSANDRA_HOSTS: cassandra + MYSQL_HOST: mysql + MYSQL_USER: eventsourcing + MYSQL_PASSWORD: eventsourcing + POSTGRES_HOST: postgres + POSTGRES_USER: eventsourcing + POSTGRES_PASSWORD: eventsourcing + REDIS_HOST: redis + AXON_HOST: axon + + cassandra: + image: "cassandra:latest" + volumes: + - cassandra_data:/var/lib/cassandra + ports: + - "9042:9042" + + mysql: + image: "mysql:latest" + env_file: + - .env + volumes: + - mysql_data:/var/lib/mysql + ports: + - "3306:3306" - service_cassandra: - image: "cassandra:latest" + postgres: + image: "postgres:latest" + env_file: + - .env + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" - service_mysql: - image: "mysql:latest" - environment: - MYSQL_ROOT_PASSWORD: eventsourcing - MYSQL_USER: eventsourcing - MYSQL_PASSWORD: eventsourcing - MYSQL_DATABASE: eventsourcing + redis: + image: "redis:latest" + volumes: + - redis_data:/data + ports: + - "6379:6379" - service_postgres: - image: "postgres:latest" - environment: - POSTGRES_USER: eventsourcing - POSTGRES_PASSWORD: eventsourcing - POSTGRES_DB: eventsourcing + axon: + image: "axoniq/axonserver:latest" + volumes: + - axon_data:/data + - axon_eventdata:/eventdata + ports: + - "8024:8024" + - "8124:8124" - service_redis: - image: "redis:latest" +volumes: + cassandra_data: + mysql_data: + postgres_data: + redis_data: + axon_data: + axon_eventdata: diff --git a/dev/prepare-distribution.py b/dev/prepare-distribution.py index fcd07fa89..dc4c9b517 100755 --- a/dev/prepare-distribution.py +++ b/dev/prepare-distribution.py @@ -5,58 +5,50 @@ from subprocess import CalledProcessError from time import sleep -try: - del os.environ["PYTHONPATH"] -except KeyError: - pass -os.environ["CASS_DRIVER_NO_CYTHON"] = "1" -sys.path.insert(0, "../") -from eventsourcing import __version__ - - -def build_and_test(cwd): - # Declare temporary working directory variable. - # tmpcwd27 = os.path.join(cwd, 'tmpve2.7') - tmpcwd37 = os.path.join(cwd, "tmpve3.7") - - # Build distribution. - subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=cwd) - subprocess.check_call([sys.executable, "setup.py", "sdist"], cwd=cwd) - is_uploaded_testpypi = False - - targets = [ - # (tmpcwd27, 'python2.7'), - (tmpcwd37, "python") - ] - for (tmpcwd, python_executable) in targets: - - check_locally = False - if check_locally: - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) - - # Install from dist folder. - tar_path = "../dist/eventsourcing-{}.tar.gz[testing]".format(__version__) - subprocess.check_call(["bin/pip", "install", "-U", tar_path], cwd=tmpcwd) - - # Check installed tests all pass. - test_installation(tmpcwd) - - # Build and upload to Test PyPI. - if not is_uploaded_testpypi: - subprocess.check_call( - [sys.executable, "setup.py", "sdist", "upload", "-r", "pypitest"], - cwd=cwd, - ) - is_uploaded_testpypi = True - - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) +def main(): + # Validate current working dir (should be project root). 
+ proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() + else: + raise Exception("Couldn't find project README.md") + + try: + del os.environ["PYTHONPATH"] + except KeyError: + pass + + # Build and upload to Test PyPI. + subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path) + try: + subprocess.check_call( + [sys.executable, "setup.py", "sdist", "upload", "-r", "pypitest"], + cwd=proj_path, + ) + except CalledProcessError: + sys.exit(1) + + # Test distribution for various targets. + targets = [(os.path.join(proj_path, "tmpve3.7"), "python")] + os.environ["CASS_DRIVER_NO_CYTHON"] = "1" + from eventsourcing import __version__ + for (venv_path, python_bin) in targets: + + # Rebuild virtualenv. + if os.path.exists(venv_path): + remove_virtualenv(proj_path, venv_path) + subprocess.check_call( + ["virtualenv", "-p", python_bin, venv_path], cwd=proj_path + ) + subprocess.check_call( + ["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path + ) # Install from Test PyPI. - cmd = [ - "bin/pip", + pip_install_from_testpypi = [ + os.path.join(venv_path, "bin/pip"), "install", "-U", "eventsourcing[testing]==" + __version__, @@ -67,37 +59,35 @@ def build_and_test(cwd): ] patience = 10 - while patience: + is_test_pass = False + sleep(1) + while True: try: - subprocess.check_call(cmd, cwd=tmpcwd) + subprocess.check_call(pip_install_from_testpypi, cwd=venv_path) + is_test_pass = True break except CalledProcessError: patience -= 1 + if patience < 0: + break print("Patience:", patience) - sleep(6) + sleep(1) + if not is_test_pass: + print("Failed to install from testpypi.") + sys.exit(1) # Check installed tests all pass. - test_installation(tmpcwd) - - remove_virtualenv(cwd, tmpcwd) - - -def test_installation(tmpcwd): - subprocess.check_call( - ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], cwd=tmpcwd - ) - + subprocess.check_call( + ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], + cwd=venv_path, + ) -def rebuild_virtualenv(cwd, venv_path, python_executable): - remove_virtualenv(cwd, venv_path) - subprocess.check_call(["virtualenv", "-p", python_executable, venv_path], cwd=cwd) - subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path) + remove_virtualenv(proj_path, venv_path) -def remove_virtualenv(cwd, venv_path): - subprocess.check_call(["rm", "-rf", venv_path], cwd=cwd) +def remove_virtualenv(proj_path, venv_path): + subprocess.check_call(["rm", "-r", venv_path], cwd=proj_path) if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - build_and_test(cwd) + main() diff --git a/dev/release-distribution.py b/dev/release-distribution.py index 966fddc14..656dad00b 100755 --- a/dev/release-distribution.py +++ b/dev/release-distribution.py @@ -4,14 +4,21 @@ import sys -def build_and_release(cwd): +def main(): + # Validate current working dir (should be project root). + proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() + else: + raise Exception("Couldn't find project README.md") + # Build and upload to PyPI. 
- subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=cwd) + subprocess.check_call([sys.executable, "setup.py", "clean", "--all"], cwd=proj_path) subprocess.check_call( - [sys.executable, "setup.py", "sdist", "upload", "-r", "pypi"], cwd=cwd + [sys.executable, "setup.py", "sdist", "upload", "-r", "pypi"], cwd=proj_path ) if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - build_and_release(cwd) + main() diff --git a/dev/test-released-distribution.py b/dev/test-released-distribution.py index 6e81514b6..668493014 100755 --- a/dev/test-released-distribution.py +++ b/dev/test-released-distribution.py @@ -3,49 +3,46 @@ import subprocess -if "PYTHONPATH" in os.environ: - del os.environ["PYTHONPATH"] +def main(): + # Validate current working dir (should be project root). + proj_path = os.path.abspath(".") + readme_path = os.path.join(proj_path, "README.md") + if os.path.exists(readme_path): + assert "A library for event sourcing in Python" in open(readme_path).read() + else: + raise Exception("Couldn't find project README.md") + + try: + del os.environ["PYTHONPATH"] + except KeyError: + pass - -def test_released_distribution(cwd): # Declare temporary working directory variable. - tmpcwd27 = os.path.join(cwd, "tmpve2.7") - tmpcwd36 = os.path.join(cwd, "tmpve3.6") - tmpcwd37 = os.path.join(cwd, "tmpve3.7") - build_targets = [ - # (tmpcwd27, 'python2.7'), - # (tmpcwd36, 'python3.6'), - (tmpcwd37, "python") + (os.path.join(proj_path, "tmpve3.7"), "python") ] - for (tmpcwd, python_executable) in build_targets: + for (venv_path, python_bin) in build_targets: - # Rebuild virtualenvs. - rebuild_virtualenv(cwd, tmpcwd, python_executable) + # Rebuild virtualenv. + subprocess.check_call(["rm", "-r", venv_path], cwd=proj_path) + subprocess.check_call(["virtualenv", "-p", python_bin, venv_path], + cwd=proj_path) + subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], + cwd=venv_path) # Install from PyPI. os.environ["CASS_DRIVER_NO_CYTHON"] = "1" subprocess.check_call( ["bin/pip", "install", "--no-cache-dir", "eventsourcing[testing]"], - cwd=tmpcwd, + cwd=venv_path, ) # Check installed tests all pass. - test_installation(tmpcwd) - - -def test_installation(tmpcwd): - subprocess.check_call( - ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], cwd=tmpcwd - ) - - -def rebuild_virtualenv(cwd, venv_path, python_executable): - subprocess.check_call(["rm", "-rf", venv_path], cwd=cwd) - subprocess.check_call(["virtualenv", "-p", python_executable, venv_path], cwd=cwd) - subprocess.check_call(["bin/pip", "install", "-U", "pip", "wheel"], cwd=venv_path) + subprocess.check_call( + ["bin/python", "-m" "unittest", "discover", "eventsourcing.tests"], + cwd=venv_path + ) if __name__ == "__main__": - cwd = os.path.join(os.environ["HOME"], "PyCharmProjects", "eventsourcing") - test_released_distribution(cwd) + main() diff --git a/docs/ref/application.rst b/docs/ref/application.rst index 942328954..b9f34e5b4 100644 --- a/docs/ref/application.rst +++ b/docs/ref/application.rst @@ -84,6 +84,17 @@ process :exclude-members: __weakref__, __dict__ +decorators +---------- + +.. 
automodule:: eventsourcing.application.decorators + :show-inheritance: + :member-order: bysource + :members: + :special-members: + :exclude-members: __weakref__, __dict__ + + command ------- diff --git a/docs/topics/contributing.rst b/docs/topics/contributing.rst index b2a38eb06..82c16f142 100644 --- a/docs/topics/contributing.rst +++ b/docs/topics/contributing.rst @@ -5,6 +5,9 @@ Contributing This library depends on its community. As interest keeps growing, we always need more people to help others. As soon as you learn the library, you can contribute in many ways. +Community development +===================== + - Join the Slack_ channel and answer questions. The library has a growing audience. Help to create and maintain a friendly and helpful atmosphere. @@ -26,4 +29,250 @@ are several ways you can help the library’s development: - Join the Slack_ channel and share your ideas for how to improve the library. We’re always open to suggestions. - Submit patches or pull requests for new and/or fixed behavior. -- Improve the documentation or write unit tests. +- Improve the documentation or improve the unit test suite. + + +Making changes +============== + +To make changes to the library, you will want to set up a local environment. +To get set up, fork the repository on GitHub, clone your fork using Git, and +then check out the ``develop`` branch. + +Create a virtual Python environment, install the Python dependencies, install +and start the databases that are used by the test suite, and then run the tests. +The library test suite depends on several databases. It's much easier to run +databases in Docker containers, but it's slightly faster to run databases +without containers. + +Once you have the tests running, you can make changes, run the tests again, +push changes to your fork, and then maybe create a pull request to the project's +develop branch. + +This library has a ``Makefile`` to help with development. You can read more about +the `GNU make`_ utility in `this article`_. There are commands to install Python +dependencies into a virtual Python environment, to build containers for the databases, +to start and stop databases, to run the test suite, to build the docs, to reformat code, +and for static type checking and linting. The actual code of the commands described +below can be easily found in ``Makefile`` at the root of the project repository. + +.. _GNU make: https://www.gnu.org/software/make/ +.. _this article: https://opensource.com/article/18/8/what-how-makefile + +.. _development-environment: + +Virtual Python environment +-------------------------- + +This project runs on Python 3.6+, and you need to have it installed on your system. +The recommended way for all platforms is to use the `official download page`_. +Alternatively, you may use pyenv_. + +.. _official download page: https://www.python.org/downloads/ +.. _pyenv: https://github.com/pyenv/pyenv + +You can use virtualenv to create a virtual Python environment. Choose your flavour: + +- virtualenv_ +- virtualenvwrapper_ +- pyenv-virtualenv_ for pyenv_ + +.. _virtualenv: https://pypi.org/project/virtualenv/ +.. _virtualenvwrapper: https://virtualenvwrapper.readthedocs.io/en/latest/ +..
_pyenv-virtualenv: https://github.com/pyenv/pyenv-virtualenv + +For example, you can create and activate a virtual Python environment using ``virtualenv`` directly:: + + $ virtualenv ~/.virtualenvs/eventsourcing-py3 + $ source ~/.virtualenvs/eventsourcing-py3/bin/activate + +Inside your activated virtualenv, use the following command to install all the project dependencies +required for contribution:: + + $ make install + + +Git blame (optional) +-------------------- + +Set up ``git`` to ignore specific revs with ``blame``. + +This project is old, and several times in its history massive changes were performed. +One such change was the move towards using ``isort``, ``flake8``, and ``black``. While +such changes are inevitable, they clutter the history, especially if you use ``git blame`` +or the *Annotate* option in PyCharm. But in newer versions of git (>= 2.23), this can be +mitigated: new options `--ignore-rev`_ and `--ignore-revs-file`_ were added. There is +a file in this repository called ``.git-blame-ignore-revs`` which contains all such +major reformattings. In order for ``git blame`` and PyCharm to pick it up, add a special +config line:: + + $ git config --local blame.ignoreRevsFile .git-blame-ignore-revs + +More info can be found here_. + +.. _--ignore-rev: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revltrevgt +.. _--ignore-revs-file: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt +.. _here: https://www.moxio.com/blog/43/ignoring-bulk-change-commits-with-git-blame + + +.. _docker-containers: + +Run databases with Docker +------------------------- + +You can run the databases in Docker containers. + +To pull docker images:: + + $ make docker-pull + +To build docker images:: + + $ make docker-build + +To bring up the containers and keep them running in detached mode:: + + $ make docker-up + +To stop the containers:: + + $ make docker-stop + +To tear down the containers, removing volumes and “orphans”:: + + $ make docker-down + +To attach to the containers' log output:: + + $ make docker-logs + +All of these commands use the predefined ``COMPOSE_FILE`` and ``COMPOSE_PROJECT_NAME`` values to keep +your containers organized in a straightforward way. + +**COMPOSE_FILE** is used by the *docker-compose* utility to pick up the development services +configuration. The valid format of this value is: ``dev/docker-compose.yaml``. + +**COMPOSE_PROJECT_NAME** sets the project name. This value is used to prefix container names +on startup, along with the service name. ``eventsourcing`` is a good +default value for it. + +..
_macos-databases: + +Run databases on macOS +---------------------- + +If you happen to be using a Mac, you can install the databases directly on macOS +using the following commands:: + + $ brew install mysql + $ brew install postgresql + $ brew install redis + $ brew install cassandra + $ ./dev/download_axon_server.sh + +To start the databases, you can run:: + + $ make brew-services-start + +To stop the services, you can run:: + + $ make brew-services-stop + +Before running the tests for the first time, create a database in MySQL, and configure user access:: + + $ mysql -u root + mysql> CREATE DATABASE EVENTSOURCING; + mysql> CREATE USER 'eventsourcing'@'localhost' IDENTIFIED BY 'eventsourcing'; + mysql> GRANT ALL PRIVILEGES ON eventsourcing.* TO 'eventsourcing'@'localhost'; + +You will also need to create a database in PostgreSQL:: + + $ createdb eventsourcing + + +Run tests +--------- + +Ensure that you’ve set up your development environment (see :ref:`development-environment`) and +that the required services are up and running (see :ref:`docker-containers` or :ref:`macos-databases`). + +Running tests from an IDE such as PyCharm allows easy navigation to code files. + +You can run the full test suite using ``make test``:: + + $ make test + +You can skip the slower tests when running the test suite with ``make quick-test``:: + + $ make quick-test + +.. note:: + When re-running tests, a fresh start with ``make docker-down`` is sometimes required. + At the moment, Axon Server sometimes doesn't return everything that is expected + when listing all the events of an application. But restarting Axon Server seems + to clear this up. + + +Building documentation +---------------------- + +This project uses the Sphinx_ documentation builder. Run this command to compile the documentation +into static HTML files at ``./docs/_build/html``:: + + $ make docs + +.. _Sphinx: https://www.sphinx-doc.org/en/master/ + + +Linting your code +----------------- + +For now, linting your changes is completely optional; we do not have any CI checks for it. + +Run isort_ to check import sorting:: + + $ make lint-isort + +We are using Black_ as a tool for style guide enforcement:: + + $ make lint-black + +We are using Flake8_ (and its `Flake8 BugBear plugin`_) to check the code for PEP8_ compatibility:: + + $ make lint-flake8 + +Mypy_ is a static type checker for Python 3 and Python 2.7. Run mypy to check the code for accurate type annotations:: + + $ make lint-mypy + +Dockerfilelint_ is an ``npm`` module that analyzes a Dockerfile, looks for +common traps and mistakes, and helps enforce best practices:: + + $ make lint-dockerfile + +... and finally, to run all of the checks above, use:: + + $ make lint + +.. _isort: https://github.com/timothycrosley/isort +.. _Black: https://black.readthedocs.io/en/stable/ +.. _Dockerfilelint: https://hub.docker.com/r/replicated/dockerfilelint +.. _Flake8: https://flake8.pycqa.org/en/latest/ +.. _Flake8 BugBear plugin: https://github.com/PyCQA/flake8-bugbear +.. _PEP8: https://www.python.org/dev/peps/pep-0008/ +.. _Mypy: https://mypy.readthedocs.io/en/stable/ + + +Automatic formatting +-------------------- + +To apply automatic formatting using isort_ and Black_, run:: + + $ make fmt + +.. note:: + In order to keep your Pull Request clean, please do not apply it to the whole project, + only to your specific changes. The project is now well formatted, but static typing + and strict compliance with PEP8 is still a work in progress.
If you want + to help improve the type hints and formatting, please do so in a dedicated PR + so things aren't mixed with other changes (it's just easier to review this way). diff --git a/docs/topics/process.rst b/docs/topics/process.rst index ef78b4616..47639bf17 100644 --- a/docs/topics/process.rst +++ b/docs/topics/process.rst @@ -828,7 +828,7 @@ process application follow itself in a system. .. code:: python class ReflexiveApplication(ProcessApplication): - apply_policy_to_generated_events = False + apply_policy_to_generated_events = True This will have no effect unless the policy is written to respond to the types @@ -1025,8 +1025,8 @@ The following MySQL database connection string is compatible with SQLAlchemy. import os os.environ['DB_URI'] = 'mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true'.format( - os.getenv('MYSQL_USER', 'root'), - os.getenv('MYSQL_PASSWORD', ''), + os.getenv('MYSQL_USER', 'eventsourcing'), + os.getenv('MYSQL_PASSWORD', 'eventsourcing'), os.getenv('MYSQL_HOST', '127.0.0.1'), ) @@ -1053,7 +1053,7 @@ things are eventually done. .. code:: python - @retry((AssertionError, KeyError), max_attempts=50, wait=0.1) + @retry((AssertionError, KeyError), max_attempts=60, wait=0.5) def assert_eventually_done(repository, cmd_id): """Checks the command is eventually done.""" assert repository[cmd_id].is_done diff --git a/docs/topics/release_notes.rst b/docs/topics/release_notes.rst index eebf271ca..f163c45e6 100644 --- a/docs/topics/release_notes.rst +++ b/docs/topics/release_notes.rst @@ -22,6 +22,15 @@ systems of application, previously in the "application" package, has been moved to a new "system" package. +Version 8.2.2 (released 16 May 2020) +-------------------------------------- + +Improved documentation. Updated dockerization for local +development. Added Makefile, to setup development environment, +to build and run docker containers, to run the test suite, to +format the code, and to build the docs. Reformatted the code. + + Version 8.2.1 (released 11 March 2020) -------------------------------------- diff --git a/docs/topics/support.rst b/docs/topics/support.rst index a0a3214f4..f6832edae 100644 --- a/docs/topics/support.rst +++ b/docs/topics/support.rst @@ -58,5 +58,5 @@ The library has a growing community that may be able to help. Support the project =================== -Please follow the `Sponsor button `__ +Please follow the `Sponsor button `__ on the GitHub project for options. 
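The ``@retry`` decorator applied to ``assert_eventually_done`` in the ``docs/topics/process.rst`` hunk above gives an eventually-consistent system time to finish processing: the decorated check is simply re-called after a short wait whenever one of the listed exceptions is raised, until it passes or the attempts run out. A minimal standalone sketch of that pattern (an illustration only, not the library's actual implementation in ``eventsourcing.domain.model.decorators``):

```python
import time
from functools import wraps


def retry(exceptions, max_attempts=10, wait=0.1):
    """Re-call the decorated function while it raises one of `exceptions`."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(1, max_attempts + 1):
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    if attempt == max_attempts:
                        raise  # Out of attempts: let the last error propagate.
                    time.sleep(wait)  # Give the system time to catch up.
        return wrapper
    return decorator


# Used as in the documentation example above: keep checking for up to
# 60 attempts, waiting half a second between attempts.
@retry((AssertionError, KeyError), max_attempts=60, wait=0.5)
def assert_eventually_done(repository, cmd_id):
    """Checks the command is eventually done."""
    assert repository[cmd_id].is_done
```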
diff --git a/eventsourcing/__init__.py b/eventsourcing/__init__.py index 1f1024b19..50bb51d17 100644 --- a/eventsourcing/__init__.py +++ b/eventsourcing/__init__.py @@ -1 +1 @@ -__version__ = "8.2.1" +__version__ = "8.2.2" diff --git a/eventsourcing/application/decorators.py b/eventsourcing/application/decorators.py index c19cefb96..2580c3ec1 100644 --- a/eventsourcing/application/decorators.py +++ b/eventsourcing/application/decorators.py @@ -1,4 +1,4 @@ -from functools import singledispatch, wraps, _find_impl +from functools import _find_impl, singledispatch, wraps from inspect import isfunction from typing import Callable, no_type_check @@ -37,7 +37,6 @@ def applicationpolicy2(arg: Callable) -> Callable: cache = {} def _mutator(func): - def dispatch(event_type): try: return cache[event_type] @@ -55,6 +54,7 @@ def register(event_type): def registered_function_decorator(func): handlers[event_type] = func return func + return registered_function_decorator policy_function_wrapper.register = register diff --git a/eventsourcing/application/django.py b/eventsourcing/application/django.py index 2ada26380..4a12b0ca3 100644 --- a/eventsourcing/application/django.py +++ b/eventsourcing/application/django.py @@ -11,7 +11,7 @@ class DjangoApplication(ApplicationWithConcreteInfrastructure): infrastructure_factory_class = DjangoInfrastructureFactory - def __init__(self, tracking_record_class: Any=None, *args: Any, **kwargs: Any): + def __init__(self, tracking_record_class: Any = None, *args: Any, **kwargs: Any): self._tracking_record_class = tracking_record_class super(DjangoApplication, self).__init__(*args, **kwargs) diff --git a/eventsourcing/application/notificationlog.py b/eventsourcing/application/notificationlog.py index 6e4be7973..cd33e7a64 100644 --- a/eventsourcing/application/notificationlog.py +++ b/eventsourcing/application/notificationlog.py @@ -3,8 +3,8 @@ from eventsourcing.domain.model.array import BigArray from eventsourcing.infrastructure.base import ( - RecordManagerWithNotifications, AbstractRecordManager, + RecordManagerWithNotifications, ) DEFAULT_SECTION_SIZE = 20 diff --git a/eventsourcing/application/policies.py b/eventsourcing/application/policies.py index f67cd74d3..40c96188c 100644 --- a/eventsourcing/application/policies.py +++ b/eventsourcing/application/policies.py @@ -2,16 +2,13 @@ from eventsourcing.domain.model.entity import VersionedEntity from eventsourcing.domain.model.events import ( + AbstractSnapshot, EventWithOriginatorVersion, subscribe, unsubscribe, - AbstractSnapshot, -) -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, ) from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.whitehead import IterableOfEvents, TEvent diff --git a/eventsourcing/application/process.py b/eventsourcing/application/process.py index c48dee612..2aacbb882 100644 --- a/eventsourcing/application/process.py +++ b/eventsourcing/application/process.py @@ -36,14 +36,8 @@ TAggregateEvent, ) from eventsourcing.domain.model.events import subscribe, unsubscribe -from eventsourcing.exceptions import ( - CausalDependencyFailed, - ProgrammingError, -) -from eventsourcing.infrastructure.base import ( - RecordManagerWithTracking, - TrackingKwargs, -) +from eventsourcing.exceptions import CausalDependencyFailed, ProgrammingError +from eventsourcing.infrastructure.base import RecordManagerWithTracking, TrackingKwargs from 
eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository from eventsourcing.whitehead import IterableOfEvents @@ -186,6 +180,13 @@ def publish_prompt_for_events(self, _: Optional[IterableOfEvents] = None) -> Non def follow( self, upstream_application_name: str, notification_log: AbstractNotificationLog ) -> None: + """ + Sets up process application to follow the given notification log of an + upstream application. + + :param upstream_application_name: Name of the upstream application. + :param notification_log: Notification log that will be processed. + """ if ( upstream_application_name == self.name and self.apply_policy_to_generated_events @@ -205,6 +206,14 @@ def follow( def run( self, prompt: Optional[Prompt] = None, advance_by: Optional[int] = None ) -> int: + """ + Pulls event notifications from notification logs being followed by + this process application, and processes the contained domain events. + + :param prompt: Optional prompt, specifying a particular notification log. + :param advance_by: Maximum event notifications to process. + :return: Returns number of events that have been processed. + """ if prompt: assert isinstance(prompt, PromptToPull) @@ -271,6 +280,13 @@ def run( return notification_count def check_causal_dependencies(self, upstream_name, causal_dependencies_json): + """ + Checks the causal dependencies are satisfied (have already been processed). + + :param upstream_name: Name of the upstream application being processed. + :param causal_dependencies_json: Pipelines and positions in notification logs. + :raises CausalDependencyFailed: If causal dependencies are not satisfied. + """ # Decode causal dependencies of the domain event notification. if causal_dependencies_json: causal_dependencies = self.event_store.event_mapper.json_loads( @@ -313,6 +329,18 @@ def check_causal_dependencies(self, upstream_name, causal_dependencies_json): def process_upstream_event( self, domain_event: TAggregateEvent, notification_id: int, upstream_name: str ) -> Tuple[ListOfAggregateEvents, List]: + """ + Processes given domain event from an upstream notification log. + + Calls the process application policy, and then records a process event, + hence recording atomically all new domain events created by the call + to the policy along with any ORM objects that may result. + + :param domain_event: Domain event to be processed. + :param notification_id: Position in notification log. + :param upstream_name: Name of upstream application. + :return: Returns a list of new domain events. + """ cycle_started: Optional[float] = None if self.tick_interval is not None: cycle_started = time.process_time() @@ -382,6 +410,12 @@ def process_upstream_event( def get_event_from_notification( self, notification: Dict[str, Any] ) -> TAggregateEvent: + """ + Extracts the domain event of an event notification. + + :param notification: The event notification. + :return: A domain event. + """ return self.event_store.event_mapper.event_from_topic_and_state( topic=notification[self.notification_topic_key], state=notification[self.notification_state_key], @@ -431,6 +465,13 @@ def get_recorded_position(self, upstream_name): def call_policy( self, domain_event: TAggregateEvent ) -> Tuple[ListOfAggregateEvents, ListOfCausalDependencies, List[Any], List[Any]]: + """ + Calls the process application policy with the given domain event. + + :param domain_event: Domain event that will be given to the policy. + + :return: Returns a list of domain events, and a list of causal dependencies. 
+ """ # Get the application policy. policy = self.policy_func or self.policy @@ -528,6 +569,12 @@ def policy( def collect_pending_events( self, aggregates: Sequence[TAggregate] ) -> ListOfAggregateEvents: + """ + Collects all the pending events from the given sequence of aggregates. + + :param aggregates: Sequence of aggregates. + :return: Returns a list of domain events. + """ pending_events: ListOfAggregateEvents = [] num_changed_aggregates = 0 # This doesn't necessarily obtain events in causal order... @@ -592,7 +639,15 @@ def drop_table(self) -> None: class ProcessApplicationWithSnapshotting(SnapshottingApplication, ProcessApplication): + """ + Supplements process applications that will use snapshotting. + """ def take_snapshots(self, new_events: Sequence[TAggregateEvent]) -> None: + """ + Takes snapshot of aggregates, according to the policy. + + :param new_events: Domain events used to detect if a snapshot is to be taken. + """ assert self.snapshotting_policy for event in new_events: if self.snapshotting_policy.condition([event]): diff --git a/eventsourcing/application/simple.py b/eventsourcing/application/simple.py index 211fc8ba2..b17bacd9c 100644 --- a/eventsourcing/application/simple.py +++ b/eventsourcing/application/simple.py @@ -30,10 +30,11 @@ from eventsourcing.domain.model.events import DomainEvent, publish from eventsourcing.exceptions import ProgrammingError, PromptFailed from eventsourcing.infrastructure.base import ( + DEFAULT_PIPELINE_ID, AbstractEventStore, AbstractRecordManager, BaseRecordManager, - DEFAULT_PIPELINE_ID, + RecordManagerWithNotifications, RecordManagerWithTracking, TrackingKwargs, ) @@ -253,16 +254,17 @@ def persistence_policy(self) -> PersistencePolicy: return self._persistence_policy def _raise_on_missing_infrastructure(self, what_is_missing): - msg = "Application class %s does not have a %s." % ( - type(self).__name__, - what_is_missing, - ) if not isinstance(self, ApplicationWithConcreteInfrastructure): - msg += ( - " and is not an ApplicationWithConcreteInfrastructure." + msg = ( + "Application class %s is not an subclass of %s." " Try using or inheriting from or mixin() an application" " class with concrete infrastructure such as SQLAlchemyApplication" " or DjangoApplication or AxonApplication." + ) % (type(self), ApplicationWithConcreteInfrastructure) + else: + msg = "Application class %s does not have a %s" % ( + type(self).__name__, + what_is_missing, ) raise ProgrammingError(msg) @@ -318,7 +320,7 @@ def construct_infrastructure_factory( def construct_datastore(self) -> None: """ - Constructs datastore object (which helps by creating and dropping tables). + Constructs datastore object (used to create and drop database tables). """ assert self.infrastructure_factory self._datastore = self.infrastructure_factory.construct_datastore() @@ -383,6 +385,12 @@ def change_pipeline(self, pipeline_id: int) -> None: self.event_store.record_manager.pipeline_id = pipeline_id def close(self) -> None: + """ + Closes the application for further use. + + The persistence policy is closed, and the application's + connection to the database is closed. + """ # Close the persistence policy. if self._persistence_policy is not None: self._persistence_policy.close() @@ -416,10 +424,29 @@ def mixin(cls: T, infrastructure_class: type) -> T: def save( self, aggregates=(), orm_objects_pending_save=(), orm_objects_pending_delete=(), - ): + ) -> None: + """ + Saves state of aggregates, and ORM objects. 
+ + All of the pending events of the aggregates, along with the + ORM objects, are recorded atomically as a process event. + + Then a "prompt to pull" is published, and, if the repository cache + is in use, then puts the aggregates in the cache. + + :param aggregates: One or many aggregates. + :param orm_objects_pending_save: Sequence of ORM objects to be saved. + :param orm_objects_pending_delete: Sequance of ORM objects to be deleted. + """ + # Collect pending events from the aggregates. new_events = [] if isinstance(aggregates, BaseAggregateRoot): aggregates = [aggregates] + else: + pass + # Todo: Make sure record manager supports writing events from more than + # one aggregate atomically (e.g. Cassandra and EventStore don't). + for aggregate in aggregates: new_events += aggregate.__batch_pending_events__() process_event = ProcessEvent( @@ -428,30 +455,42 @@ def save( orm_objs_pending_delete=orm_objects_pending_delete, ) new_records = self.record_process_event(process_event) - # Find the head notification ID. - notifiable_events = [e for e in new_events if e.__notifiable__] - head_notification_id = None - if len(notifiable_events): - record_manager = self.event_store.record_manager - notification_id_name = record_manager.notification_id_name - notifications = [] - for record in new_records: - if not hasattr(record, notification_id_name): - continue - if not isinstance(getattr(record, notification_id_name), int): - continue - notifications.append( - record_manager.create_notification_from_record(record) - ) - - if len(notifications): - head_notification_id = notifications[-1]["id"] - self.publish_prompt(head_notification_id) - for aggregate in aggregates: - if self.repository.use_cache: + record_manager = self.event_store.record_manager + if isinstance(record_manager, RecordManagerWithNotifications): + # Find the head notification ID. + notifiable_events = [e for e in new_events if e.__notifiable__] + head_notification_id = None + if len(notifiable_events): + notification_id_name = record_manager.notification_id_name + notifications = [] + for record in new_records: + if not hasattr(record, notification_id_name): + continue + if not isinstance(getattr(record, notification_id_name), int): + continue + notifications.append( + record_manager.create_notification_from_record(record) + ) + + if len(notifications): + head_notification_id = notifications[-1]["id"] + self.publish_prompt(head_notification_id) + if self.repository.use_cache: + for aggregate in aggregates: self.repository.put_entity_in_cache(aggregate.id, aggregate) def record_process_event(self, process_event: ProcessEvent) -> List: + """ + Records a process event. + + Converts the domain events of the process event to event record + objects, and writes the event records and the ORM objects to + the database using the application's event store's record manager. + + :param process_event: An instance of + :class:`~eventsourcing.application.simple.ProcessEvent` + :return: A list of event records. + """ # Construct event records. event_records = self.construct_event_records( process_event.domain_events, process_event.causal_dependencies @@ -473,6 +512,13 @@ def construct_event_records( pending_events: Iterable[TAggregateEvent], causal_dependencies: Optional[ListOfCausalDependencies], ) -> List: + """ + Constructs event records from domain events. + + :param pending_events: An iterable of domain events. + :param causal_dependencies: A list of causal dependencies. + :return: A list of event records. + """ # Convert to event records. 
sequenced_items = self.event_store.items_from_events(pending_events) record_manager = self.event_store.record_manager @@ -495,6 +541,12 @@ def construct_event_records( setattr( event_record, notification_id_name, "event-not-notifiable" ) + else: + if any((not e.__notifiable__ for e in pending_events)): + raise Exception( + "Can't set __notifiable__=False without " + "set_notification_ids=True " + ) if self.use_causal_dependencies: assert hasattr(record_manager.record_class, "causal_dependencies") @@ -507,6 +559,13 @@ def construct_event_records( return event_records def publish_prompt(self, head_notification_id=None): + """ + Publishes a "prompt to pull" (instance of + :class:`~eventsourcing.application.simple.PromptToPull`). + + :param head_notification_id: Maximum notification ID of event records + to be pulled. + """ prompt = PromptToPull(self.name, self.pipeline_id, head_notification_id) try: publish(prompt) diff --git a/eventsourcing/application/snapshotting.py b/eventsourcing/application/snapshotting.py index faaadbf4f..d69b65e00 100644 --- a/eventsourcing/application/snapshotting.py +++ b/eventsourcing/application/snapshotting.py @@ -3,11 +3,8 @@ from eventsourcing.application.policies import SnapshottingPolicy from eventsourcing.application.simple import SimpleApplication from eventsourcing.domain.model.entity import TVersionedEntity, TVersionedEvent -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.snapshotting import EventSourcedSnapshotStrategy diff --git a/eventsourcing/contrib/cargo_shipping_example.py b/eventsourcing/contrib/cargo_shipping_example.py index 9027ec83d..75e6e0b23 100644 --- a/eventsourcing/contrib/cargo_shipping_example.py +++ b/eventsourcing/contrib/cargo_shipping_example.py @@ -24,16 +24,13 @@ from eventsourcing.application.process import ProcessApplication from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.runner import ( - InProcessRunner, - SingleThreadedRunner, -) -from eventsourcing.system.definition import System from eventsourcing.domain.model.aggregate import ( AggregateRoot, TAggregate, TAggregateEvent, ) +from eventsourcing.system.definition import System +from eventsourcing.system.runner import InProcessRunner, SingleThreadedRunner # Locations in the world. 
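The docstrings added above for ``call_policy()`` and ``policy()`` describe how a process application's policy is called with each upstream domain event, and the ``applicationpolicy2`` decorator touched in ``eventsourcing/application/decorators.py`` earlier in this diff lets such a policy be written as one handler per event type, registered against a dispatching wrapper and cached by type. A simplified standalone sketch of that register-and-dispatch pattern (the event and process class names here are hypothetical, and this is not the library's code):

```python
from typing import Callable, Dict


def policy_dispatcher(default: Callable) -> Callable:
    """Dispatch policy calls to handlers registered per event type."""
    handlers: Dict[type, Callable] = {}
    cache: Dict[type, Callable] = {}

    def dispatch(event_type: type) -> Callable:
        # Cache the resolved handler for each concrete event type.
        try:
            return cache[event_type]
        except KeyError:
            handler = handlers.get(event_type, default)
            cache[event_type] = handler
            return handler

    def policy_function_wrapper(self, repository, event):
        return dispatch(type(event))(self, repository, event)

    def register(event_type: type) -> Callable:
        def registered_function_decorator(func: Callable) -> Callable:
            handlers[event_type] = func
            return func

        return registered_function_decorator

    policy_function_wrapper.register = register
    return policy_function_wrapper


class SomethingCreated:
    """Hypothetical domain event class, standing in for a real library event."""


class ExampleProcess:
    @policy_dispatcher
    def policy(self, repository, event):
        """Default policy: ignore event types without a registered handler."""

    @policy.register(SomethingCreated)
    def _when_something_created(self, repository, event):
        print("handling", type(event).__name__)


ExampleProcess().policy(None, SomethingCreated())  # prints "handling SomethingCreated"
```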
diff --git a/eventsourcing/contrib/paxos/application.py b/eventsourcing/contrib/paxos/application.py index 8ca701da3..9d39c7e2d 100644 --- a/eventsourcing/contrib/paxos/application.py +++ b/eventsourcing/contrib/paxos/application.py @@ -3,11 +3,7 @@ from typing import Any, Dict, Optional, Sequence, Set, Union from uuid import UUID -from eventsourcing.application.process import ( - ProcessApplication, - WrappedRepository, -) -from eventsourcing.system.definition import System +from eventsourcing.application.process import ProcessApplication, WrappedRepository from eventsourcing.contrib.paxos.composable import ( PaxosInstance, PaxosMessage, @@ -21,6 +17,7 @@ RecordConflictError, RepositoryKeyError, ) +from eventsourcing.system.definition import System class PaxosAggregate(BaseAggregateRoot): diff --git a/eventsourcing/contrib/paxos/composable.py b/eventsourcing/contrib/paxos/composable.py index b37d3be5e..cdbc0e9dd 100644 --- a/eventsourcing/contrib/paxos/composable.py +++ b/eventsourcing/contrib/paxos/composable.py @@ -385,8 +385,8 @@ def __init__(self, network_uid, quorum_size): self.acceptors = dict() # maps from_uid => last_accepted_proposal_id self.final_value = None self.final_acceptors = ( - None - ) # Will be a set of acceptor UIDs once the final value is chosen + None # Will be a set of acceptor UIDs once the final value is chosen + ) self.final_proposal_id = None def receive_accepted(self, msg): diff --git a/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py b/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py index 336c2baab..b57adce65 100644 --- a/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py +++ b/eventsourcing/contrib/suffixtrees/domain/model/generalizedsuffixtree.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # from collections import OrderedDict # from time import sleep # from uuid import uuid4 diff --git a/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py b/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py index 6edfd4379..faaee24ff 100644 --- a/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py +++ b/eventsourcing/contrib/suffixtrees/domain/model/suffixtree.py @@ -1,17 +1,12 @@ -# coding=utf-8 -from __future__ import unicode_literals - import datetime from uuid import uuid4 from eventsourcing.domain.model.decorators import attribute from eventsourcing.domain.model.entity import ( - AttributeChanged, - Created, - Discarded, TimestampedVersionedEntity, ) -from eventsourcing.domain.model.events import publish +from eventsourcing.domain.model.events import AttributeChangedEvent, CreatedEvent, \ + DiscardedEvent, publish from eventsourcing.example.application import ExampleApplication from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository @@ -22,13 +17,13 @@ class SuffixTree(TimestampedVersionedEntity): for construction. """ - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__(self, root_node_id, case_insensitive=False, **kwargs): @@ -203,13 +198,13 @@ class Node(TimestampedVersionedEntity): """A node in the suffix tree. 
""" - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__(self, suffix_node_id=None, *args, **kwargs): @@ -232,13 +227,13 @@ class Edge(TimestampedVersionedEntity): """An edge in the suffix tree. """ - class Created(Created): + class Created(CreatedEvent): pass - class AttributeChanged(AttributeChanged): + class AttributeChanged(AttributeChangedEvent): pass - class Discarded(Discarded): + class Discarded(DiscardedEvent): pass def __init__( diff --git a/eventsourcing/domain/model/__init__.py b/eventsourcing/domain/model/__init__.py index 8b1378917..e69de29bb 100644 --- a/eventsourcing/domain/model/__init__.py +++ b/eventsourcing/domain/model/__init__.py @@ -1 +0,0 @@ - diff --git a/eventsourcing/domain/model/array.py b/eventsourcing/domain/model/array.py index 74a5fd40b..86f9717a9 100644 --- a/eventsourcing/domain/model/array.py +++ b/eventsourcing/domain/model/array.py @@ -4,12 +4,10 @@ from uuid import uuid5 from eventsourcing.domain.model.decorators import retry -from eventsourcing.domain.model.entity import ( - TimestampedVersionedEntity, -) +from eventsourcing.domain.model.entity import TimestampedVersionedEntity from eventsourcing.domain.model.events import publish -from eventsourcing.exceptions import ArrayIndexError, ConcurrencyError from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import ArrayIndexError, ConcurrencyError class ItemAssigned(TimestampedVersionedEntity.Event): diff --git a/eventsourcing/domain/model/collection.py b/eventsourcing/domain/model/collection.py index 1d25aea21..8f4521830 100644 --- a/eventsourcing/domain/model/collection.py +++ b/eventsourcing/domain/model/collection.py @@ -1,11 +1,9 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - -from typing import Any, Set, Iterator, Optional +from typing import Any, Iterator, Optional, Set from uuid import UUID from eventsourcing.domain.model.entity import ( - TTimestampedVersionedEntity, TimestampedVersionedEntity, + TTimestampedVersionedEntity, ) from eventsourcing.domain.model.repository import AbstractEntityRepository diff --git a/eventsourcing/domain/model/command.py b/eventsourcing/domain/model/command.py index 0700a08b7..b31421139 100644 --- a/eventsourcing/domain/model/command.py +++ b/eventsourcing/domain/model/command.py @@ -14,7 +14,9 @@ class Event(BaseAggregateRoot.Event[TAggregate]): class Created(Event[TAggregate], BaseAggregateRoot.Created[TAggregate]): pass - class AttributeChanged(Event[TAggregate], BaseAggregateRoot.AttributeChanged[TAggregate]): + class AttributeChanged( + Event[TAggregate], BaseAggregateRoot.AttributeChanged[TAggregate] + ): pass class Discarded(Event[TAggregate], BaseAggregateRoot.Discarded[TAggregate]): diff --git a/eventsourcing/domain/model/decorators.py b/eventsourcing/domain/model/decorators.py index e89b19593..ca55a1be4 100644 --- a/eventsourcing/domain/model/decorators.py +++ b/eventsourcing/domain/model/decorators.py @@ -2,7 +2,7 @@ from functools import singledispatch, wraps from inspect import isfunction from time import sleep -from typing import Dict, Type, Callable, no_type_check, Union, Optional, Sequence +from typing import Callable, Dict, Optional, Sequence, Type, Union, no_type_check from eventsourcing.domain.model.events import DomainEvent, subscribe from eventsourcing.exceptions 
import ProgrammingError diff --git a/eventsourcing/domain/model/entity.py b/eventsourcing/domain/model/entity.py index e4c6db4fd..bc9f86d22 100644 --- a/eventsourcing/domain/model/entity.py +++ b/eventsourcing/domain/model/entity.py @@ -194,7 +194,9 @@ def id(self) -> UUID: """ return self._id - def __change_attribute__(self: TDomainEntity, name: str, value: Any, **kwargs) -> None: + def __change_attribute__( + self: TDomainEntity, name: str, value: Any, **kwargs + ) -> None: """ Changes named attribute with the given value, by triggering an AttributeChanged event. @@ -203,7 +205,9 @@ def __change_attribute__(self: TDomainEntity, name: str, value: Any, **kwargs) - "DomainEntity.AttributeChanged[TDomainEntity]" ] = self.AttributeChanged assert isinstance(self, DomainEntity) # For PyCharm navigation. - self.__trigger_event__(event_class=event_class, name=name, value=value, **kwargs) + self.__trigger_event__( + event_class=event_class, name=name, value=value, **kwargs + ) class AttributeChanged(Event[TDomainEntity], AttributeChangedEvent[TDomainEntity]): """ @@ -510,10 +514,11 @@ class EntityWithECC(DomainEntity): """ Entity whose events have event ID, correlation ID, and causation ID. """ + class Event(DomainEntity.Event): def __init__(self, *, processed_event=None, application_name, **kwargs): - event_id = kwargs.get('event_id') or "{}:{}:{}".format( + event_id = kwargs.get("event_id") or "{}:{}:{}".format( application_name, kwargs["originator_id"], kwargs["originator_version"] ) kwargs["event_id"] = event_id diff --git a/eventsourcing/domain/model/snapshot.py b/eventsourcing/domain/model/snapshot.py index e446b5621..edc2eb74b 100644 --- a/eventsourcing/domain/model/snapshot.py +++ b/eventsourcing/domain/model/snapshot.py @@ -1,12 +1,13 @@ -from typing import Dict, Optional, Any +from typing import Any, Dict, Optional from uuid import UUID from eventsourcing.domain.model.events import ( + AbstractSnapshot, EventWithOriginatorID, EventWithOriginatorVersion, EventWithTimestamp, - AbstractSnapshot) -from eventsourcing.utils.topic import resolve_topic, reconstruct_object +) +from eventsourcing.utils.topic import reconstruct_object, resolve_topic from eventsourcing.whitehead import TEntity diff --git a/eventsourcing/domain/model/timebucketedlog.py b/eventsourcing/domain/model/timebucketedlog.py index d4d34e2a4..32548ce91 100644 --- a/eventsourcing/domain/model/timebucketedlog.py +++ b/eventsourcing/domain/model/timebucketedlog.py @@ -1,6 +1,6 @@ import datetime from decimal import Decimal -from typing import Optional, Any, Union +from typing import Any, Optional, Union from uuid import UUID, uuid5 from dateutil.relativedelta import relativedelta @@ -12,8 +12,8 @@ LoggedEvent, publish, ) -from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import RepositoryKeyError from eventsourcing.utils.times import ( datetime_from_timestamp, decimaltimestamp, diff --git a/eventsourcing/domain/model/versioning.py b/eventsourcing/domain/model/versioning.py index 949fa4a65..780b25499 100644 --- a/eventsourcing/domain/model/versioning.py +++ b/eventsourcing/domain/model/versioning.py @@ -33,7 +33,7 @@ class Upcastable(Event): def __init__(self): if type(self).__class_version__ > 0: - self.__dict__['__class_version__'] = type(self).__class_version__ + self.__dict__["__class_version__"] = type(self).__class_version__ super(Upcastable, self).__init__() @classmethod @@ -42,11 +42,11 @@ def 
__upcast_state__(cls, obj_state: Dict) -> Dict: Upcasts obj_state from the version of the class when the object state was recorded, to be compatible with current version of the class. """ - class_version = obj_state.get('__class_version__', 0) + class_version = obj_state.get("__class_version__", 0) while class_version < cls.__class_version__: obj_state = cls.__upcast__(obj_state, class_version) class_version += 1 - obj_state['__class_version__'] = class_version + obj_state["__class_version__"] = class_version return obj_state @classmethod diff --git a/eventsourcing/example/application.py b/eventsourcing/example/application.py index 0f99fa469..6e9763dc7 100644 --- a/eventsourcing/example/application.py +++ b/eventsourcing/example/application.py @@ -1,6 +1,5 @@ -from abc import ABC - import zlib +from abc import ABC from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.entity import VersionedEntity @@ -13,7 +12,6 @@ from eventsourcing.infrastructure.snapshotting import EventSourcedSnapshotStrategy from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder - # Please note, the code is this module is basically old-fashioned, and will # be removed when the tests that depend on it are rewritten to use the new # application classes in package eventsourcing.application. These were the @@ -83,7 +81,7 @@ def construct_event_store( event_sequence_id_attr=event_sequence_id_attr, event_position_attr=event_position_attr, cipher=cipher, - compressor=zlib if cipher else None + compressor=zlib if cipher else None, ) event_store = EventStore( record_manager=record_manager, event_mapper=sequenced_item_mapper @@ -108,7 +106,7 @@ def construct_sequenced_item_mapper( json_encoder_class=json_encoder_class, json_decoder_class=json_decoder_class, cipher=cipher, - compressor=compressor + compressor=compressor, ) def close(self): diff --git a/eventsourcing/example/domainmodel.py b/eventsourcing/example/domainmodel.py index 9601edaad..0a0754768 100644 --- a/eventsourcing/example/domainmodel.py +++ b/eventsourcing/example/domainmodel.py @@ -1,8 +1,9 @@ from eventsourcing.domain.model.decorators import attribute from eventsourcing.domain.model.entity import ( EntityWithHashchain, + TDomainEntity, TimestampedVersionedEntity, - TDomainEntity) +) from eventsourcing.domain.model.repository import AbstractEntityRepository @@ -12,7 +13,8 @@ class Example(EntityWithHashchain, TimestampedVersionedEntity): """ class Event( - EntityWithHashchain.Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity] + EntityWithHashchain.Event[TDomainEntity], + TimestampedVersionedEntity.Event[TDomainEntity], ): """Supertype for events of example entities.""" @@ -28,10 +30,14 @@ class AttributeChanged( ): """Published when an Example is created.""" - class Discarded(Event[TDomainEntity], TimestampedVersionedEntity.Discarded[TDomainEntity]): + class Discarded( + Event[TDomainEntity], TimestampedVersionedEntity.Discarded[TDomainEntity] + ): """Published when an Example is discarded.""" - class Heartbeat(Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity]): + class Heartbeat( + Event[TDomainEntity], TimestampedVersionedEntity.Event[TDomainEntity] + ): """Published when a heartbeat in the entity occurs (see below).""" def mutate(self, obj: "Example") -> None: diff --git a/eventsourcing/example/interface/flaskapp.py b/eventsourcing/example/interface/flaskapp.py index dc1d3c517..210406b5c 100644 --- a/eventsourcing/example/interface/flaskapp.py +++ 
b/eventsourcing/example/interface/flaskapp.py @@ -1,4 +1,5 @@ import os + from flask import Flask from flask_sqlalchemy import SQLAlchemy from sqlalchemy_utils.types.uuid import UUIDType diff --git a/eventsourcing/example/interface/flaskwsgi.py b/eventsourcing/example/interface/flaskwsgi.py deleted file mode 100644 index 808e2a65d..000000000 --- a/eventsourcing/example/interface/flaskwsgi.py +++ /dev/null @@ -1,8 +0,0 @@ -import eventsourcing.example.interface.flaskapp - - -# Todo: Investigate why removing this file breaks python3.3. -# For some reason, this file is needed to run flaskapp.py -# with uWSGI and python3.3. -# -application = eventsourcing.example.interface.flaskapp.application diff --git a/eventsourcing/infrastructure/axonserver/datastore.py b/eventsourcing/infrastructure/axonserver/datastore.py index 1f38cc2d2..9ede8ccf5 100644 --- a/eventsourcing/infrastructure/axonserver/datastore.py +++ b/eventsourcing/infrastructure/axonserver/datastore.py @@ -1,7 +1,7 @@ import os -from typing import Optional, Any +from typing import Any, Optional -from axonclient.client import AxonClient, DEFAULT_LOCAL_AXONSERVER_URI +from axonclient.client import DEFAULT_LOCAL_AXONSERVER_URI, AxonClient from eventsourcing.infrastructure.datastore import AbstractDatastore, DatastoreSettings @@ -11,7 +11,13 @@ def __init__(self, uri: Optional[str] = None): if uri is not None: self.uri = uri else: - self.uri = os.getenv("DB_URI", DEFAULT_LOCAL_AXONSERVER_URI) + if "AXON_HOST" in os.environ: + axon_db_uri = "{}:{}".format( + os.getenv("AXON_HOST"), os.getenv("AXON_PORT", "8124") + ) + else: + axon_db_uri = DEFAULT_LOCAL_AXONSERVER_URI + self.uri = os.getenv("DB_URI", axon_db_uri) class AxonDatastore(AbstractDatastore[AxonSettings]): diff --git a/eventsourcing/infrastructure/axonserver/manager.py b/eventsourcing/infrastructure/axonserver/manager.py index 03ba70db8..78a777998 100644 --- a/eventsourcing/infrastructure/axonserver/manager.py +++ b/eventsourcing/infrastructure/axonserver/manager.py @@ -33,7 +33,7 @@ def __init__(self, axon_client: AxonClient, *args: Any, **kwargs: Any): super(AxonRecordManager, self).__init__(*args, **kwargs) self.axon_client = axon_client self.contiguous_record_ids = True - self.notification_id_name = 'id' + self.notification_id_name = "id" def all_sequence_ids(self): return [] @@ -55,8 +55,7 @@ def get_notification_records( stop = stop or INT32_MAX number_of_permits = min(stop - start, INT32_MAX) events = self.axon_client.iter_events( - tracking_token=start, - number_of_permits=number_of_permits + tracking_token=start, number_of_permits=number_of_permits ) for tracking_token, event in events: yield AxonNotification(tracking_token, axon_event=event) @@ -120,18 +119,18 @@ def to_axon_event(self, item): aggregate_sequence_number = getattr(item, self.field_names.position) payload_type = getattr(item, self.field_names.topic) payload_data = getattr(item, self.field_names.state) - is_snapshot = payload_type.endswith('#Snapshot') + is_snapshot = payload_type.endswith("#Snapshot") return AxonEvent( message_identifier=message_identifier, aggregate_identifier=sequence_id, aggregate_sequence_number=aggregate_sequence_number, - aggregate_type='AggregateRoot', + aggregate_type="AggregateRoot", timestamp=0, payload_type=payload_type, - payload_revision='1', + payload_revision="1", payload_data=payload_data, snapshot=is_snapshot, - meta_data={} + meta_data={}, ) def write_records(self, records): diff --git a/eventsourcing/infrastructure/base.py b/eventsourcing/infrastructure/base.py index 
b3bb2c809..753994f15 100644 --- a/eventsourcing/infrastructure/base.py +++ b/eventsourcing/infrastructure/base.py @@ -24,7 +24,6 @@ from eventsourcing.infrastructure.sequenceditemmapper import AbstractSequencedItemMapper from eventsourcing.whitehead import TEvent - DEFAULT_PIPELINE_ID = 0 TrackingKwargs = Dict[str, Union[str, int]] @@ -127,7 +126,7 @@ def __init__( record_class: type, sequenced_item_class: Type[NamedTuple] = SequencedItem, # type: ignore contiguous_record_ids: bool = False, - application_name: str = '', + application_name: str = "", pipeline_id: int = DEFAULT_PIPELINE_ID, **kwargs: Any ): @@ -294,9 +293,7 @@ def get_notifications( yield self.create_notification_from_record(record) def create_notification_from_record(self, record): - notification = { - "id": getattr(record, self.notification_id_name) - } + notification = {"id": getattr(record, self.notification_id_name)} for field_name in self.field_names: notification[field_name] = getattr(record, field_name) if hasattr(record, "causal_dependencies"): @@ -534,9 +531,7 @@ class AbstractEventStore(ABC, Generic[TEvent, TRecordManager]): """ def __init__( - self, - record_manager: TRecordManager, - event_mapper: AbstractSequencedItemMapper, + self, record_manager: TRecordManager, event_mapper: AbstractSequencedItemMapper, ): """ Initialises event store object. diff --git a/eventsourcing/infrastructure/cassandra/manager.py b/eventsourcing/infrastructure/cassandra/manager.py index d9c62c20f..5cd7790d7 100644 --- a/eventsourcing/infrastructure/cassandra/manager.py +++ b/eventsourcing/infrastructure/cassandra/manager.py @@ -1,4 +1,5 @@ import os + from cassandra import InvalidRequest from cassandra.cqlengine.functions import Token from cassandra.cqlengine.query import BatchQuery, LWTException diff --git a/eventsourcing/infrastructure/cassandra/records.py b/eventsourcing/infrastructure/cassandra/records.py index 5a16c2c1b..25cbd904c 100644 --- a/eventsourcing/infrastructure/cassandra/records.py +++ b/eventsourcing/infrastructure/cassandra/records.py @@ -1,4 +1,4 @@ -from cassandra.cqlengine.models import columns, Model +from cassandra.cqlengine.models import Model, columns class IntegerSequencedRecord(Model): diff --git a/eventsourcing/infrastructure/datastore.py b/eventsourcing/infrastructure/datastore.py index d6d7f775a..2ae573b3e 100644 --- a/eventsourcing/infrastructure/datastore.py +++ b/eventsourcing/infrastructure/datastore.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Optional, Any, TypeVar, Generic +from typing import Any, Generic, Optional, TypeVar class DatastoreSettings(object): diff --git a/eventsourcing/infrastructure/django/manager.py b/eventsourcing/infrastructure/django/manager.py index 7b545355e..5b58a8304 100644 --- a/eventsourcing/infrastructure/django/manager.py +++ b/eventsourcing/infrastructure/django/manager.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Optional, Sequence, Dict +from typing import Any, Dict, Iterable, Optional, Sequence from uuid import UUID from django.db import IntegrityError, ProgrammingError, connection, transaction diff --git a/eventsourcing/infrastructure/eventsourcedrepository.py b/eventsourcing/infrastructure/eventsourcedrepository.py index 54ce4de10..7dbd311ca 100644 --- a/eventsourcing/infrastructure/eventsourcedrepository.py +++ b/eventsourcing/infrastructure/eventsourcedrepository.py @@ -4,13 +4,10 @@ from uuid import UUID from eventsourcing.domain.model.entity import TVersionedEntity, TVersionedEvent -from eventsourcing.exceptions import 
RepositoryKeyError -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) -from eventsourcing.domain.model.repository import AbstractEntityRepository from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.exceptions import RepositoryKeyError +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.infrastructure.snapshotting import AbstractSnapshotStrategy from eventsourcing.whitehead import SEntity diff --git a/eventsourcing/infrastructure/eventstore.py b/eventsourcing/infrastructure/eventstore.py index 84fba8cef..0499e36db 100644 --- a/eventsourcing/infrastructure/eventstore.py +++ b/eventsourcing/infrastructure/eventstore.py @@ -6,7 +6,6 @@ from eventsourcing.infrastructure.iterators import SequencedItemIterator from eventsourcing.whitehead import TEvent - # Todo: Unify iterators in EventStore and in NotificationLog, # by pushing behaviour down to record manager? diff --git a/eventsourcing/infrastructure/factory.py b/eventsourcing/infrastructure/factory.py index 74cbbdb4f..4e221d037 100644 --- a/eventsourcing/infrastructure/factory.py +++ b/eventsourcing/infrastructure/factory.py @@ -2,9 +2,9 @@ from typing import Any, Generic, NamedTuple, Optional, Type from eventsourcing.infrastructure.base import ( + DEFAULT_PIPELINE_ID, AbstractEventStore, AbstractRecordManager, - DEFAULT_PIPELINE_ID, ) from eventsourcing.infrastructure.datastore import AbstractDatastore from eventsourcing.infrastructure.eventstore import EventStore diff --git a/eventsourcing/infrastructure/integersequencegenerators/redisincr.py b/eventsourcing/infrastructure/integersequencegenerators/redisincr.py index d5f6da71f..9e4a3896e 100644 --- a/eventsourcing/infrastructure/integersequencegenerators/redisincr.py +++ b/eventsourcing/infrastructure/integersequencegenerators/redisincr.py @@ -1,8 +1,8 @@ +import os from typing import Optional from uuid import uuid4 -import os -from redis import StrictRedis, Redis +from redis import Redis, StrictRedis from eventsourcing.infrastructure.integersequencegenerators.base import ( AbstractIntegerSequenceGenerator, diff --git a/eventsourcing/infrastructure/iterators.py b/eventsourcing/infrastructure/iterators.py index 5ffbe0819..05d40eec8 100644 --- a/eventsourcing/infrastructure/iterators.py +++ b/eventsourcing/infrastructure/iterators.py @@ -1,6 +1,6 @@ from abc import abstractmethod from threading import Thread -from typing import Iterable, Optional, NamedTuple, Iterator, Any, List +from typing import Any, Iterable, Iterator, List, NamedTuple, Optional from uuid import UUID from eventsourcing.infrastructure.base import AbstractRecordManager, BaseRecordManager @@ -34,9 +34,7 @@ def __init__( :param limit: Limit to the number of items returned. :param is_ascending: Whether or not to iterate in ascending order. 
""" - assert isinstance(record_manager, BaseRecordManager), type( - record_manager - ) + assert isinstance(record_manager, BaseRecordManager), type(record_manager) assert isinstance(page_size, (int, type(None))) assert isinstance(limit, (int, type(None))) self.record_manager = record_manager @@ -255,9 +253,7 @@ def __init__( **kwargs: Any ): super(GetEntityEventsThread, self).__init__(*args, **kwargs) - assert isinstance(record_manager, BaseRecordManager), type( - record_manager - ) + assert isinstance(record_manager, BaseRecordManager), type(record_manager) self.record_manager = record_manager self.stored_entity_id = sequence_id self.gt = gt diff --git a/eventsourcing/infrastructure/popo/manager.py b/eventsourcing/infrastructure/popo/manager.py index 19723fdb6..82d169453 100644 --- a/eventsourcing/infrastructure/popo/manager.py +++ b/eventsourcing/infrastructure/popo/manager.py @@ -326,6 +326,7 @@ class PopoStoredEventRecord(object): Allows other attributes to be set, such as notification ID. """ + def __init__(self, sequenced_item: NamedTuple): self.sequenced_item = sequenced_item diff --git a/eventsourcing/infrastructure/popo/mapper.py b/eventsourcing/infrastructure/popo/mapper.py index f95f9ae00..e6fe7cf2d 100644 --- a/eventsourcing/infrastructure/popo/mapper.py +++ b/eventsourcing/infrastructure/popo/mapper.py @@ -1,7 +1,7 @@ -from typing import Tuple, Type, Dict, Any +from typing import Any, Dict, Tuple, Type from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper -from eventsourcing.utils.topic import resolve_topic, get_topic +from eventsourcing.utils.topic import get_topic, resolve_topic from eventsourcing.whitehead import TEvent diff --git a/eventsourcing/infrastructure/sequenceditemmapper.py b/eventsourcing/infrastructure/sequenceditemmapper.py index ff884eaab..aac900db8 100644 --- a/eventsourcing/infrastructure/sequenceditemmapper.py +++ b/eventsourcing/infrastructure/sequenceditemmapper.py @@ -7,7 +7,7 @@ SequencedItemFieldNames, ) from eventsourcing.utils.cipher.aes import AESCipher -from eventsourcing.utils.topic import get_topic, resolve_topic, reconstruct_object +from eventsourcing.utils.topic import get_topic, reconstruct_object, resolve_topic from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder from eventsourcing.whitehead import TEvent diff --git a/eventsourcing/infrastructure/snapshotting.py b/eventsourcing/infrastructure/snapshotting.py index 300d9bf65..496bfda2f 100644 --- a/eventsourcing/infrastructure/snapshotting.py +++ b/eventsourcing/infrastructure/snapshotting.py @@ -3,12 +3,9 @@ from typing import Optional from uuid import UUID -from eventsourcing.domain.model.snapshot import Snapshot -from eventsourcing.infrastructure.base import ( - AbstractEventStore, - AbstractRecordManager, -) from eventsourcing.domain.model.events import AbstractSnapshot +from eventsourcing.domain.model.snapshot import Snapshot +from eventsourcing.infrastructure.base import AbstractEventStore, AbstractRecordManager from eventsourcing.utils.topic import get_topic diff --git a/eventsourcing/infrastructure/sqlalchemy/datastore.py b/eventsourcing/infrastructure/sqlalchemy/datastore.py index 41e5a1175..d2def5b3a 100644 --- a/eventsourcing/infrastructure/sqlalchemy/datastore.py +++ b/eventsourcing/infrastructure/sqlalchemy/datastore.py @@ -1,11 +1,11 @@ import os -from typing import Optional, Union, Sequence, Any, Dict +from typing import Any, Dict, Optional, Sequence, Union from sqlalchemy import create_engine from sqlalchemy.engine import 
Engine from sqlalchemy.exc import InternalError from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.orm import scoped_session, sessionmaker, Session +from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool from eventsourcing.infrastructure.datastore import AbstractDatastore, DatastoreSettings diff --git a/eventsourcing/infrastructure/sqlalchemy/factory.py b/eventsourcing/infrastructure/sqlalchemy/factory.py index cae4fa3b2..74c1f7728 100644 --- a/eventsourcing/infrastructure/sqlalchemy/factory.py +++ b/eventsourcing/infrastructure/sqlalchemy/factory.py @@ -1,7 +1,7 @@ from typing import Any, NamedTuple, Optional, Type from eventsourcing.domain.model.events import DomainEvent -from eventsourcing.infrastructure.base import AbstractRecordManager, DEFAULT_PIPELINE_ID +from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID, AbstractRecordManager from eventsourcing.infrastructure.datastore import AbstractDatastore from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.factory import InfrastructureFactory diff --git a/eventsourcing/infrastructure/sqlalchemy/records.py b/eventsourcing/infrastructure/sqlalchemy/records.py index e9addadd8..0aa856d48 100644 --- a/eventsourcing/infrastructure/sqlalchemy/records.py +++ b/eventsourcing/infrastructure/sqlalchemy/records.py @@ -1,4 +1,4 @@ -from sqlalchemy import DECIMAL, String, LargeBinary +from sqlalchemy import DECIMAL, LargeBinary, String from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.sql.schema import Column, Index from sqlalchemy.sql.sqltypes import BigInteger, Integer, Text diff --git a/eventsourcing/system/definition.py b/eventsourcing/system/definition.py index 67f557fc6..a68c40241 100644 --- a/eventsourcing/system/definition.py +++ b/eventsourcing/system/definition.py @@ -1,10 +1,10 @@ import weakref -from _weakref import ReferenceType from abc import ABC, abstractmethod from collections import OrderedDict from copy import deepcopy from typing import Any, Dict, List, Optional, Type, TypeVar +from _weakref import ReferenceType from eventsourcing.application.popo import PopoApplication from eventsourcing.application.process import ProcessApplication from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure diff --git a/eventsourcing/system/multiprocess.py b/eventsourcing/system/multiprocess.py index 5f5402bba..b98e3f348 100644 --- a/eventsourcing/system/multiprocess.py +++ b/eventsourcing/system/multiprocess.py @@ -1,35 +1,23 @@ import multiprocessing from multiprocessing import Manager -from queue import Queue, Empty +from queue import Empty, Queue from time import sleep -from typing import ( - Sequence, - Optional, - Any, - List, - TYPE_CHECKING, - Dict, - Tuple, - Type, -) +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Type from eventsourcing.application.notificationlog import RecordManagerNotificationLog -from eventsourcing.application.process import ( - PromptToQuit, - ProcessApplication, -) +from eventsourcing.application.process import ProcessApplication, PromptToQuit from eventsourcing.application.simple import ( ApplicationWithConcreteInfrastructure, Prompt, - is_prompt_to_pull, PromptToPull, + is_prompt_to_pull, ) from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( - ProgrammingError, CausalDependencyFailed, OperationalError, + 
ProgrammingError, RecordConflictError, ) from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID diff --git a/eventsourcing/system/ray.py b/eventsourcing/system/ray.py index d0998127a..2f9e4dc4d 100644 --- a/eventsourcing/system/ray.py +++ b/eventsourcing/system/ray.py @@ -8,11 +8,13 @@ import ray -from eventsourcing.application.process import ( - ProcessApplication, +from eventsourcing.application.process import ProcessApplication +from eventsourcing.application.simple import ( + ApplicationWithConcreteInfrastructure, + Prompt, + PromptToPull, + is_prompt_to_pull, ) -from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure, \ - Prompt, is_prompt_to_pull, PromptToPull from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( @@ -26,9 +28,8 @@ ) from eventsourcing.system.definition import AbstractSystemRunner, System from eventsourcing.system.rayhelpers import RayDbJob, RayPrompt -from eventsourcing.system.runner import DEFAULT_POLL_INTERVAL - from eventsourcing.system.raysettings import ray_init_kwargs +from eventsourcing.system.runner import DEFAULT_POLL_INTERVAL ray.init(**ray_init_kwargs) @@ -305,7 +306,7 @@ def prompt(self, prompt: RayPrompt) -> None: if PROMPT_WITH_NOTIFICATION_OBJS: for notification in prompt.notifications: self._prompted_notifications[ - (upstream_name, notification['id']) + (upstream_name, notification["id"]) ] = notification if latest_head is not None: with self.heads_lock: diff --git a/eventsourcing/system/runner.py b/eventsourcing/system/runner.py index 38a1e3a05..bd4b4014f 100644 --- a/eventsourcing/system/runner.py +++ b/eventsourcing/system/runner.py @@ -5,37 +5,37 @@ from threading import Barrier, BrokenBarrierError, Event, Lock, Thread, Timer from time import sleep from typing import ( + TYPE_CHECKING, Any, Callable, Deque, Dict, Generic, - Iterable, List, Optional, - TYPE_CHECKING, Type, - TypeVar, Union, no_type_check, ) from eventsourcing.application.notificationlog import NotificationLogReader -from eventsourcing.application.process import ( - ProcessApplication, - PromptToQuit, +from eventsourcing.application.process import ProcessApplication, PromptToQuit +from eventsourcing.application.simple import ( + ApplicationWithConcreteInfrastructure, + Prompt, + PromptToPull, + is_prompt_to_pull, ) -from eventsourcing.application.simple import ApplicationWithConcreteInfrastructure, Prompt, is_prompt_to_pull, \ - PromptToPull from eventsourcing.domain.model.decorators import retry from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import ( CausalDependencyFailed, EventSourcingError, OperationalError, + ProgrammingError, RecordConflictError, - ProgrammingError) -from eventsourcing.system.definition import System, AbstractSystemRunner +) +from eventsourcing.system.definition import AbstractSystemRunner, System from eventsourcing.whitehead import T DEFAULT_POLL_INTERVAL = 5 @@ -130,7 +130,9 @@ def run_followers(self, prompt: Prompt) -> None: break else: if isinstance(prompt, PromptToPull): - downstream_names = self.system.downstream_names[prompt.process_name] + downstream_names = self.system.downstream_names[ + prompt.process_name + ] for downstream_name in downstream_names: downstream_process = self.processes[downstream_name] downstream_process.run(prompt) @@ -798,9 +800,7 @@ def run(self) -> None: try: # Get all notifications. 
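Note: the run() loop reformatted just below follows a simple pull-map-process shape: read every upstream reader, turn each notification into a domain event, then process it with its notification id and upstream name. A minimal runnable sketch of that shape follows; the StubApp class and its data are hypothetical, and only the three method names mirror the code in this diff.

class StubApp:
    # Hypothetical stand-in for a process application; only the method names
    # read_reader, get_event_from_notification and process_upstream_event
    # mirror the code in this diff.
    readers = {"upstream": None}

    def read_reader(self, upstream_name):
        # Pretend two notifications are waiting in the upstream notification log.
        return [{"id": 1, "topic": "X.Created"}, {"id": 2, "topic": "X.Changed"}]

    def get_event_from_notification(self, notification):
        # Stand-in for deserialising the stored event carried by the notification.
        return notification["topic"]

    def process_upstream_event(self, event, notification_id, upstream_name):
        print("processed", event, "from", upstream_name, "at", notification_id)

app = StubApp()
all_notifications = []
for upstream_name in app.readers:
    # Get all notifications for this upstream application.
    all_notifications.append((upstream_name, list(app.read_reader(upstream_name))))
for upstream_name, notifications in all_notifications:
    for notification in notifications:
        event = app.get_event_from_notification(notification)
        app.process_upstream_event(event, notification["id"], upstream_name)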
for upstream_name in self.app.readers: - notifications = list( - self.app.read_reader(upstream_name) - ) + notifications = list(self.app.read_reader(upstream_name)) all_notifications.append((upstream_name, notifications)) except: @@ -819,9 +819,7 @@ def run(self) -> None: # Process all notifications. for upstream_name, notifications in all_notifications: for notification in notifications: - event = self.app.get_event_from_notification( - notification - ) + event = self.app.get_event_from_notification(notification) self.app.process_upstream_event( event, notification["id"], upstream_name ) diff --git a/eventsourcing/system/thespian.py b/eventsourcing/system/thespian.py index b1936d7b0..1588e89ee 100644 --- a/eventsourcing/system/thespian.py +++ b/eventsourcing/system/thespian.py @@ -4,14 +4,12 @@ from thespian.actors import Actor, ActorExitRequest, ActorSystem from eventsourcing.application.notificationlog import RecordManagerNotificationLog -from eventsourcing.application.process import ( - ProcessApplication, -) -from eventsourcing.application.simple import is_prompt_to_pull, PromptToPull -from eventsourcing.system.definition import System, AbstractSystemRunner +from eventsourcing.application.process import ProcessApplication +from eventsourcing.application.simple import PromptToPull, is_prompt_to_pull from eventsourcing.domain.model.events import subscribe, unsubscribe from eventsourcing.exceptions import RecordConflictError from eventsourcing.infrastructure.base import DEFAULT_PIPELINE_ID +from eventsourcing.system.definition import AbstractSystemRunner, System logger = logging.getLogger() diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py index 5e0e3472e..ea1205e5c 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system.py @@ -142,7 +142,9 @@ def test_multiprocessing(self): # Start running operating system processes. with runner: # Get local application object. - paxosapplication0 = runner.get(runner.system.process_classes["paxosapplication0"]) + paxosapplication0 = runner.get( + runner.system.process_classes["paxosapplication0"] + ) assert isinstance(paxosapplication0, PaxosApplication) # Start proposing values on the different system pipelines. @@ -159,8 +161,12 @@ def test_multiprocessing(self): paxosapplication0.propose_value(key3, value3) # Check all the process applications have expected final values. 
- paxosapplication1 = runner.get(runner.system.process_classes["paxosapplication1"]) - paxosapplication2 = runner.get(runner.system.process_classes["paxosapplication2"]) + paxosapplication1 = runner.get( + runner.system.process_classes["paxosapplication1"] + ) + paxosapplication2 = runner.get( + runner.system.process_classes["paxosapplication2"] + ) assert isinstance(paxosapplication1, PaxosApplication) paxosapplication0.repository.use_cache = False @@ -188,7 +194,7 @@ def test_multiprocessing(self): @notquick @skipIf( sys.version_info[:2] == (3, 6), - "RayRunner not working with Python36 (pickle issue)" + "RayRunner not working with Python36 (pickle issue)", ) def test_ray_performance(self): @@ -239,9 +245,10 @@ def assert_final_value(process_name, pipeline_id, id, expected): print( "Resolved {} paxoses with ray in {:.4f}s " "({:.1f} values/s, {:.4f}s each)".format( - num_proposals, duration, + num_proposals, + duration, num_proposals / duration, - duration / num_proposals + duration / num_proposals, ) ) @@ -268,7 +275,9 @@ def test_multiprocessing_performance(self): sleep(1) # Construct an application instance in this process. - paxosapplication0 = runner.get(runner.system.process_classes["paxosapplication0"]) + paxosapplication0 = runner.get( + runner.system.process_classes["paxosapplication0"] + ) assert isinstance(paxosapplication0, PaxosApplication) @@ -298,7 +307,7 @@ def test_multiprocessing_performance(self): "each)".format(num_proposals, duration, duration / num_proposals) ) - @retry((KeyError, AssertionError), max_attempts=100, wait=0.05, stall=0) + @retry((KeyError, AssertionError), max_attempts=100, wait=0.5, stall=0) def assert_final_value(self, process, id, value): self.assertEqual(process.repository[id].final_value, value) diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py index e839494c7..38a305619 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_django.py @@ -1,10 +1,10 @@ from unittest import skip +from eventsourcing.application.django import DjangoApplication +from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system -from eventsourcing.application.django import DjangoApplication class TestPaxosSystemWithDjango(DjangoTestCase, test_paxos_system.TestPaxosSystem): diff --git a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py index 61056ca3f..d15ea66cd 100644 --- a/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py +++ b/eventsourcing/tests/contrib_tests/paxos_tests/test_paxos_system_with_popo.py @@ -1,11 +1,11 @@ import sys from unittest import skip, skipIf +from eventsourcing.application.popo import PopoApplication +from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) -from eventsourcing.tests.contrib_tests.paxos_tests import test_paxos_system -from eventsourcing.application.popo import PopoApplication class TestPaxosSystemWithPopo(PopoTestCase, test_paxos_system.TestPaxosSystem): diff --git 
a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py index 5a04ea183..1f14070c0 100644 --- a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py +++ b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # import os # # from eventsourcing.contrib.suffixtrees.domain.model.suffixtree import SuffixTree, SuffixTreeApplication, \ diff --git a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py index 550e13a7c..06dd492a3 100644 --- a/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py +++ b/eventsourcing/tests/contrib_tests/suffixtrees_tests/test_suffix_tree_generalized.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # import datetime # import traceback # import uuid diff --git a/eventsourcing/tests/core_tests/test_aggregate_root.py b/eventsourcing/tests/core_tests/test_aggregate_root.py index 501b187e6..2bb1e7e22 100644 --- a/eventsourcing/tests/core_tests/test_aggregate_root.py +++ b/eventsourcing/tests/core_tests/test_aggregate_root.py @@ -1,5 +1,5 @@ import uuid -from typing import Dict, Optional, cast, Generic +from typing import Dict, Generic, Optional, cast from unittest.case import TestCase from uuid import UUID @@ -16,8 +16,8 @@ from eventsourcing.tests.sequenced_item_tests.test_sqlalchemy_record_manager import ( SQLAlchemyRecordManagerTestCase, ) -from eventsourcing.whitehead import TEntity from eventsourcing.utils.topic import get_topic, resolve_topic +from eventsourcing.whitehead import TEntity class TestAggregateRootEvent(TestCase): @@ -77,31 +77,47 @@ def test_example_aggregate_event_classes(self): self.assertIn("AttributeChanged", ExampleAggregateRoot.__dict__) self.assertEqual(ExampleAggregateRoot.Event.__name__, "Event") - self.assertEqual(ExampleAggregateRoot.Event.__qualname__, "ExampleAggregateRoot.Event") + self.assertEqual( + ExampleAggregateRoot.Event.__qualname__, "ExampleAggregateRoot.Event" + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Event" self.assertEqual(get_topic(ExampleAggregateRoot.Event), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Event) self.assertEqual(ExampleAggregateRoot.Created.__name__, "Created") - self.assertEqual(ExampleAggregateRoot.Created.__qualname__, "ExampleAggregateRoot.Created") + self.assertEqual( + ExampleAggregateRoot.Created.__qualname__, "ExampleAggregateRoot.Created" + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Created" self.assertEqual(get_topic(ExampleAggregateRoot.Created), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Created) - self.assertTrue(issubclass(ExampleAggregateRoot.Created, ExampleAggregateRoot.Event)) + self.assertTrue( + issubclass(ExampleAggregateRoot.Created, ExampleAggregateRoot.Event) + ) self.assertEqual(ExampleAggregateRoot.Discarded.__name__, "Discarded") - self.assertEqual(ExampleAggregateRoot.Discarded.__qualname__, "ExampleAggregateRoot.Discarded") + self.assertEqual( + ExampleAggregateRoot.Discarded.__qualname__, + "ExampleAggregateRoot.Discarded", + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Discarded" 
self.assertEqual(get_topic(ExampleAggregateRoot.Discarded), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.Discarded) - self.assertTrue(issubclass(ExampleAggregateRoot.Discarded, ExampleAggregateRoot.Event)) + self.assertTrue( + issubclass(ExampleAggregateRoot.Discarded, ExampleAggregateRoot.Event) + ) self.assertEqual(ExampleAggregateRoot.ExampleCreated.__name__, "ExampleCreated") - self.assertEqual(ExampleAggregateRoot.ExampleCreated.__qualname__, "ExampleAggregateRoot.ExampleCreated") + self.assertEqual( + ExampleAggregateRoot.ExampleCreated.__qualname__, + "ExampleAggregateRoot.ExampleCreated", + ) topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.ExampleCreated" self.assertEqual(get_topic(ExampleAggregateRoot.ExampleCreated), topic) self.assertEqual(resolve_topic(topic), ExampleAggregateRoot.ExampleCreated) - self.assertTrue(issubclass(ExampleAggregateRoot.ExampleCreated, ExampleAggregateRoot.Event)) + self.assertTrue( + issubclass(ExampleAggregateRoot.ExampleCreated, ExampleAggregateRoot.Event) + ) def test_aggregate1_event_classes(self): self.assertIn("Event", Aggregate1.__dict__) @@ -433,14 +449,14 @@ def create_aggregate1(self) -> Aggregate1: a: Aggregate1 = Aggregate1.__create__() return a - def create_aggregate2(self)-> Aggregate2: + def create_aggregate2(self) -> Aggregate2: """ Factory method, creates and returns a new aggregate1 root entity. """ a: Aggregate2 = Aggregate2.__create__() return a - def create_aggregate3(self)-> Aggregate3: + def create_aggregate3(self) -> Aggregate3: """ Factory method, creates and returns a new aggregate1 root entity. """ diff --git a/eventsourcing/tests/core_tests/test_decorators.py b/eventsourcing/tests/core_tests/test_decorators.py index d814a57c3..d18c205ab 100644 --- a/eventsourcing/tests/core_tests/test_decorators.py +++ b/eventsourcing/tests/core_tests/test_decorators.py @@ -8,8 +8,8 @@ from eventsourcing.domain.model.events import ( EventHandlersNotEmptyError, assert_event_handlers_empty, - publish, clear_event_handlers, + publish, ) from eventsourcing.example.domainmodel import Example from eventsourcing.utils.topic import get_topic @@ -212,7 +212,6 @@ def test_applicationpolicy_decorator(self): self.seen_int = False class Application(object): - def __init__(self, test_case): self.test_case = test_case @@ -224,11 +223,10 @@ def policy(self, repository, event): def _(self, repository, event): self.test_case.seen_int = True - app = Application(self) self.assertFalse(self.seen_default) self.assertFalse(self.seen_int) - app.policy(None, '') + app.policy(None, "") self.assertTrue(self.seen_default) self.assertFalse(self.seen_int) app.policy(None, 1) @@ -252,7 +250,6 @@ class C(B): pass class Application(object): - def __init__(self, test_case): self.test_case = test_case @@ -272,11 +269,10 @@ def _(self, repository, event): def _(self, repository, event): self.test_case.seen_b = True - app = Application(self) self.assertFalse(self.seen_default) self.assertFalse(self.seen_int) - app.policy(None, '') + app.policy(None, "") self.assertTrue(self.seen_default) self.assertFalse(self.seen_int) app.policy(None, 1) @@ -284,4 +280,3 @@ def _(self, repository, event): self.assertTrue(self.seen_int) app.policy(None, C()) self.assertTrue(self.seen_b) - diff --git a/eventsourcing/tests/core_tests/test_entity.py b/eventsourcing/tests/core_tests/test_entity.py index 16821c16a..49c5866ee 100644 --- a/eventsourcing/tests/core_tests/test_entity.py +++ b/eventsourcing/tests/core_tests/test_entity.py @@ -7,11 
+7,7 @@ TimestampedVersionedEntity, VersionedEntity, ) -from eventsourcing.domain.model.events import ( - publish, - subscribe, - unsubscribe, -) +from eventsourcing.domain.model.events import publish, subscribe, unsubscribe from eventsourcing.example.domainmodel import Example, create_new_example from eventsourcing.example.infrastructure import ExampleRepository from eventsourcing.exceptions import ( @@ -215,10 +211,7 @@ def a(self): def receive(x): published_events.extend(x) - subscription = ( - lambda x: True, - receive - ) + subscription = (lambda x: True, receive) subscribe(*subscription) entity_id = uuid4() try: diff --git a/eventsourcing/tests/core_tests/test_event_store.py b/eventsourcing/tests/core_tests/test_event_store.py index ab659aae0..88ad689cd 100644 --- a/eventsourcing/tests/core_tests/test_event_store.py +++ b/eventsourcing/tests/core_tests/test_event_store.py @@ -61,9 +61,7 @@ def test_list_events(self): self.assertEqual(0, len(entity_events)) # Check there are zero events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(0, len(entity_events)) # Store a domain event. @@ -77,9 +75,7 @@ def test_list_events(self): self.assertEqual(1, len(entity_events)) # Check there are two events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(1, len(entity_events)) # Store another domain event. @@ -93,9 +89,7 @@ def test_list_events(self): self.assertEqual(2, len(entity_events)) # Check there are two events in the event store, using iterator. - entity_events = event_store.list_events( - originator_id=entity_id1, page_size=1 - ) + entity_events = event_store.list_events(originator_id=entity_id1, page_size=1) self.assertEqual(2, len(entity_events)) def test_get_most_recent_event(self): diff --git a/eventsourcing/tests/core_tests/test_event_versioning.py b/eventsourcing/tests/core_tests/test_event_versioning.py index f753c126d..56d3fb3bf 100644 --- a/eventsourcing/tests/core_tests/test_event_versioning.py +++ b/eventsourcing/tests/core_tests/test_event_versioning.py @@ -114,13 +114,12 @@ def __upcast__(cls, obj_state: Dict, class_version: int): class TestUpcastingActiveWhenStoringAndRetrievingEvents(TestCase): - def tearDown(self) -> None: # Reset class versions (in case running in suite). 
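Note: the versioning tests around this point exercise the upcasting mechanism reformatted earlier in this diff, where __upcast_state__ walks __class_version__ upwards and calls __upcast__ one step at a time. A minimal, library-independent sketch of that pattern; the class names and the particular value/units defaults below are illustrative, chosen to match the fixture used in these tests.

class Upcastable:
    # Bump __class_version__ whenever the recorded state layout changes.
    __class_version__ = 0

    @classmethod
    def __upcast__(cls, obj_state, class_version):
        # Each step migrates the state one version forward by filling defaults.
        if class_version == 0:
            obj_state["value"] = 0   # attribute added in version 1
        elif class_version == 1:
            obj_state["units"] = ""  # attribute added in version 2
        return obj_state

    @classmethod
    def __upcast_state__(cls, obj_state):
        # Walk from the version recorded with the state up to the current class version.
        class_version = obj_state.get("__class_version__", 0)
        while class_version < cls.__class_version__:
            obj_state = cls.__upcast__(obj_state, class_version)
            class_version += 1
        obj_state["__class_version__"] = class_version
        return obj_state

class AggregateV2(Upcastable):
    __class_version__ = 2

# State recorded by version 0 of the class gains the newer defaults on read:
state = AggregateV2.__upcast_state__({"id": "abc"})
assert state == {"id": "abc", "value": 0, "units": "", "__class_version__": 2}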
MultiVersionAggregateFixture.Triggered.__class_version__ = 0 MultiVersionAggregateFixture.__class_version__ = 0 - del(MultiVersionAggregateFixture.Triggered.__upcast__) - del(MultiVersionAggregateFixture.Triggered.mutate) + del MultiVersionAggregateFixture.Triggered.__upcast__ + del MultiVersionAggregateFixture.Triggered.mutate def test_aggregate_state_after_versioning_events(self): """ @@ -225,7 +224,6 @@ def test_snapshot_state_after_versioning_events(self): """ app = SnapshottingApplication.mixin(SQLAlchemyApplication)( persist_event_type=BaseAggregateRoot.Event, - ) with app: my_aggregate = MultiVersionAggregateFixture.__create__() @@ -291,7 +289,7 @@ def test_snapshot_state_after_versioning_events(self): copy = app.repository[my_aggregate.id] assert isinstance(copy, MultiVersionAggregateFixture) self.assertEqual(copy.value, 10) - self.assertEqual(copy.units, '') # gets default + self.assertEqual(copy.units, "") # gets default class MultiVersionAggregateFixture(BaseAggregateRoot): @@ -301,9 +299,9 @@ class MultiVersionAggregateFixture(BaseAggregateRoot): @classmethod def __upcast__(cls, obj_state: Dict, class_version: int) -> Dict: if class_version == 0: - obj_state['value'] = cls.DEFAULT_VALUE + obj_state["value"] = cls.DEFAULT_VALUE elif class_version == 1: - obj_state['units'] = cls.DEFAULT_UNITS + obj_state["units"] = cls.DEFAULT_UNITS return obj_state def __init__(self, **kwargs): diff --git a/eventsourcing/tests/core_tests/test_persistence_policy.py b/eventsourcing/tests/core_tests/test_persistence_policy.py index 6170d192a..3e6cc6b29 100644 --- a/eventsourcing/tests/core_tests/test_persistence_policy.py +++ b/eventsourcing/tests/core_tests/test_persistence_policy.py @@ -2,13 +2,10 @@ from uuid import uuid4 from eventsourcing.application.policies import PersistencePolicy, SnapshottingPolicy -from eventsourcing.domain.model.entity import ( - VersionedEntity, - TimestampedEntity, -) +from eventsourcing.domain.model.entity import TimestampedEntity, VersionedEntity from eventsourcing.domain.model.events import publish -from eventsourcing.infrastructure.base import AbstractEventStore from eventsourcing.domain.model.repository import AbstractEntityRepository +from eventsourcing.infrastructure.base import AbstractEventStore try: from unittest import mock diff --git a/eventsourcing/tests/core_tests/test_simple_application.py b/eventsourcing/tests/core_tests/test_simple_application.py index f84d94683..865ac16cb 100644 --- a/eventsourcing/tests/core_tests/test_simple_application.py +++ b/eventsourcing/tests/core_tests/test_simple_application.py @@ -1,19 +1,18 @@ from unittest import TestCase from eventsourcing.application.axon import AxonApplication +from eventsourcing.application.django import DjangoApplication +from eventsourcing.application.notificationlog import NotificationLogReader from eventsourcing.application.popo import PopoApplication from eventsourcing.application.simple import SimpleApplication +from eventsourcing.application.snapshotting import SnapshottingApplication +from eventsourcing.application.sqlalchemy import SQLAlchemyApplication +from eventsourcing.domain.model.events import DomainEvent, assert_event_handlers_empty from eventsourcing.exceptions import ProgrammingError +from eventsourcing.tests.core_tests.test_aggregate_root import ExampleAggregateRoot from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) - -from eventsourcing.application.django import DjangoApplication -from eventsourcing.application.notificationlog 
import NotificationLogReader -from eventsourcing.application.snapshotting import SnapshottingApplication -from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.domain.model.events import assert_event_handlers_empty, DomainEvent -from eventsourcing.tests.core_tests.test_aggregate_root import ExampleAggregateRoot from eventsourcing.utils.random import encoded_random_bytes @@ -87,7 +86,7 @@ class TestDjangoApplication(DjangoTestCase, TestSimpleApplication): infrastructure_class = DjangoApplication -class TestAxonApplication(DjangoTestCase, TestSimpleApplication): +class TestAxonApplication(TestSimpleApplication): infrastructure_class = AxonApplication diff --git a/eventsourcing/tests/core_tests/test_utils.py b/eventsourcing/tests/core_tests/test_utils.py index 592114c2d..3a1af4f44 100644 --- a/eventsourcing/tests/core_tests/test_utils.py +++ b/eventsourcing/tests/core_tests/test_utils.py @@ -1,17 +1,16 @@ import os +import sys from datetime import datetime, timedelta from decimal import Decimal from time import sleep from unittest.case import TestCase from uuid import uuid1 -import sys - -from eventsourcing.utils.random import encoded_random_bytes, decode_bytes +from eventsourcing.utils.random import decode_bytes, encoded_random_bytes from eventsourcing.utils.times import ( + decimaltimestamp, decimaltimestamp_from_uuid, utc_timezone, - decimaltimestamp, ) diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py index acd12ff86..5bbb6b5dc 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_cql.py @@ -4,18 +4,17 @@ from eventsourcing.domain.model.events import DomainEvent from eventsourcing.example.domainmodel import create_new_example from eventsourcing.example.infrastructure import ExampleRepository -from eventsourcing.infrastructure.cassandra.records import StoredEventRecord -from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager from eventsourcing.infrastructure.cassandra.datastore import ( CassandraDatastore, CassandraSettings, ) +from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager +from eventsourcing.infrastructure.cassandra.records import StoredEventRecord from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import StoredEvent from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper from eventsourcing.tests.datastore_tests.base import AbstractDatastoreTestCase - # In this test the default SequencedItem class is replaced with a "stored event" class. # How easy is it to customize the infrastructure to support that? 
We just need # to define the new sequenced item class, define a suitable record class, diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py index 3423b5641..acd264b26 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_domain_event_type.py @@ -2,13 +2,13 @@ from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.entity import TimeuuidedEntity -from eventsourcing.domain.model.events import EventWithTimeuuid, DomainEvent +from eventsourcing.domain.model.events import DomainEvent, EventWithTimeuuid from eventsourcing.infrastructure.cassandra.datastore import ( CassandraDatastore, CassandraSettings, ) -from eventsourcing.infrastructure.cassandra.records import TimeuuidSequencedRecord from eventsourcing.infrastructure.cassandra.manager import CassandraRecordManager +from eventsourcing.infrastructure.cassandra.records import TimeuuidSequencedRecord from eventsourcing.infrastructure.eventsourcedrepository import EventSourcedRepository from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import SequencedItem @@ -19,7 +19,6 @@ ) from eventsourcing.utils.times import decimaltimestamp_from_uuid - # This test has events with TimeUUID value as the 'event ID'. How easy is it to customize # the infrastructure to support that? We just need to make a model that uses these events, # define a suitable database table, and configure the other components. It's easy. diff --git a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py index 3e45409d4..676463af7 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_alternative_sequenced_item_type.py @@ -6,15 +6,14 @@ from eventsourcing.infrastructure.eventstore import EventStore from eventsourcing.infrastructure.sequenceditem import StoredEvent from eventsourcing.infrastructure.sequenceditemmapper import SequencedItemMapper -from eventsourcing.infrastructure.sqlalchemy.records import StoredEventRecord, Base -from eventsourcing.infrastructure.sqlalchemy.manager import SQLAlchemyRecordManager from eventsourcing.infrastructure.sqlalchemy.datastore import ( SQLAlchemyDatastore, SQLAlchemySettings, ) +from eventsourcing.infrastructure.sqlalchemy.manager import SQLAlchemyRecordManager +from eventsourcing.infrastructure.sqlalchemy.records import Base, StoredEventRecord from eventsourcing.tests.datastore_tests.base import AbstractDatastoreTestCase - # This test replaces the default SequencedItem class with a StoredEvent class. # How easy is it to customize the infrastructure to support that? We just need # to define the new sequenced item class, define a suitable record class, @@ -76,7 +75,9 @@ def _test(self): self.assertEqual(entity1.b, "b") # Check there is a stored event. 
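Note: the comments in these customization tests describe the pattern being exercised: swap the default SequencedItem tuple for a StoredEvent-style tuple, define a matching record class, and point the mapper at the new field names. A small, self-contained sketch of the idea; the tuple shapes and the helper below are illustrative only, with field names taken from attributes that appear elsewhere in this diff rather than from the library's own definitions.

import json
from collections import namedtuple
from uuid import uuid4

# Illustrative stand-ins: the default sequenced item names its fields generically,
# while a stored-event style tuple names them after domain event attributes.
SequencedItem = namedtuple("SequencedItem", ["sequence_id", "position", "topic", "state"])
StoredEvent = namedtuple("StoredEvent", ["originator_id", "originator_version", "topic", "state"])

def to_stored_event(originator_id, originator_version, topic, event_attrs):
    # Serialise the event attributes to JSON for the state field; the real mapper
    # can additionally apply a cipher and a compressor, as seen elsewhere in this diff.
    return StoredEvent(
        originator_id=originator_id,
        originator_version=originator_version,
        topic=topic,
        state=json.dumps(event_attrs),
    )

item = to_stored_event(uuid4(), 0, "example#Example.Created", {"a": 1, "b": "b"})
print(item.originator_id, item.topic)

Swapping the tuple mainly means telling the mapper and the record class which field names to read and write; the stored data itself is unchanged.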
- all_records = list(app.event_store.record_manager.get_notification_records()) + all_records = list( + app.event_store.record_manager.get_notification_records() + ) self.assertEqual(1, len(all_records)) stored_event = all_records[0] self.assertEqual(stored_event.originator_id, entity1.id) diff --git a/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py b/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py index 8ae727d59..829aa9e19 100644 --- a/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py +++ b/eventsourcing/tests/customization_tests/test_customise_with_extended_sequenced_item.py @@ -132,7 +132,9 @@ def test(self): self.assertEqual(entity1.b, "b") # Check there is a stored event. - all_records = list(app.event_store.record_manager.get_notification_records()) + all_records = list( + app.event_store.record_manager.get_notification_records() + ) self.assertEqual(len(all_records), 1) record = all_records[0] self.assertEqual(record.sequence_id, entity1.id) diff --git a/eventsourcing/tests/customization_tests/test_customized_projections_old.py b/eventsourcing/tests/customization_tests/test_customized_projections_old.py index 019edf181..d1e81e8c8 100644 --- a/eventsourcing/tests/customization_tests/test_customized_projections_old.py +++ b/eventsourcing/tests/customization_tests/test_customized_projections_old.py @@ -5,7 +5,6 @@ SQLAlchemyRecordManagerTestCase, ) - # ## Old stuff... diff --git a/eventsourcing/tests/datastore_tests/base.py b/eventsourcing/tests/datastore_tests/base.py index ba33ce677..4f0f2e2d8 100644 --- a/eventsourcing/tests/datastore_tests/base.py +++ b/eventsourcing/tests/datastore_tests/base.py @@ -1,9 +1,10 @@ from abc import abstractmethod -from typing import Type, Optional +from typing import Optional, Type from eventsourcing.infrastructure.datastore import ( + AbstractDatastore, DatastoreTableError, - AbstractDatastore) +) from eventsourcing.infrastructure.factory import InfrastructureFactory from eventsourcing.tests.base import AbstractTestCase diff --git a/eventsourcing/tests/datastore_tests/test_axonserver.py b/eventsourcing/tests/datastore_tests/test_axonserver.py index cf24445d5..0b0431d52 100644 --- a/eventsourcing/tests/datastore_tests/test_axonserver.py +++ b/eventsourcing/tests/datastore_tests/test_axonserver.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from axonclient.client import AxonClient, AxonEvent, DEFAULT_LOCAL_AXONSERVER_URI +from axonclient.client import DEFAULT_LOCAL_AXONSERVER_URI, AxonClient, AxonEvent from eventsourcing.infrastructure.axonserver.datastore import ( AxonDatastore, @@ -22,7 +22,7 @@ def construct_datastore(self): def create_factory_kwargs(self): kwargs = super(AxonDatastoreTestCase, self).create_factory_kwargs() - kwargs["axon_client"] = AxonClient(DEFAULT_LOCAL_AXONSERVER_URI) + kwargs["axon_client"] = AxonClient(self.datastore.settings.uri) return kwargs diff --git a/eventsourcing/tests/datastore_tests/test_cassandra.py b/eventsourcing/tests/datastore_tests/test_cassandra.py index f0742ba10..3503f095f 100644 --- a/eventsourcing/tests/datastore_tests/test_cassandra.py +++ b/eventsourcing/tests/datastore_tests/test_cassandra.py @@ -6,6 +6,10 @@ from cassandra.cqlengine import CQLEngineException from eventsourcing.exceptions import DatasourceSettingsError +from eventsourcing.infrastructure.cassandra.datastore import ( + CassandraDatastore, + CassandraSettings, +) from eventsourcing.infrastructure.cassandra.factory import ( 
CassandraInfrastructureFactory, ) @@ -14,10 +18,6 @@ SnapshotRecord, TimestampSequencedRecord, ) -from eventsourcing.infrastructure.cassandra.datastore import ( - CassandraDatastore, - CassandraSettings, -) from eventsourcing.infrastructure.datastore import ( DatastoreConnectionError, DatastoreTableError, diff --git a/eventsourcing/tests/djangoproject/djangoproject/settings.py b/eventsourcing/tests/djangoproject/djangoproject/settings.py index 06b090675..05797a34c 100644 --- a/eventsourcing/tests/djangoproject/djangoproject/settings.py +++ b/eventsourcing/tests/djangoproject/djangoproject/settings.py @@ -94,7 +94,7 @@ "HOST": os.getenv("POSTGRES_HOST", "127.0.0.1"), "NAME": "eventsourcing", "USER": os.getenv("POSTGRES_USER", "eventsourcing"), - "PASSWORD": os.getenv("POSTGRES_PASSWORD", ""), + "PASSWORD": os.getenv("POSTGRES_PASSWORD", "eventsourcing"), } } @@ -102,7 +102,9 @@ # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ - {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" + }, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, diff --git a/eventsourcing/tests/example_application_tests/base.py b/eventsourcing/tests/example_application_tests/base.py index 7848dd678..61dcddeb5 100644 --- a/eventsourcing/tests/example_application_tests/base.py +++ b/eventsourcing/tests/example_application_tests/base.py @@ -1,6 +1,5 @@ -from uuid import uuid4 - from time import sleep +from uuid import uuid4 from eventsourcing.application.policies import PersistencePolicy from eventsourcing.domain.model.snapshot import Snapshot diff --git a/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py b/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py index 7a654ddfe..41bff77e5 100644 --- a/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py +++ b/eventsourcing/tests/example_application_tests/test_example_application_with_encryption.py @@ -1,4 +1,3 @@ -from eventsourcing.utils.cipher.aes import AESCipher from eventsourcing.tests.example_application_tests import base from eventsourcing.tests.example_application_tests.test_example_application_with_cassandra import ( TestExampleApplicationWithCassandra, @@ -6,6 +5,7 @@ from eventsourcing.tests.example_application_tests.test_example_application_with_sqlalchemy import ( TestExampleApplicationWithSQLAlchemy, ) +from eventsourcing.utils.cipher.aes import AESCipher class WithEncryption(base.WithExampleApplication): diff --git a/eventsourcing/tests/sequenced_item_tests/base.py b/eventsourcing/tests/sequenced_item_tests/base.py index d428cc7c9..2f0b7e45c 100644 --- a/eventsourcing/tests/sequenced_item_tests/base.py +++ b/eventsourcing/tests/sequenced_item_tests/base.py @@ -205,7 +205,9 @@ def test(self): # Get items less then or equal to a position. 
if self.record_manager.can_lt_lte_get_records: - retrieved_items = self.record_manager.list_items(sequence_id1, lte=position2) + retrieved_items = self.record_manager.list_items( + sequence_id1, lte=position2 + ) self.assertEqual(len(retrieved_items), 2) self.assertEqual(retrieved_items[0].state, item1.state) self.assertEqual(retrieved_items[1].state, item4.state) @@ -319,8 +321,9 @@ def test(self): ) # Resume from after the first event. - retrieved_items = self.record_manager.get_notification_records(start=1 + len_old, - stop=3 + len_old) + retrieved_items = self.record_manager.get_notification_records( + start=1 + len_old, stop=3 + len_old + ) retrieved_items = list(retrieved_items) self.assertEqual(len(retrieved_items), 2) self.assertEqual(retrieved_items[0].id, 2 + len_old) diff --git a/eventsourcing/tests/system_test_fixtures.py b/eventsourcing/tests/system_test_fixtures.py index ddc7553b3..adec0c846 100644 --- a/eventsourcing/tests/system_test_fixtures.py +++ b/eventsourcing/tests/system_test_fixtures.py @@ -1,6 +1,6 @@ import logging import os -from uuid import uuid5, NAMESPACE_OID +from uuid import NAMESPACE_OID, uuid5 from eventsourcing.application.process import ProcessApplication, WrappedRepository from eventsourcing.domain.model.aggregate import BaseAggregateRoot @@ -11,8 +11,8 @@ def set_db_uri(): host = os.getenv("MYSQL_HOST", "127.0.0.1") - user = os.getenv("MYSQL_USER", "root") - password = os.getenv("MYSQL_PASSWORD", "") + user = os.getenv("MYSQL_USER", "eventsourcing") + password = os.getenv("MYSQL_PASSWORD", "eventsourcing") db_uri = ( "mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true" ).format(user, password, host) @@ -42,7 +42,7 @@ def mutate(self, order): @property def reservation_id(self): - return self.__dict__['reservation_id'] + return self.__dict__["reservation_id"] def set_is_paid(self, payment_id): self.__trigger_event__(self.Paid, payment_id=payment_id) @@ -55,7 +55,7 @@ def mutate(self, order): @property def payment_id(self): - return self.__dict__['payment_id'] + return self.__dict__["payment_id"] class Reservation(BaseAggregateRoot): @@ -139,7 +139,7 @@ def is_order_paid(self, order_id): order = self.get_order(order_id) return order is not None and order.is_paid - def get_order(self, order_id, repository: WrappedRepository=None): + def get_order(self, order_id, repository: WrappedRepository = None): try: return (repository or self.repository)[order_id] except KeyError: diff --git a/eventsourcing/tests/test_collection.py b/eventsourcing/tests/test_collection.py index b425dbc2c..01d9f4f27 100644 --- a/eventsourcing/tests/test_collection.py +++ b/eventsourcing/tests/test_collection.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, print_function, unicode_literals - from unittest.case import TestCase from uuid import uuid4 diff --git a/eventsourcing/tests/test_docs.py b/eventsourcing/tests/test_docs.py index bba0749fa..2ea379a21 100644 --- a/eventsourcing/tests/test_docs.py +++ b/eventsourcing/tests/test_docs.py @@ -32,8 +32,8 @@ def tearDown(self): os.environ[ "DB_URI" ] = "mysql+pymysql://{}:{}@{}/eventsourcing?charset=utf8mb4&binary_prefix=true".format( - os.getenv("MYSQL_USER", "root"), - os.getenv("MYSQL_PASSWORD", ""), + os.getenv("MYSQL_USER", "eventsourcing"), + os.getenv("MYSQL_PASSWORD", "eventsourcing"), os.getenv("MYSQL_HOST", "127.0.0.1"), ) database = SQLAlchemyDatastore(settings=SQLAlchemySettings()) @@ -74,18 +74,18 @@ def test_docs(self): if name in skipped: continue if name.endswith(".rst"): - # if 
name.endswith('domainmodel.rst'): - # if name.endswith('quick_start.rst'): - # if name.endswith('aggregates_in_ddd.rst'): - # if name.endswith('example_application.rst'): - # if name.endswith('everything.rst'): - # if name.endswith('infrastructure.rst'): - # if name.endswith('application.rst'): - # if name.endswith('snapshotting.rst'): - # if name.endswith('notifications.rst'): - # if name.endswith('projections.rst'): - # if name.endswith('deployment.rst'): - # if name.endswith('process.rst'): + # if name.endswith('domainmodel.rst'): + # if name.endswith('quick_start.rst'): + # if name.endswith('aggregates_in_ddd.rst'): + # if name.endswith('example_application.rst'): + # if name.endswith('everything.rst'): + # if name.endswith('infrastructure.rst'): + # if name.endswith('application.rst'): + # if name.endswith('snapshotting.rst'): + # if name.endswith('notifications.rst'): + # if name.endswith('projections.rst'): + # if name.endswith('deployment.rst'): + # if name.endswith('process.rst'): file_paths.append(os.path.join(docs_path, dirpath, name)) file_paths = sorted(file_paths) diff --git a/eventsourcing/tests/test_flask.py b/eventsourcing/tests/test_flask.py index f184dc54d..6622c0c2a 100644 --- a/eventsourcing/tests/test_flask.py +++ b/eventsourcing/tests/test_flask.py @@ -29,7 +29,6 @@ path_to_eventsourcing = dirname(dirname(abspath(eventsourcing.__file__))) path_to_interface_module = dirname(abspath(eventsourcing.example.interface.__file__)) path_to_flaskapp = join(path_to_interface_module, "flaskapp.py") -path_to_flaskwsgi = join(dirname(path_to_flaskapp), "flaskwsgi.py") @notquick @@ -116,7 +115,7 @@ def start_app(self): # if not os.path.exists(path_to_uwsgi): # raise AssertionError("Can't find uwsgi: %s" % path_to_uwsgi) # # Todo: Maybe use shutil.which, after dropping support for Python 2.7. - path_to_uwsgi = shutil.which('uwsgi') + path_to_uwsgi = shutil.which("uwsgi") if not os.path.exists(path_to_uwsgi): raise AssertionError("Can't find uwsgi: %s" % path_to_uwsgi) cmd = [path_to_uwsgi] @@ -125,7 +124,7 @@ def start_app(self): cmd += ["--master"] cmd += ["--processes", "4"] cmd += ["--threads", "2"] - cmd += ["--wsgi-file", path_to_flaskwsgi] + cmd += ["--wsgi-file", path_to_flaskapp] cmd += ["--http", ":{}".format(self.port)] pythonpath = ":".join( os.getenv("PYTHONPATH", "").split(":") + [path_to_eventsourcing] diff --git a/eventsourcing/tests/test_notificationlog.py b/eventsourcing/tests/test_notificationlog.py index e521c90d3..f53f29bef 100644 --- a/eventsourcing/tests/test_notificationlog.py +++ b/eventsourcing/tests/test_notificationlog.py @@ -3,9 +3,9 @@ from uuid import uuid4 from eventsourcing.application.notificationlog import ( - RecordManagerNotificationLog, BigArrayNotificationLog, NotificationLogReader, + RecordManagerNotificationLog, ) from eventsourcing.domain.model.events import DomainEvent from eventsourcing.infrastructure.repositories.array import BigArrayRepository @@ -309,7 +309,7 @@ def simple_app(environ, start_response): notification_log = RemoteNotificationLog(base_url) # Just before we start, test the deserialise_section_size exceptions. 
- notification_log.deserialize_section_size('1') + notification_log.deserialize_section_size("1") with self.assertRaises(ValueError): notification_log.deserialize_section_size('"1') with self.assertRaises(TypeError): diff --git a/eventsourcing/tests/test_performance.py b/eventsourcing/tests/test_performance.py index 9cd80f056..7e89c61c9 100644 --- a/eventsourcing/tests/test_performance.py +++ b/eventsourcing/tests/test_performance.py @@ -122,8 +122,7 @@ def last_n(n): assert isinstance(app.example_repository.event_store, EventStore) start_last_n = time.time() events = app.example_repository.event_store.list_events( - originator_id=example.id, - gt=num_beats - n, + originator_id=example.id, gt=num_beats - n, ) assert len(events) == n, "Asked for %s but got %s" % ( n, @@ -232,7 +231,10 @@ def test_log_performance(self): total_num_reads = 0 while True: start_read = time.time() - page_of_events, next_position = self.get_message_logged_events_and_next_position( + ( + page_of_events, + next_position, + ) = self.get_message_logged_events_and_next_position( log_reader, position, page_size ) time_to_read = time.time() - start_read @@ -254,7 +256,10 @@ def test_log_performance(self): position = None while True: start_read = time.time() - page_of_events, next_position = self.get_message_logged_events_and_next_position( + ( + page_of_events, + next_position, + ) = self.get_message_logged_events_and_next_position( log_reader, position, page_size, is_ascending=True ) time_to_read = time.time() - start_read @@ -355,7 +360,7 @@ def construct_entity_record_manager(self): def construct_log_record_manager(self): return self.factory.construct_record_manager(record_class=None) - @skip("Axon does support sequencing by timestamp, so don't test log performance") + @skip("Axon doesn't support sequencing by timestamp, so don't test log performance") def test_log_performance(self): pass diff --git a/eventsourcing/tests/test_process.py b/eventsourcing/tests/test_process.py index 329bf622e..241fc3385 100644 --- a/eventsourcing/tests/test_process.py +++ b/eventsourcing/tests/test_process.py @@ -23,12 +23,15 @@ unsubscribe, ) from eventsourcing.domain.model.snapshot import Snapshot -from eventsourcing.exceptions import CausalDependencyFailed, PromptFailed, \ - ProgrammingError +from eventsourcing.exceptions import ( + CausalDependencyFailed, + ProgrammingError, + PromptFailed, +) from eventsourcing.infrastructure.sqlalchemy.records import Base -from eventsourcing.whitehead import TEntity from eventsourcing.utils.topic import resolve_topic from eventsourcing.utils.transcoding import ObjectJSONDecoder +from eventsourcing.whitehead import TEntity class TestProcessApplication(TestCase): @@ -200,30 +203,48 @@ def test_causal_dependencies(self): downstream2.run() self.assertEqual( - 0, len(list(downstream1.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 0, len(list(downstream2.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) # Try to process pipeline 1, should work. 
downstream1.run() self.assertEqual( - 1, len(list(downstream1.event_store.record_manager.get_notification_records())) + 1, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 0, len(list(downstream2.event_store.record_manager.get_notification_records())) + 0, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) # Try again to process pipeline 2, should work this time. downstream2.run() self.assertEqual( - 1, len(list(downstream1.event_store.record_manager.get_notification_records())) + 1, + len( + list(downstream1.event_store.record_manager.get_notification_records()) + ), ) self.assertEqual( - 2, len(list(downstream2.event_store.record_manager.get_notification_records())) + 2, + len( + list(downstream2.event_store.record_manager.get_notification_records()) + ), ) core1.close() @@ -354,7 +375,6 @@ def test_clear_cache_after_exception_recording_event(self): self.assertNotIn(aggregate.id, process.repository._cache) def test_policy_returns_sequence_of_new_aggregates(self): - def policy(repository, event): first = AggregateRoot.__create__(originator_id=(uuid4())) second = AggregateRoot.__create__(originator_id=(uuid4())) @@ -386,8 +406,6 @@ def policy(repository, event): ids = process.repository.event_store.record_manager.all_sequence_ids() self.assertEqual(len(list(ids)), 3) - - def define_projection_record_class(self): class ProjectionRecord(Base): __tablename__ = "projections" diff --git a/eventsourcing/tests/test_process_with_django.py b/eventsourcing/tests/test_process_with_django.py index f374f3844..2212cd1fa 100644 --- a/eventsourcing/tests/test_process_with_django.py +++ b/eventsourcing/tests/test_process_with_django.py @@ -2,10 +2,10 @@ from django.db import models from django.db.backends.base.schema import BaseDatabaseSchemaEditor +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_process import TestProcessApplication diff --git a/eventsourcing/tests/test_process_with_popo.py b/eventsourcing/tests/test_process_with_popo.py index 5edf1d4c0..7d880b918 100644 --- a/eventsourcing/tests/test_process_with_popo.py +++ b/eventsourcing/tests/test_process_with_popo.py @@ -1,7 +1,6 @@ from unittest import skip from eventsourcing.application.popo import PopoApplication - from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) diff --git a/eventsourcing/tests/test_ray_runner.py b/eventsourcing/tests/test_ray_runner.py index d59961077..5c6ffb920 100644 --- a/eventsourcing/tests/test_ray_runner.py +++ b/eventsourcing/tests/test_ray_runner.py @@ -16,10 +16,7 @@ clear_event_handlers, ) from eventsourcing.system.definition import System -from eventsourcing.system.ray import ( - RayProcess, - RayRunner, -) +from eventsourcing.system.ray import RayProcess, RayRunner from eventsourcing.system.rayhelpers import RayPrompt from eventsourcing.tests.system_test_fixtures import ( Orders, diff --git a/eventsourcing/tests/test_ray_runner_with_django.py b/eventsourcing/tests/test_ray_runner_with_django.py index 1ea282f5b..63776d31c 100644 --- a/eventsourcing/tests/test_ray_runner_with_django.py +++ b/eventsourcing/tests/test_ray_runner_with_django.py @@ -1,9 +1,9 @@ from unittest import skip +from eventsourcing.application.django import DjangoApplication from 
eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_ray_runner import TestRayRunner @@ -13,4 +13,4 @@ class DontTestRayRunnerWithDjango(DjangoTestCase, TestRayRunner): # Don't let this be found here. -del(TestRayRunner) +del TestRayRunner diff --git a/eventsourcing/tests/test_system.py b/eventsourcing/tests/test_system.py index 11de47240..a7e2101a8 100644 --- a/eventsourcing/tests/test_system.py +++ b/eventsourcing/tests/test_system.py @@ -3,16 +3,15 @@ from unittest import TestCase from uuid import uuid4 -from eventsourcing.system.multiprocess import MultiprocessRunner from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.runner import MultiThreadedRunner -from eventsourcing.system.definition import System from eventsourcing.domain.model.events import ( assert_event_handlers_empty, clear_event_handlers, ) -from eventsourcing.exceptions import RepositoryKeyError, ProgrammingError -from eventsourcing.tests.test_process import ExampleAggregate +from eventsourcing.exceptions import ProgrammingError, RepositoryKeyError +from eventsourcing.system.definition import System +from eventsourcing.system.multiprocess import MultiprocessRunner +from eventsourcing.system.runner import MultiThreadedRunner from eventsourcing.tests.system_test_fixtures import ( Examples, Order, @@ -24,6 +23,7 @@ create_new_order, set_db_uri, ) +from eventsourcing.tests.test_process import ExampleAggregate class TestSystem(TestCase): @@ -76,9 +76,7 @@ def test_bind(self): def test_singlethreaded_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with system as runner: orders = runner.get(Orders) @@ -88,9 +86,7 @@ def test_singlethreaded_runner_with_single_application_class(self): def test_can_run_if_already_running(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with system: with self.assertRaises(ProgrammingError): @@ -104,9 +100,7 @@ def test_can_run_if_already_running(self): def test_multithreaded_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) with MultiThreadedRunner(system) as runner: app = runner.get(Orders) @@ -116,9 +110,7 @@ def test_multithreaded_runner_with_single_application_class(self): def test_multiprocess_runner_with_single_application_class(self): system = System( - Orders, - setup_tables=True, - infrastructure_class=self.infrastructure_class, + Orders, setup_tables=True, infrastructure_class=self.infrastructure_class, ) self.set_db_uri() @@ -164,11 +156,13 @@ def test_multithreaded_runner_with_single_pipe(self): patience = 10 while True: try: - self.assertEqual(reservations_repo[reservation_id].order_id, order_id) + self.assertEqual( + reservations_repo[reservation_id].order_id, order_id + ) except (RepositoryKeyError, AssertionError): if patience: patience -= 1 - sleep(.1) + sleep(0.1) else: raise else: @@ -282,7 +276,7 @@ def test_multithreaded_runner_with_multiapp_system(self): # assert retries, "Failed set order.is_reserved" while retries and not 
orders.repository[order_id].is_paid: - sleep(0.1) + sleep(0.5) retries -= 1 assert retries, "Failed set order.is_paid" diff --git a/eventsourcing/tests/test_system_with_django.py b/eventsourcing/tests/test_system_with_django.py index da5d2f664..cc1ea54f7 100644 --- a/eventsourcing/tests/test_system_with_django.py +++ b/eventsourcing/tests/test_system_with_django.py @@ -1,8 +1,7 @@ +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) - -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_system import TestSystem diff --git a/eventsourcing/tests/test_system_with_popo.py b/eventsourcing/tests/test_system_with_popo.py index e2af22174..876d102d2 100644 --- a/eventsourcing/tests/test_system_with_popo.py +++ b/eventsourcing/tests/test_system_with_popo.py @@ -1,23 +1,22 @@ -from time import sleep, time, process_time +from time import process_time, sleep, time from unittest import skip +from eventsourcing.application.popo import PopoApplication +from eventsourcing.system.definition import System from eventsourcing.system.runner import ( SteppingMultiThreadedRunner, SteppingSingleThreadedRunner, ) -from eventsourcing.system.definition import System from eventsourcing.tests.sequenced_item_tests.test_popo_record_manager import ( PopoTestCase, ) - -from eventsourcing.application.popo import PopoApplication -from eventsourcing.tests.test_system import TestSystem from eventsourcing.tests.system_test_fixtures import ( - create_new_order, Orders, - Reservations, Payments, + Reservations, + create_new_order, ) +from eventsourcing.tests.test_system import TestSystem class TestSystemWithPopo(PopoTestCase, TestSystem): diff --git a/eventsourcing/tests/test_thespian_runner.py b/eventsourcing/tests/test_thespian_runner.py index 9d81c6ee7..21e4655b8 100644 --- a/eventsourcing/tests/test_thespian_runner.py +++ b/eventsourcing/tests/test_thespian_runner.py @@ -4,18 +4,18 @@ import unittest from unittest import skip -from eventsourcing.system.thespian import ( - ThespianRunner, - shutdown_thespian_system, - start_thespian_system, - start_multiproc_tcp_base_system, -) from eventsourcing.application.sqlalchemy import SQLAlchemyApplication -from eventsourcing.system.definition import System from eventsourcing.domain.model.events import ( assert_event_handlers_empty, clear_event_handlers, ) +from eventsourcing.system.definition import System +from eventsourcing.system.thespian import ( + ThespianRunner, + shutdown_thespian_system, + start_multiproc_tcp_base_system, + start_thespian_system, +) from eventsourcing.tests.system_test_fixtures import ( Orders, Payments, diff --git a/eventsourcing/tests/test_thespian_runner_with_django.py b/eventsourcing/tests/test_thespian_runner_with_django.py index d58bb32f1..1df290ee8 100644 --- a/eventsourcing/tests/test_thespian_runner_with_django.py +++ b/eventsourcing/tests/test_thespian_runner_with_django.py @@ -1,7 +1,7 @@ +from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import ( DjangoTestCase, ) -from eventsourcing.application.django import DjangoApplication from eventsourcing.tests.test_thespian_runner import TestThespianRunner diff --git a/eventsourcing/tests/test_transcoding.py b/eventsourcing/tests/test_transcoding.py index a0b6ec317..24e84a86b 100644 --- a/eventsourcing/tests/test_transcoding.py +++ b/eventsourcing/tests/test_transcoding.py 
@@ -1,20 +1,19 @@ import datetime -from collections import deque, namedtuple, OrderedDict, ChainMap +from collections import ChainMap, OrderedDict, deque, namedtuple +from decimal import Decimal from enum import Enum from fractions import Fraction - -try: - from dataclasses import make_dataclass -except: - make_dataclass = None - -from decimal import Decimal from unittest import TestCase, skipIf from uuid import NAMESPACE_URL, UUID from eventsourcing.utils.times import utc_timezone from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder +try: + from dataclasses import make_dataclass +except: + make_dataclass = None + class TestTranscoding(TestCase): def test_str(self): diff --git a/eventsourcing/tests/test_transcoding_v1.py b/eventsourcing/tests/test_transcoding_v1.py index c6e899d76..45eef212b 100644 --- a/eventsourcing/tests/test_transcoding_v1.py +++ b/eventsourcing/tests/test_transcoding_v1.py @@ -1,23 +1,19 @@ import datetime from collections import deque, namedtuple +from decimal import Decimal from enum import Enum from fractions import Fraction +from unittest import TestCase, skipIf +from uuid import NAMESPACE_URL, UUID + +from eventsourcing.utils.times import utc_timezone +from eventsourcing.utils.transcoding_v1 import ObjectJSONDecoder, ObjectJSONEncoder try: from dataclasses import make_dataclass except: make_dataclass = None -from decimal import Decimal -from unittest import TestCase, skipIf -from uuid import NAMESPACE_URL, UUID - -from eventsourcing.utils.times import utc_timezone -from eventsourcing.utils.transcoding_v1 import ( - ObjectJSONDecoder, - ObjectJSONEncoder, -) - class TestTranscoding(TestCase): def test_str(self): diff --git a/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py b/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py index 1b941e5b9..1027e7bb0 100644 --- a/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py +++ b/eventsourcing/tests/unit_test_fixtures_suffix_tree_text.py @@ -1,6 +1,3 @@ -# # coding=utf-8 -# from __future__ import unicode_literals -# # LONG_TEXT_CONT = """ # This article's use of external links may not follow Wikipedia's policies or guidelines. Please improve this # article by removing excessive and inappropriate external links. 
(August 2010) diff --git a/eventsourcing/utils/cipher/aes.py b/eventsourcing/utils/cipher/aes.py index 2fe401b5d..0089b2bc7 100644 --- a/eventsourcing/utils/cipher/aes.py +++ b/eventsourcing/utils/cipher/aes.py @@ -1,8 +1,3 @@ -import zlib - -import binascii -from base64 import b64decode, b64encode - from Crypto.Cipher import AES from eventsourcing.exceptions import DataIntegrityError diff --git a/eventsourcing/utils/random.py b/eventsourcing/utils/random.py index d420c2ba0..6f2ffbbc5 100644 --- a/eventsourcing/utils/random.py +++ b/eventsourcing/utils/random.py @@ -1,6 +1,5 @@ -from base64 import b64encode, b64decode - import os +from base64 import b64decode, b64encode def encoded_random_bytes(num_bytes: int) -> str: diff --git a/eventsourcing/utils/transcoding.py b/eventsourcing/utils/transcoding.py index 690c531c3..ec6ef2ee3 100644 --- a/eventsourcing/utils/transcoding.py +++ b/eventsourcing/utils/transcoding.py @@ -11,16 +11,16 @@ from typing import Optional from uuid import UUID +import dateutil.parser + from eventsourcing.exceptions import EncoderTypeError +from eventsourcing.utils.topic import get_topic, resolve_topic try: import orjson except ImportError: orjson: Optional[Module] = None # type: ignore -import dateutil.parser - -from eventsourcing.utils.topic import get_topic, resolve_topic JSON_SEPARATORS = (",", ":") diff --git a/eventsourcing/utils/transcoding_v1.py b/eventsourcing/utils/transcoding_v1.py index 28ff846fb..c94aa0e64 100644 --- a/eventsourcing/utils/transcoding_v1.py +++ b/eventsourcing/utils/transcoding_v1.py @@ -89,7 +89,7 @@ def decorator(decoder_func): class ObjectJSONEncoder(JSONEncoder): def encode(self, o): - return super(ObjectJSONEncoder, self).encode(o).encode('utf8') + return super(ObjectJSONEncoder, self).encode(o).encode("utf8") def iterencode(self, o, _one_shot=False): if isinstance(o, tuple): diff --git a/setup.cfg b/setup.cfg index 1e0eac59c..56675775b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,3 +41,23 @@ strict_optional = True no_implicit_optional = True disallow_untyped_defs = True ;disallow_any_generics = True + +[isort] +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +line_length = 88 +combine_as_imports = true +default_section = LOCALFOLDER +known_first_party = eventsourcing +known_standard_library = dataclasses +known_third_party = django +not_skip = __init__.py +sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER +skip = .eggs,.pip-cache,venv,.venv + +[flake8] +max-line-length = 80 +select = C,E,F,W,B,B950 +ignore = E203, E501, W503 diff --git a/setup.py b/setup.py index 7909fbdb2..e5adb0f06 100644 --- a/setup.py +++ b/setup.py @@ -24,9 +24,9 @@ ray_requires = ["ray<=0.8.99999", "psutil", "setproctitle"] -thespian_requires = ["thespian<=3.9.99999"] +thespian_requires = ["thespian<=3.10.99999"] -cassandra_requires = ["cassandra-driver<=3.22.99999"] +cassandra_requires = ["cassandra-driver<=3.23.99999"] django_requires = ["django<=3.0.99999"] @@ -42,9 +42,12 @@ "flask<=1.1.99999", "flask_sqlalchemy<=2.4.99", "uwsgi<=2.0.99999", - "redis<=3.4.99999", + # "redis<=3.5.99999", "celery<=4.4.99999", "pymysql<=0.9.99999", + "mysql-connector-python-rf<=2.2.99999", + "python-coveralls<=2.9.99999", + "coverage<5.0.0", # v5 is incompatible ATM. "cryptography", # Tests use Django with PostgreSQL. 
"psycopg2cffi<=2.8.99999" if is_pypy else "psycopg2-binary<=2.8.99999", @@ -60,7 +63,7 @@ "sphinx-autobuild", ] -dev_requires = docs_requires + ["black", "mypy"] +dev_requires = docs_requires + ["black", "mypy", "flake8", "flake8-bugbear", "isort"] long_description = """ A library for event sourcing in Python.