From 3f1e707042214d819c56565b7eed61a472af9702 Mon Sep 17 00:00:00 2001 From: Mohammad Alisafaee Date: Mon, 20 Mar 2023 15:45:39 +0100 Subject: [PATCH] feat(dataset): parallel data download/upload (#3358) --- .pre-commit-config.yaml | 2 +- conftest.py | 9 + docs/reference/core.rst | 14 +- poetry.lock | 318 +++++++++--------- pyproject.toml | 4 +- renku/command/dataset.py | 4 +- renku/core/__init__.py | 5 +- renku/core/config.py | 5 +- renku/core/constant.py | 5 +- renku/core/dataset/dataset.py | 125 ++++--- renku/core/dataset/dataset_add.py | 180 +++++----- renku/core/dataset/providers/api.py | 2 +- renku/core/dataset/providers/azure.py | 11 +- renku/core/dataset/providers/external.py | 11 +- renku/core/dataset/providers/s3.py | 11 +- renku/core/dataset/providers/web.py | 61 +--- renku/core/errors.py | 5 +- renku/core/gc.py | 5 +- renku/core/git.py | 5 +- renku/core/githooks.py | 5 +- renku/core/init.py | 5 +- renku/core/login.py | 5 +- renku/core/migration/m_0003__1_jsonld.py | 2 +- .../migration/m_0009__new_metadata_storage.py | 3 +- renku/core/project.py | 7 +- renku/core/storage.py | 8 +- renku/core/util/__init__.py | 5 +- renku/core/util/communication.py | 5 +- renku/core/util/contexts.py | 5 +- renku/core/util/datetime8601.py | 5 +- renku/core/util/doi.py | 5 +- renku/core/util/file_size.py | 65 ---- renku/core/util/git.py | 8 +- renku/core/util/jwt.py | 5 +- renku/core/util/os.py | 61 +++- renku/core/util/requests.py | 5 +- renku/core/util/shacl.py | 5 +- renku/core/util/ssh.py | 7 +- renku/core/util/tabulate.py | 5 +- renku/core/util/template_vars.py | 56 --- renku/core/util/urls.py | 5 +- renku/core/util/util.py | 86 ++++- renku/core/util/yaml.py | 5 +- renku/core/workflow/plan.py | 3 +- renku/core/workflow/plan_factory.py | 1 - renku/core/workflow/value_resolution.py | 42 ++- renku/data/__init__.py | 15 + renku/data/pre-commit.sh | 6 +- renku/domain_model/__init__.py | 5 +- .../util/uuid.py => domain_model/constant.py} | 24 +- renku/domain_model/dataset.py 
| 34 +- renku/domain_model/dataset_provider.py | 5 +- renku/domain_model/datastructures.py | 5 +- renku/domain_model/entity.py | 7 +- renku/domain_model/enums.py | 5 +- renku/domain_model/git.py | 5 +- renku/domain_model/project.py | 7 +- renku/domain_model/project_context.py | 11 +- renku/domain_model/provenance/__init__.py | 5 +- renku/domain_model/provenance/activity.py | 5 +- renku/domain_model/provenance/agent.py | 5 +- renku/domain_model/provenance/annotation.py | 5 +- renku/domain_model/provenance/parameter.py | 5 +- renku/domain_model/session.py | 5 +- renku/domain_model/sort.py | 5 +- renku/domain_model/template.py | 5 +- renku/domain_model/workflow/__init__.py | 5 +- renku/domain_model/workflow/composite_plan.py | 5 +- .../workflow/converters/__init__.py | 5 +- renku/domain_model/workflow/parameter.py | 5 +- renku/domain_model/workflow/plan.py | 5 +- renku/domain_model/workflow/provider.py | 5 +- renku/domain_model/workflow/workflow_file.py | 5 +- renku/infrastructure/git_merger.py | 1 - renku/infrastructure/repository.py | 1 - renku/infrastructure/storage/rclone.py | 2 +- renku/ui/cli/dataset.py | 2 +- renku/ui/cli/project.py | 2 +- renku/ui/cli/utils/click.py | 1 - renku/ui/cli/workflow.py | 2 +- renku/ui/service/controllers/api/mixins.py | 1 - .../service/controllers/cache_files_upload.py | 2 +- renku/ui/service/controllers/datasets_edit.py | 2 +- renku/ui/service/controllers/project_edit.py | 2 +- tests/core/commands/test_serialization.py | 2 +- tests/core/fixtures/core_workflow.py | 1 - tests/service/fixtures/service_integration.py | 1 - tests/service/views/test_dataset_views.py | 4 +- tests/service/views/test_project_views.py | 4 +- 89 files changed, 720 insertions(+), 715 deletions(-) delete mode 100644 renku/core/util/file_size.py delete mode 100644 renku/core/util/template_vars.py rename renku/{core/util/uuid.py => domain_model/constant.py} (55%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 96f1cbb6e2..ff93c16798 100644 
--- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 23.1.0 hooks: - id: black additional_dependencies: ["click==8.0.4"] diff --git a/conftest.py b/conftest.py index a47173a7dd..737dd58937 100644 --- a/conftest.py +++ b/conftest.py @@ -79,3 +79,12 @@ def pytest_configure(config): os.environ["RENKU_SKIP_MIN_VERSION_CHECK"] = "1" os.environ["RENKU_DISABLE_VERSION_CHECK"] = "1" + # NOTE: Set an env var during tests to mark that Renku is running in a test session. + os.environ["RENKU_RUNNING_UNDER_TEST"] = "1" + + +def pytest_unconfigure(config): + """Hook that is called by pytest after all tests are executed.""" + os.environ.pop("RENKU_SKIP_MIN_VERSION_CHECK", None) + os.environ.pop("RENKU_DISABLE_VERSION_CHECK", None) + os.environ.pop("RENKU_RUNNING_UNDER_TEST", None) diff --git a/docs/reference/core.rst b/docs/reference/core.rst index 69c0110ce4..19aba89606 100644 --- a/docs/reference/core.rst +++ b/docs/reference/core.rst @@ -256,11 +256,11 @@ Utilities :members: :show-inheritance: -.. automodule:: renku.core.util.file_size +.. automodule:: renku.core.util.git :members: :show-inheritance: -.. automodule:: renku.core.util.git +.. automodule:: renku.core.util.jwt :members: :show-inheritance: @@ -280,15 +280,19 @@ Utilities :members: :show-inheritance: -.. automodule:: renku.core.util.urls +.. automodule:: renku.core.util.ssh :members: :show-inheritance: -.. automodule:: renku.core.util.util +.. automodule:: renku.core.util.tabulate :members: :show-inheritance: -.. automodule:: renku.core.util.uuid +.. automodule:: renku.core.util.urls + :members: + :show-inheritance: + +.. 
automodule:: renku.core.util.util :members: :show-inheritance: diff --git a/poetry.lock b/poetry.lock index b842c7c6e9..dc9fbc470e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -94,14 +94,14 @@ files = [ [[package]] name = "argcomplete" -version = "2.1.1" +version = "3.0.2" description = "Bash tab completion for argparse" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "argcomplete-2.1.1-py3-none-any.whl", hash = "sha256:17041f55b8c45099428df6ce6d0d282b892471a78c71375d24f227e21c13f8c5"}, - {file = "argcomplete-2.1.1.tar.gz", hash = "sha256:72e08340852d32544459c0c19aad1b48aa2c3a96de8c6e5742456b4f538ca52f"}, + {file = "argcomplete-3.0.2-py3-none-any.whl", hash = "sha256:ba35518a5640ebbda5fb77d94e1db535233a7eac18ffb905216753749eaff8e1"}, + {file = "argcomplete-3.0.2.tar.gz", hash = "sha256:fb929c2f609f195a2b11dc914924149b45c2b55b9f48283362e18b8d35f855c1"}, ] [package.extras] @@ -167,41 +167,46 @@ files = [ [[package]] name = "black" -version = "22.10.0" +version = "23.1.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = 
"black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = 
"sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -1197,14 +1202,14 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.5.20" +version = "2.5.21" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.20-py2.py3-none-any.whl", hash = "sha256:5dfef8a745ca4f2c95f27e9db74cb4c8b6d9916383988e8791f3595868f78a33"}, - {file = "identify-2.5.20.tar.gz", hash = "sha256:c8b288552bc5f05a08aff09af2f58e6976bf8ac87beb38498a0e3d98ba64eb18"}, + {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"}, + {file = "identify-2.5.21.tar.gz", hash = 
"sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"}, ] [package.extras] @@ -1236,14 +1241,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "6.1.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, + {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, + {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, ] [package.dependencies] @@ -1550,7 +1555,6 @@ files = [ {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, - {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, @@ -1560,7 +1564,6 @@ 
files = [ {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, - {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, @@ -2059,19 +2062,16 @@ rdflib = ">=6.0.2" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - [[package]] name = "pathspec" version = "0.11.1" @@ -3125,89 +3125,89 @@ files = [ [[package]] name = "pyzmq" -version = 
"25.0.1" +version = "25.0.2" description = "Python bindings for 0MQ" category = "main" optional = true python-versions = ">=3.6" files = [ - {file = "pyzmq-25.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:94f65e13e6df035b0ae90d49adfe7891aa4e7bdeaa65265729fecc04ab3eb0fe"}, - {file = "pyzmq-25.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0399450d970990705ce47ed65f5efed3e4627dfc80628c3798100e7b72e023b"}, - {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f29709b0431668a967d7ff0394b00a865e7b7dde827ee0a47938b705b7c4aec3"}, - {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fee9420b34c0ab426f105926a701a3d73f878fe77f07a1b92e0b78d1e2c795c"}, - {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57be375c6bc66b0f685cd298e5c1c3d7ee34a254145b8087aed6e25db372b0f3"}, - {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a3309b2c5a5be3d48c9ade77b340361764449aa22854ac65935b1e6c0cdabe2c"}, - {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7574d24579e83ee8c5d3b14769e7ba895161c43a601e911dd89d449e545e00ad"}, - {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:041d617091258133e602919b28fdce4d3e2f8aedcd1e8b34c599653bc288d59e"}, - {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7897ba8c3fedc6b3023bad676ceb69dbf90c077ff18ae3133ba43db47417cc72"}, - {file = "pyzmq-25.0.1-cp310-cp310-win32.whl", hash = "sha256:c462f70dadbd4649e572ca7cd1e7cf3305a8c2afc53b84214c0a7c0c3af8a657"}, - {file = "pyzmq-25.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e3a721710992cf0e213bbb7be48fb0f32202e8d01f556c196c870373bb9ad4f4"}, - {file = "pyzmq-25.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:b0a0fcf56279b9f3acc9b36a83feb7640c51b0db444b6870e4406d002be1d514"}, - {file = "pyzmq-25.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:95aff52fc847ea5755d2370f86e379ba2ed6eb67a0a6f90f0e8e99c553693b81"}, - {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b55366e6c11e1ef7403d072b9867b62cf63eebd31dd038ef65bc8d65572854f6"}, - {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64a2bc72bcad705ee42a8fe877478ddadb7e260e806562833d3d814125e28a44"}, - {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca66aa24422d7f324acd5cb7fc7df616eb6f0205e059393fb108702e33e90c7"}, - {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:58d5dfec2e2befd09b04c4683b3c984d2203cf6e054d0f9786be3826737ad612"}, - {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3549292d65987e422e2c9f105b1485448381f489d8a6b6b040fc8b8f497bd578"}, - {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5b1ca8b0df50d1ac88857ffe9ebd1347e0a5bb5f6e1d99940fdd7df0ffdefb49"}, - {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1a107e89cdcf799060ba4fa85fd3c942e19df7b24eb2600618b2406cc73c18e"}, - {file = "pyzmq-25.0.1-cp311-cp311-win32.whl", hash = "sha256:0f22ba4e9041549a5a3f5a545169dda52fa0aa7b5ef46b336cbe6679c4c3c134"}, - {file = "pyzmq-25.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:0644c0d5c73e4bfeee8148f638ab16ad783df1c4d6c2f968552a26a43fb002a1"}, - {file = "pyzmq-25.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c5eb4b17d73b1fc208a4faa6b5918983ccc961770aa37741891f61db302dae4e"}, - {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:649dd55948144a108041397f07c1299086ce1c85c2e166831db3a33dac1d0c7f"}, - {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c99fd8d3efc138d6a7fb1e822133f62bb18ffec66dc6d398dcb2ac2ab8eb2cb0"}, - {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:d72d69d4bb37c05a446d10bc40b391cf8fb7572654fb73fa69e7d2a395197e65"}, - {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:036dbf8373aed4ccf56d58c561b23601b8f33919ec1093d8c77b37ac1259702d"}, - {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:861c37649c75a2ecfc2034a32b9d5ca744e1e0cddcbf65afbd8027cf7d9755be"}, - {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:92f04d63aecbb71d41f7db5f988167ef429f96d8197fd46684688cdb513e8a2e"}, - {file = "pyzmq-25.0.1-cp36-cp36m-win32.whl", hash = "sha256:866a4e918f1f4b2f83e9982b817df257910e3e50e456ffa74f141a10adcd11d1"}, - {file = "pyzmq-25.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:ec29c880b82cd38a63810a93b77e13f167e05732049101947772eed9ae805097"}, - {file = "pyzmq-25.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0241a334e39aa74e4ba0ae5f9e29521f1b48b8d56bf707f25f322c04eb423e99"}, - {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b7032f55b1ed2cd8c349a89e467dca2338b7765fab82cb64c3504e49adaf51"}, - {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:960f98f562ee6a50ecf283bc62479d00f5ee10e9068a21683b9e961cd87c9261"}, - {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:835da498b71570d56e5526de4d5b36fa10dd9b8a82e2c405f963afeb51ff5bdc"}, - {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21de2ef6099fa8d6a3c2dc15aaca58e9f9ffdcc7b82a246590aa9564815699d9"}, - {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e448a5a294958e915a7e1b664e6fbfcd3814989d381fb068673317f6f3ea3f8"}, - {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40d909bdc8a2d64ad260925154712602ee6a0425ae0b08bce78a19adfdc2f05b"}, - {file = "pyzmq-25.0.1-cp37-cp37m-win32.whl", hash = "sha256:6ff37f2b818df25c887fd40bb434569db7ff66b35f5dfff6f40cc476aee92e3f"}, - {file = 
"pyzmq-25.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66ee27a0221771bbaa2cce456e8ca890569c3d18b08b955eb6420c12516537c"}, - {file = "pyzmq-25.0.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1003bbae89435eadec03b4fa3bb6516dd1529fb09ae5704284f7400cc77009ba"}, - {file = "pyzmq-25.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dde7a65a8bfa88aa1721add504320f8344272542291ce4e7c77993fa32901567"}, - {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:20b6155429d3b57e9e7bd11f1680985ef8b5b0868f1a64073fb8c01326c7c80c"}, - {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e37a764cbf91c1ed9a02e4fede79a414284aca2a0b7d92d82a3c7b82d678ec2d"}, - {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa56a362066b3a853a64d35693a08046f640961efcc0e7643768916403e72e70"}, - {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c4bdf1241886d39d816535d3ef9fc325bbf02470c9fd5f2cb62706eeb834f7f2"}, - {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:446acbac24427ef42bff61a807ddcad8d03df78fb976184a4d7d6f4b1e7d8a67"}, - {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b39847501d229e5fab155d88a565edfb182cdd3f7046f15a7f2df9c77cdc422d"}, - {file = "pyzmq-25.0.1-cp38-cp38-win32.whl", hash = "sha256:cba6b81b653d789d76e438c2e77b49f610b23e84b3bb43b99100f08a0a5d637b"}, - {file = "pyzmq-25.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:6eca6b90c4fb290efd27582780b5eaf048887a32b2c5fcd6330819192cb07b38"}, - {file = "pyzmq-25.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:58207a6709e53b723105bac6bb3c6795ee134f7e71351f39c09d52ac235c6b0d"}, - {file = "pyzmq-25.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c62084f37682e7ee4064e8310078be4f6f7687bf528ae5761e2ba7216c5b8949"}, - {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:9c44e9f04f8ed99c6f2e9e49f29d400d7557dd9e9e3f64e1e8a595aedc4258a2"}, - {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c635d1c40d341835066931a018e378428dfbe0347ed4bb45a6b57f7d8c34196e"}, - {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef93b5574c9ff36b4be376555efd369bd55b99bcc7be72f23bd38102dd9392b"}, - {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44bc81099ab33388f6c061c1b194307d877428cb2b18282d0385584d5c73ed72"}, - {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6d988844ed6caa21b0076b64671e83a136d93c57f1ae5a72b915661af55d313b"}, - {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9d5eb6e88ae8a8734f239ffe1ed90559a426cf5b859b8ee66e0cd43fc5daf5c9"}, - {file = "pyzmq-25.0.1-cp39-cp39-win32.whl", hash = "sha256:f6b45db9de4c8adbf5fda58e827a32315d282cfb01e54dc74e7c7ccc0988c010"}, - {file = "pyzmq-25.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:47eeb94b78aa442568b85ad28f85bd37a9c3c34d052cbf8ebf8622c45f23a9cd"}, - {file = "pyzmq-25.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ed7475f3adf0c7750d75740b3267947b501a33f4625ceae709fda2e75ec9ed7"}, - {file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d09c22ed4d0afcc662d17c2429a03fc1fae7fe7e3bc1f413e744bccfeaabdc3"}, - {file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:703ec5f5a8369c09d8f3eb626358bdb590a2b1375bcce8b7da01b3a03f8b8668"}, - {file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aea31cc0d1f6c3fb4685db08b4c771545cf3fed3c4b4c8942c0a4e97042ec8"}, - {file = "pyzmq-25.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b1c03b942557bb366fd3dc377a15763d5d688de1328228136c75e50f968333cc"}, - {file = "pyzmq-25.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:4e8a5ced9d92837f52ccdae6351c627b5012669727bc3eede2dc0f581eca1d0e"}, - {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d78f840d88244272fb7252e47522b1179833aff7ec64583bda3d21259c9c2c20"}, - {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c3f78fa80780e24d294f9421123cb3bd3b68677953c53da85273a22d1c983298"}, - {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f6de4305e02560111a5d4555758faa85d44a5bff70cccff58dbf30c81a079f0"}, - {file = "pyzmq-25.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:34a1b1a8ce9b20e01aba71b9279d9b1d4e5980a6a4e42092180e16628a444ca1"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:625759a0112af7c3fb560de5724d749729f00b901f7625d1a3f3fb38897544b1"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cff159b21438c24476a49865f3d5700c9cc5833600661bc0e672decec2ff357"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc47652d990de9ef967c494c526d73920ef064fef0444355a7cebec6fc50542"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44db5162a6881f7d740dec65917f38f9bfbc5ad9a10e06d7d5deebb27eb63939"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f38bf2c60a3f7b87cf5177043eb7a331a4f53bc9305a2452decbd42ad0c98741"}, - {file = "pyzmq-25.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1cf4becd15669bc62a41c1b1bb742e22ac25965134e4254cde82a4dc2554b1b"}, - {file = "pyzmq-25.0.1.tar.gz", hash = "sha256:44a24f7ce44e70d20e2a4c9ba5af70b4611df7a4b920eed2c8e0bdd5a5af225f"}, + {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"}, + {file = 
"pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"}, + {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"}, + {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"}, + {file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"}, + {file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"}, + {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"}, + {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"}, + {file = 
"pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"}, + {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"}, + {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"}, + {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"}, + {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"}, + {file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"}, + {file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"}, + {file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"}, + {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"}, + {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"}, + {file = 
"pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"}, + {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"}, + {file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"}, + {file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"}, + {file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"}, + {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"}, + {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"}, + {file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"}, + {file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"}, + {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"}, + {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"}, + {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"}, + {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"}, + {file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"}, + {file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"}, + {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"}, + {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"}, + {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"}, + {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"}, + {file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"}, + {file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"}, + {file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"}, + {file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"}, + {file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"}, + {file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"}, ] [package.dependencies] @@ -3963,14 +3963,14 @@ telegram = ["requests"] [[package]] name = "transaction" -version = "3.0.1" +version = "3.1.0" description = "Transaction management for Python" category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ - {file = 
"transaction-3.0.1-py2.py3-none-any.whl", hash = "sha256:2329a6e6b82d1d8d4de9267ea6ee790532c375e5911d3c7633a234e94a4a0a9e"}, - {file = "transaction-3.0.1.tar.gz", hash = "sha256:0c15ef0b7ff3518357ceea75722a30d974c3f85e11aa5cec5d5a2b6a40cfcf68"}, + {file = "transaction-3.1.0-py2.py3-none-any.whl", hash = "sha256:8376a959aa71821df1bdd7d066858a3f9f34b7f5f1c0a0e1efbd11d626895449"}, + {file = "transaction-3.1.0.tar.gz", hash = "sha256:65d0b1ea92dbe7c4e3b237fb6bd8b41dea23d7459e7bdd8c3880bffdaf912fa4"}, ] [package.dependencies] @@ -4604,48 +4604,42 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "5.5.2" +version = "6.0" description = "Interfaces for Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "zope.interface-5.5.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a2ad597c8c9e038a5912ac3cf166f82926feff2f6e0dabdab956768de0a258f5"}, - {file = "zope.interface-5.5.2-cp27-cp27m-win_amd64.whl", hash = "sha256:65c3c06afee96c654e590e046c4a24559e65b0a87dbff256cd4bd6f77e1a33f9"}, - {file = "zope.interface-5.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d514c269d1f9f5cd05ddfed15298d6c418129f3f064765295659798349c43e6f"}, - {file = "zope.interface-5.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5334e2ef60d3d9439c08baedaf8b84dc9bb9522d0dacbc10572ef5609ef8db6d"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc26c8d44472e035d59d6f1177eb712888447f5799743da9c398b0339ed90b1b"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:17ebf6e0b1d07ed009738016abf0d0a0f80388e009d0ac6e0ead26fc162b3b9c"}, - {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f98d4bd7bbb15ca701d19b93263cc5edfd480c3475d163f137385f49e5b3a3a7"}, - {file = "zope.interface-5.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:696f3d5493eae7359887da55c2afa05acc3db5fc625c49529e84bd9992313296"}, - {file = "zope.interface-5.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7579960be23d1fddecb53898035a0d112ac858c3554018ce615cefc03024e46d"}, - {file = "zope.interface-5.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:765d703096ca47aa5d93044bf701b00bbce4d903a95b41fff7c3796e747b1f1d"}, - {file = "zope.interface-5.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e945de62917acbf853ab968d8916290548df18dd62c739d862f359ecd25842a6"}, - {file = "zope.interface-5.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:655796a906fa3ca67273011c9805c1e1baa047781fca80feeb710328cdbed87f"}, - {file = "zope.interface-5.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:0fb497c6b088818e3395e302e426850f8236d8d9f4ef5b2836feae812a8f699c"}, - {file = "zope.interface-5.5.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:008b0b65c05993bb08912f644d140530e775cf1c62a072bf9340c2249e613c32"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:404d1e284eda9e233c90128697c71acffd55e183d70628aa0bbb0e7a3084ed8b"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3218ab1a7748327e08ef83cca63eea7cf20ea7e2ebcb2522072896e5e2fceedf"}, - {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d169ccd0756c15bbb2f1acc012f5aab279dffc334d733ca0d9362c5beaebe88e"}, - {file = "zope.interface-5.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e1574980b48c8c74f83578d1e77e701f8439a5d93f36a5a0af31337467c08fcf"}, - {file = "zope.interface-5.5.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = 
"sha256:0217a9615531c83aeedb12e126611b1b1a3175013bbafe57c702ce40000eb9a0"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:311196634bb9333aa06f00fc94f59d3a9fddd2305c2c425d86e406ddc6f2260d"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6373d7eb813a143cb7795d3e42bd8ed857c82a90571567e681e1b3841a390d16"}, - {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:959697ef2757406bff71467a09d940ca364e724c534efbf3786e86eee8591452"}, - {file = "zope.interface-5.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dbaeb9cf0ea0b3bc4b36fae54a016933d64c6d52a94810a63c00f440ecb37dd7"}, - {file = "zope.interface-5.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604cdba8f1983d0ab78edc29aa71c8df0ada06fb147cea436dc37093a0100a4e"}, - {file = "zope.interface-5.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e74a578172525c20d7223eac5f8ad187f10940dac06e40113d62f14f3adb1e8f"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0980d44b8aded808bec5059018d64692f0127f10510eca71f2f0ace8fb11188"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6e972493cdfe4ad0411fd9abfab7d4d800a7317a93928217f1a5de2bb0f0d87a"}, - {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9d783213fab61832dbb10d385a319cb0e45451088abd45f95b5bb88ed0acca1a"}, - {file = "zope.interface-5.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:a16025df73d24795a0bde05504911d306307c24a64187752685ff6ea23897cb0"}, - {file = "zope.interface-5.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40f4065745e2c2fa0dff0e7ccd7c166a8ac9748974f960cd39f63d2c19f9231f"}, 
- {file = "zope.interface-5.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a2ffadefd0e7206adc86e492ccc60395f7edb5680adedf17a7ee4205c530df4"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d692374b578360d36568dd05efb8a5a67ab6d1878c29c582e37ddba80e66c396"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4087e253bd3bbbc3e615ecd0b6dd03c4e6a1e46d152d3be6d2ad08fbad742dcc"}, - {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb68d212efd057596dee9e6582daded9f8ef776538afdf5feceb3059df2d2e7b"}, - {file = "zope.interface-5.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:7e66f60b0067a10dd289b29dceabd3d0e6d68be1504fc9d0bc209cf07f56d189"}, - {file = "zope.interface-5.5.2.tar.gz", hash = "sha256:bfee1f3ff62143819499e348f5b8a7f3aa0259f9aca5e0ddae7391d059dce671"}, + {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, + {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, + {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, + {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, + {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, + {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, + {file 
= "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, + {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, + {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, + {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, ] [package.dependencies] @@ -4741,4 +4735,4 @@ service = ["apispec", "apispec-webframeworks", "circus", "flask", "gunicorn", "m [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "5ff8d9a7f9bdba2ac728c60eff4a8a6abd4c76c1b7bb8d713e630a325b6879b3" +content-hash = "36cd7f93223525c4ba573a3a6e57c56380055c1ffed30aa5e0f699588360a3ab" diff --git a/pyproject.toml b/pyproject.toml index 29ec4000c6..97d4c4af29 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,7 +74,7 @@ inject = "<4.4.0,>=4.3.0" jinja2 = { version = ">=2.11.3,<3.1.3" } networkx = "<2.7,>=2.6.0" numpy = ">=1.20.0,<1.22.0" -packaging = "<22.0,>=21.3" +packaging = "<24.0,>=23.0" pathspec = "<1.0.0,>=0.8.0" patool = "==1.12" pluggy = "==1.0.0" @@ -121,7 +121,7 @@ sentry-sdk = { version = ">=1.5.11,<1.5.12", extras = ["flask"], optional = tru walrus = { version = ">=0.8.2,<0.10.0", optional = true } [tool.poetry.group.dev.dependencies] -black = "==22.10.0" +black = "==23.1.0" flake8 = ">=6.0.0,<7.0.0" Flake8-pyproject = "==1.2.2" isort = "<5.10.2,>=5.3.2" diff --git a/renku/command/dataset.py b/renku/command/dataset.py index 7bbea3ec5a..dc1fb86238 100644 --- a/renku/command/dataset.py +++ b/renku/command/dataset.py @@ -17,7 +17,7 @@ """Repository datasets management.""" from renku.command.command_builder.command import Command -from renku.core.constant import CONFIG_LOCAL_PATH, DATASET_METADATA_PATHS +from renku.core.constant import DATASET_METADATA_PATHS from renku.core.dataset.dataset import ( create_dataset, edit_dataset, @@ -130,7 +130,7 @@ def list_tags_command(): def pull_cloud_storage_command(): """Command for pulling/copying data from a cloud storage.""" command = Command().command(pull_cloud_storage).lock_dataset().with_database(write=True) - return 
command.require_migration().with_commit(commit_only=DATASET_METADATA_PATHS + [CONFIG_LOCAL_PATH]) + return command.require_migration().with_commit(commit_only=DATASET_METADATA_PATHS) def mount_cloud_storage_command(unmount: bool): diff --git a/renku/core/__init__.py b/renku/core/__init__.py index 33816dd570..c301d6efee 100644 --- a/renku/core/__init__.py +++ b/renku/core/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/config.py b/renku/core/config.py index 864c63aca7..899676113b 100644 --- a/renku/core/config.py +++ b/renku/core/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/constant.py b/renku/core/constant.py index a20687aff5..bde8581c07 100644 --- a/renku/core/constant.py +++ b/renku/core/constant.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/dataset/dataset.py b/renku/core/dataset/dataset.py index 4a2ab254ab..8acec11ba6 100644 --- a/renku/core/dataset/dataset.py +++ b/renku/core/dataset/dataset.py @@ -50,6 +50,7 @@ delete_dataset_file, delete_path, get_absolute_path, + get_file_size, get_files, get_safe_relative_path, hash_file, @@ -59,8 +60,10 @@ ) from renku.core.util.tabulate import tabulate from renku.core.util.urls import get_slug -from renku.core.util.util import NO_VALUE, NoValueType +from renku.core.util.util import parallel_execute +from renku.domain_model.constant import NO_VALUE, NON_EXISTING_ENTITY_CHECKSUM, NoValueType from renku.domain_model.dataset import Dataset, DatasetDetailsJson, DatasetFile, RemoteEntity, is_dataset_name_valid +from renku.domain_model.entity import Entity from renku.domain_model.enums import ConfigFilter from renku.domain_model.project_context import project_context from renku.domain_model.provenance.agent import Person @@ -68,6 +71,7 @@ from renku.infrastructure.immutable import DynamicProxy if TYPE_CHECKING: + from renku.core.interface.storage import IStorage from renku.infrastructure.repository import Repository @@ -1249,75 +1253,90 @@ def should_include(filepath: Path) -> bool: return sorted(records, key=lambda r: r.date_added) -@validate_arguments(config=dict(arbitrary_types_allowed=True)) -def pull_cloud_storage(name: str, location: Optional[Path] = None) -> None: - """Pull/copy data for a cloud storage to a dataset's data directory or a specified location. +def download_file(file: DatasetFile, storage: "IStorage") -> List[DatasetFile]: + """Download a dataset file and retrieve its missing metadata (if any). Args: - name(str): Name of the dataset - location(Optional[Path]): A directory to copy data to (Default value = None). - """ - datasets_provenance = DatasetsProvenance() + file(DatasetFile): Dataset file to download. 
+ storage: Dataset's cloud storage (an instance of ``IStorage``). - dataset = datasets_provenance.get_by_name(name=name, strict=True) - - if not dataset.storage: - communication.warn(f"Dataset '{name}' doesn't have a storage backend") - return + Returns: + List[DatasetFile]: A list with the updated file if its metadata was missing; an empty list otherwise. - # NOTE: Try to unmount the path in case it was mounted before - unmount_path(project_context.path / dataset.get_datadir()) + """ + if not file.based_on: + raise errors.DatasetImportError(f"Dataset file doesn't have a URI: {file.entity.path}") - create_symlinks = True - destination: Union[Path, str] + path = project_context.path / file.entity.path + path.parent.mkdir(parents=True, exist_ok=True) - if location: - destination = get_absolute_path(location) - else: - stored_location = read_dataset_data_location(dataset=dataset) - if stored_location: - destination = stored_location - else: - destination = project_context.path - create_symlinks = False + # NOTE: Don't check if destination file exists. ``IStorage.copy`` won't copy a file if it exists and is not + # modified. - provider = ProviderFactory.get_pull_provider(uri=dataset.storage) - storage = provider.get_storage() + communication.start_progress(name=file.entity.path, total=1) + try: + storage.download(file.based_on.url, path) + communication.update_progress(name=file.entity.path, amount=1) + finally: + communication.finalize_progress(name=file.entity.path) - updated_files = [] + # NOTE: File has no missing information + if file.has_valid_checksum() and file.has_valid_size(): + return [] - for file in dataset.files: - path = Path(destination) / file.entity.path - path.parent.mkdir(parents=True, exist_ok=True) - # NOTE: Don't check if destination exists. ``IStorage.copy`` won't copy a file if it exists and is not modified. 
+ if not file.has_valid_checksum(): + md5_hash = hash_file(path, hash_type="md5") or NON_EXISTING_ENTITY_CHECKSUM + entity = Entity(path=file.entity.path, checksum=md5_hash) + remote_entity = RemoteEntity(checksum=md5_hash, url=file.based_on.url, path=file.based_on.path) + else: + entity = file.entity + remote_entity = file.based_on + + size = file.size if file.has_valid_size() else get_file_size(path) + + return [ + DatasetFile( + entity=entity, + based_on=remote_entity, + size=size, + date_added=file.date_added, + date_removed=file.date_removed, + source=file.source, + ) + ] - if not file.based_on: - raise errors.DatasetImportError(f"Dataset file doesn't have a URI: {file.entity.path}") - with communication.busy(f"Copying {file.entity.path} ..."): - storage.download(file.based_on.url, path) +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def pull_cloud_storage(name: str, location: Optional[Path] = None) -> None: + """Pull/copy data for a cloud storage to a dataset's data directory or a specified location. - # NOTE: Make files read-only since we don't support pushing data to the remote storage - os.chmod(path, 0o400) + Args: + name(str): Name of the dataset + location(Optional[Path]): A directory to copy data to (Default value = None). + """ + dataset, datadir = _get_dataset_with_cloud_storage(name=name) - if not file.based_on.checksum: - md5_hash = hash_file(path, hash_type="md5") or "" - file.based_on = RemoteEntity(checksum=md5_hash, url=file.based_on.url, path=file.based_on.path) + # NOTE: Try to unmount the path in case it was mounted before + unmount_path(datadir) - new_file = DynamicProxy(file) - new_file.dataset = dataset - updated_files.append(new_file) + if location: + if not is_path_empty(datadir): + communication.confirm( + f"Dataset's data directory will be removed: {dataset.get_datadir()}. 
Do you want to continue?", + abort=True, + warning=True, + ) + create_symlink(target=location, symlink_path=datadir, overwrite=True) - if create_symlinks: - symlink_path = project_context.path / file.entity.path - symlink_path.parent.mkdir(parents=True, exist_ok=True) - create_symlink(path=path, symlink_path=symlink_path, overwrite=True) + provider = ProviderFactory.get_pull_provider(uri=dataset.storage) + storage = provider.get_storage() - # NOTE: Store location in metadata in case where we want to mount the external storage in the same location - store_dataset_data_location(dataset=dataset, location=location) + updated_files = parallel_execute(download_file, dataset.files, rate=5, storage=storage) if updated_files: - _update_datasets_files_metadata(updated_files=updated_files, deleted_files=[], delete=False) + dataset.add_or_update_files(updated_files) + DatasetsProvenance().add_or_update(dataset, creator=get_git_user(repository=project_context.repository)) + project_context.database.commit() def store_dataset_data_location(dataset: Dataset, location: Optional[Path]) -> None: @@ -1358,7 +1377,7 @@ def mount_cloud_storage(name: str, existing: Optional[Path], yes: bool) -> None: ) if existing: - create_symlink(path=existing, symlink_path=datadir, overwrite=True) + create_symlink(target=existing, symlink_path=datadir, overwrite=True) return delete_path(datadir) diff --git a/renku/core/dataset/dataset_add.py b/renku/core/dataset/dataset_add.py index 1829ce590c..d562bc82d8 100644 --- a/renku/core/dataset/dataset_add.py +++ b/renku/core/dataset/dataset_add.py @@ -18,8 +18,9 @@ import itertools import os.path import shutil +import tempfile from pathlib import Path -from typing import Generator, List, Optional, Tuple, Union, cast +from typing import Dict, Generator, List, Optional, Tuple, Union, cast from renku.command.command_builder.command import inject from renku.core import errors @@ -35,10 +36,11 @@ from renku.core.storage import check_external_storage, 
track_paths_in_storage from renku.core.util import communication, requests from renku.core.util.git import get_git_user -from renku.core.util.os import get_absolute_path, get_files, get_relative_path, get_size, hash_file, is_subpath +from renku.core.util.os import get_absolute_path, get_file_size, get_files, get_relative_path, hash_file, is_subpath from renku.core.util.urls import check_url, is_uri_subfolder, resolve_uri +from renku.core.util.util import parallel_execute +from renku.domain_model.constant import NON_EXISTING_ENTITY_CHECKSUM from renku.domain_model.dataset import Dataset, DatasetFile, RemoteEntity -from renku.domain_model.entity import NON_EXISTING_ENTITY_CHECKSUM from renku.domain_model.project_context import project_context @@ -364,94 +366,108 @@ def get_upload_uri(dataset: Dataset, entity_path: Union[Path, str]) -> str: return f"{base}/{path_within_dataset}" -def copy_files_to_dataset(dataset: Dataset, files: List[DatasetAddMetadata]): - """Copy/Move files into a dataset's directory.""" +def copy_file(file: DatasetAddMetadata, dataset: Dataset, storage: Optional[IStorage]) -> List[Optional[Path]]: + """Copy/move/link a file to dataset's data directory.""" + if not file.has_action: + return [] - def copy_file(file: DatasetAddMetadata, storage: Optional[IStorage]) -> bool: - if not file.has_action: - return False + # NOTE: If file is in a sub-directory of a dataset's remote storage URI, only update the metadata + if file.from_cloud_storage: + if dataset.storage and is_uri_subfolder(resolve_uri(dataset.storage), file.url): + file.action = DatasetAddAction.METADATA_ONLY + else: + file.action = DatasetAddAction.DOWNLOAD + + if file.action in ( + DatasetAddAction.COPY, + DatasetAddAction.MOVE, + DatasetAddAction.SYMLINK, + DatasetAddAction.DOWNLOAD, + ): + try: + file.destination.parent.mkdir(parents=True, exist_ok=True) + except OSError as e: + raise errors.InvalidFileOperation(f"Cannot create destination '{file.destination.parent}': {e}") - 
track_in_lfs = True + file_to_upload: Union[Path, str] = file.source.resolve() + delete_source = False + track_in_lfs = True - # NOTE: If file is in a sub-directory of a dataset's remote storage URI, only update the metadata - if file.from_cloud_storage: - if dataset.storage and is_uri_subfolder(resolve_uri(dataset.storage), file.url): - file.action = DatasetAddAction.METADATA_ONLY + try: + if file.action == DatasetAddAction.DOWNLOAD: + # NOTE: Download to a temporary location if dataset has a cloud storage because it's usually mounted as + # read-only and download would fail. It's ok not to move it to dataset's data dir since it'll be uploaded. + dst: Union[Path, str] + if storage: + fd, dst = tempfile.mkstemp() + os.close(fd) else: - file.action = DatasetAddAction.DOWNLOAD + dst = file.destination + + assert file.provider, f"Storage provider isn't set for {file} with DOWNLOAD action" + download_storage = file.provider.get_storage() + download_storage.download(file.url, dst) + file_to_upload = dst + elif file.action == DatasetAddAction.COPY: + shutil.copy(file.source, file.destination) + elif file.action == DatasetAddAction.MOVE: + # NOTE: Set ``delete_source`` in case move fails due to a dataset's read-only mounted data directory + delete_source = True + shutil.move(file.source, file.destination, copy_function=shutil.copy) # type: ignore + delete_source = False + file_to_upload = file.destination + elif file.action == DatasetAddAction.SYMLINK: + create_external_file(target=file.source, path=file.destination) + # NOTE: Don't track symlinks to external files in LFS + track_in_lfs = False + elif file.metadata_only: + # NOTE: Nothing to do when adding file to a dataset with a parent remote storage + pass + else: + raise errors.OperationError(f"Invalid action {file.action}") + except OSError as e: + # NOTE: It's ok if copying data to a read-only mounted cloud storage fails + if "Read-only file system" in str(e) and storage: + pass + else: + dst = 
get_relative_path(file.destination, project_context.path) or file.destination + raise errors.InvalidFileOperation(f"Cannot copy/move '{dst}': {e}") - file_to_upload = file.source.resolve() + if file.size is None: + file.size = get_file_size(file_to_upload) - if file.action in ( - DatasetAddAction.COPY, - DatasetAddAction.MOVE, - DatasetAddAction.SYMLINK, - DatasetAddAction.DOWNLOAD, - ): - try: - file.destination.parent.mkdir(parents=True, exist_ok=True) - except OSError as e: - raise errors.InvalidFileOperation(f"Cannot create destination '{file.destination.parent}': {e}") + if storage: + # NOTE: Don't track files in a dataset with cloud storage in LFS + track_in_lfs = False - try: - if file.action == DatasetAddAction.COPY: - shutil.copy(file.source, file.destination) - elif file.action == DatasetAddAction.MOVE: - shutil.move(file.source, file.destination, copy_function=shutil.copy) # type: ignore - elif file.action == DatasetAddAction.SYMLINK: - create_external_file(target=file.source, path=file.destination) - # NOTE: Don't track symlinks to external files in LFS - track_in_lfs = False - elif file.action == DatasetAddAction.DOWNLOAD: - assert file.provider, f"Storage provider isn't set for {file} with DOWNLOAD action" - download_storage = file.provider.get_storage() - download_storage.download(file.url, file.destination) - file_to_upload = file.destination - elif file.metadata_only: - # NOTE: Nothing to do when adding file to a dataset with a parent remote storage - pass - else: - raise errors.OperationError(f"Invalid action {file.action}") - except OSError as e: - # NOTE: It's ok if copying data to a read-only mounted cloud storage fails - if "Read-only file system" in str(e) and storage: - pass - else: - dst = get_relative_path(file.destination, project_context.path) or file.destination - raise errors.InvalidFileOperation(f"Cannot copy/move '{dst}': {e}") + if file.metadata_only: + assert file.based_on, f"wasBasedOn isn't set for {file} with METADATA_ONLY 
action" + file_uri = file.based_on.url + md5_hash: Optional[str] = file.based_on.checksum + else: + file_uri = get_upload_uri(dataset=dataset, entity_path=file.entity_path) + md5_hash = hash_file(file_to_upload, hash_type="md5") - if file.size is None: - file.size = get_size(file_to_upload) + # NOTE: If dataset has a storage backend, upload the file to the remote storage. + storage.upload(source=file_to_upload, uri=file_uri) - # NOTE: We always copy the files to the dataset's data dir. If dataset has a storage backend, we also upload the - # file to the remote storage. - if storage: - md5_hash: str = file.based_on.checksum if file.based_on else "" + file.based_on = RemoteEntity(url=file_uri, path=file.entity_path, checksum=md5_hash) - if file.metadata_only: - assert file.based_on, f"wasBasedOn isn't set for {file} with METADATA_ONLY action" - file_uri = file.based_on.url - else: - file_uri = get_upload_uri(dataset=dataset, entity_path=file.entity_path) - storage.upload(source=file_to_upload, uri=file_uri) - if not md5_hash: - md5_hash = hash_file(file_to_upload, hash_type="md5") or NON_EXISTING_ENTITY_CHECKSUM + if delete_source: + file.source.unlink(missing_ok=True) + + return [file.destination] if track_in_lfs else [] - file.based_on = RemoteEntity(url=file_uri, path=file.entity_path, checksum=md5_hash) - return track_in_lfs +def copy_files_to_dataset(dataset: Dataset, files: List[DatasetAddMetadata]): + """Copy/Move files into a dataset's directory.""" dataset_storage = None if dataset.storage: provider = ProviderFactory.get_storage_provider(uri=dataset.storage) dataset_storage = provider.get_storage() - lfs_files = [] - - for dataset_file in files: - # TODO: Parallelize copy/download/upload - if copy_file(file=dataset_file, storage=dataset_storage): - lfs_files.append(dataset_file.destination) + lfs_files = parallel_execute(copy_file, files, rate=5, dataset=dataset, storage=dataset_storage) if lfs_files and not dataset.storage: 
track_paths_in_storage(*lfs_files) @@ -479,12 +495,18 @@ def add_files_to_repository(dataset: Dataset, files: List[DatasetAddMetadata]): def update_dataset_metadata(dataset: Dataset, files: List[DatasetAddMetadata], clear_files_before: bool): """Add newly-added files to the dataset's metadata.""" - dataset_files = [] - repo_paths: List[Union[Path, str]] = [ - file.entity_path for file in files if (project_context.path / file.entity_path).exists() - ] + # NOTE: For datasets with cloud storage backend, we use MD5 hash as checksum instead of git hash. + if dataset.storage: + checksums: Dict[Union[Path, str], Optional[str]] = { + f.entity_path: f.based_on.checksum for f in files if f.based_on + } + else: + repo_paths: List[Union[Path, str]] = [ + file.entity_path for file in files if (project_context.path / file.entity_path).exists() + ] + checksums = project_context.repository.get_object_hashes(repo_paths) - checksums = project_context.repository.get_object_hashes(repo_paths) + dataset_files = [] for file in files: dataset_file = DatasetFile.from_path( @@ -492,7 +514,7 @@ def update_dataset_metadata(dataset: Dataset, files: List[DatasetAddMetadata], c source=file.url, based_on=file.based_on, size=file.size, - checksum=checksums.get(file.entity_path), + checksum=checksums.get(file.entity_path) or NON_EXISTING_ENTITY_CHECKSUM, ) dataset_files.append(dataset_file) diff --git a/renku/core/dataset/providers/api.py b/renku/core/dataset/providers/api.py index 45bf005b5e..c4c44f405e 100644 --- a/renku/core/dataset/providers/api.py +++ b/renku/core/dataset/providers/api.py @@ -23,7 +23,7 @@ from renku.core import errors from renku.core.constant import ProviderPriority from renku.core.plugin import hookimpl -from renku.core.util.util import NO_VALUE, NoValueType +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.domain_model.dataset_provider import IDatasetProviderPlugin if TYPE_CHECKING: diff --git a/renku/core/dataset/providers/azure.py 
b/renku/core/dataset/providers/azure.py index 248e5011cf..985a03bae2 100644 --- a/renku/core/dataset/providers/azure.py +++ b/renku/core/dataset/providers/azure.py @@ -19,7 +19,6 @@ from pathlib import Path from typing import TYPE_CHECKING, List, Optional, Tuple, cast -from renku.command.command_builder import inject from renku.core import errors from renku.core.dataset.providers.api import ( AddProviderInterface, @@ -30,10 +29,11 @@ ) from renku.core.dataset.providers.common import get_metadata from renku.core.dataset.providers.models import DatasetAddAction -from renku.core.interface.storage import IStorage, IStorageFactory +from renku.core.interface.storage import IStorage from renku.core.util.metadata import get_canonical_key, prompt_for_credentials from renku.core.util.urls import get_scheme from renku.domain_model.project_context import project_context +from renku.infrastructure.storage.factory import StorageFactory if TYPE_CHECKING: from renku.core.dataset.providers.models import DatasetAddMetadata @@ -80,10 +80,7 @@ def get_credentials(self) -> "AzureCredentials": """Return an instance of provider's credential class.""" return AzureCredentials(provider=self) - @inject.autoparams("storage_factory") - def get_storage( - self, storage_factory: "IStorageFactory", credentials: Optional["ProviderCredentials"] = None - ) -> "IStorage": + def get_storage(self, credentials: Optional["ProviderCredentials"] = None) -> "IStorage": """Return the storage manager for the provider.""" azure_configuration = { "type": "azureblob", @@ -93,7 +90,7 @@ def get_storage( credentials = self.get_credentials() prompt_for_credentials(credentials) - return storage_factory.get_storage( + return StorageFactory.get_storage( storage_scheme="azure", provider=self, credentials=credentials, diff --git a/renku/core/dataset/providers/external.py b/renku/core/dataset/providers/external.py index b261efe26d..0767fe0213 100644 --- a/renku/core/dataset/providers/external.py +++ 
b/renku/core/dataset/providers/external.py @@ -21,7 +21,6 @@ from pathlib import Path from typing import TYPE_CHECKING, List, Optional, Tuple, cast -from renku.command.command_builder import inject from renku.core import errors from renku.core.dataset.providers.api import ( AddProviderInterface, @@ -32,10 +31,11 @@ ) from renku.core.dataset.providers.common import get_metadata from renku.core.dataset.providers.models import DatasetAddAction -from renku.core.interface.storage import IStorage, IStorageFactory +from renku.core.interface.storage import IStorage from renku.core.util.os import get_absolute_path from renku.core.util.urls import get_scheme from renku.domain_model.project_context import project_context +from renku.infrastructure.storage.factory import StorageFactory if TYPE_CHECKING: from renku.core.dataset.providers.models import DatasetAddMetadata @@ -86,10 +86,7 @@ def get_credentials(self) -> "ExternalCredentials": """Return an instance of provider's credential class.""" return ExternalCredentials(provider=self) - @inject.autoparams("storage_factory") - def get_storage( - self, storage_factory: "IStorageFactory", credentials: Optional["ProviderCredentials"] = None - ) -> "IStorage": + def get_storage(self, credentials: Optional["ProviderCredentials"] = None) -> "IStorage": """Return the storage manager for the provider.""" external_configuration = { "type": "local", @@ -98,7 +95,7 @@ def get_storage( if not credentials: credentials = self.get_credentials() - return storage_factory.get_storage( + return StorageFactory.get_storage( storage_scheme="file", provider=self, credentials=credentials, diff --git a/renku/core/dataset/providers/s3.py b/renku/core/dataset/providers/s3.py index 68d692161f..d3b387e591 100644 --- a/renku/core/dataset/providers/s3.py +++ b/renku/core/dataset/providers/s3.py @@ -19,7 +19,6 @@ from pathlib import Path from typing import TYPE_CHECKING, List, Optional, Tuple, cast -from renku.command.command_builder import inject from 
renku.core import errors from renku.core.dataset.providers.api import ( AddProviderInterface, @@ -30,10 +29,11 @@ ) from renku.core.dataset.providers.common import get_metadata from renku.core.dataset.providers.models import DatasetAddAction -from renku.core.interface.storage import IStorage, IStorageFactory +from renku.core.interface.storage import IStorage from renku.core.util.metadata import prompt_for_credentials from renku.core.util.urls import get_scheme from renku.domain_model.project_context import project_context +from renku.infrastructure.storage.factory import StorageFactory if TYPE_CHECKING: from renku.core.dataset.providers.models import DatasetAddMetadata @@ -79,10 +79,7 @@ def get_credentials(self) -> "S3Credentials": """Return an instance of provider's credential class.""" return S3Credentials(provider=self) - @inject.autoparams("storage_factory") - def get_storage( - self, storage_factory: "IStorageFactory", credentials: Optional["ProviderCredentials"] = None - ) -> "IStorage": + def get_storage(self, credentials: Optional["ProviderCredentials"] = None) -> "IStorage": """Return the storage manager for the provider.""" s3_configuration = { "type": "s3", @@ -94,7 +91,7 @@ def get_storage( credentials = self.get_credentials() prompt_for_credentials(credentials) - return storage_factory.get_storage( + return StorageFactory.get_storage( storage_scheme="s3", provider=self, credentials=credentials, diff --git a/renku/core/dataset/providers/web.py b/renku/core/dataset/providers/web.py index 723d251545..90d34cd67e 100644 --- a/renku/core/dataset/providers/web.py +++ b/renku/core/dataset/providers/web.py @@ -15,8 +15,6 @@ # limitations under the License. 
"""Web dataset provider.""" -import concurrent.futures -import os import urllib from pathlib import Path from typing import TYPE_CHECKING, List, Optional, Tuple @@ -25,9 +23,8 @@ from renku.core import errors from renku.core.constant import CACHE from renku.core.dataset.providers.api import AddProviderInterface, ProviderApi, ProviderPriority -from renku.core.util import communication -from renku.core.util.contexts import wait_for from renku.core.util.urls import check_url, remove_credentials +from renku.core.util.util import parallel_execute from renku.domain_model.project_context import project_context if TYPE_CHECKING: @@ -102,13 +99,13 @@ def _provider_check(url): def download_file( - project_path: Path, uri: str, + filename: Optional[str] = None, + *, + project_path: Path, destination: Path, extract: bool = False, - filename: Optional[str] = None, multiple: bool = False, - delay: float = 0, ) -> List["DatasetAddMetadata"]: """Download a file from a URI and return its metadata.""" from renku.core.dataset.providers.models import DatasetAddAction, DatasetAddMetadata @@ -120,10 +117,9 @@ def download_file( with project_context.with_path(project_path): try: # NOTE: If execution time was less than the delay, block the request until delay seconds are passed - with wait_for(delay): - tmp_root, paths = requests.download_file( - base_directory=project_context.metadata_path / CACHE, url=uri, filename=filename, extract=extract - ) + tmp_root, paths = requests.download_file( + base_directory=project_context.metadata_path / CACHE, url=uri, filename=filename, extract=extract + ) except errors.RequestError as e: # pragma nocover raise errors.OperationError(f"Cannot download from {uri}") from e @@ -161,37 +157,12 @@ def download_files( destination.mkdir(parents=True, exist_ok=True) - listeners = communication.get_listeners() - - def subscribe_communication_listeners(function, **kwargs): - try: - for communicator in listeners: - communication.subscribe(communicator) - return 
function(**kwargs) - finally: - for communicator in listeners: - communication.unsubscribe(communicator) - - files = [] - n_cpus = os.cpu_count() or 1 - max_workers = min(n_cpus + 4, 8) - with concurrent.futures.ThreadPoolExecutor(max_workers) as executor: - futures = { - executor.submit( - subscribe_communication_listeners, - download_file, - project_path=project_context.path, - uri=url, - destination=destination, - extract=extract, - filename=name, - multiple=True, - delay=max_workers, # NOTE: Rate limit to 1 request/second - ) - for url, name in zip(urls, names) - } - - for future in concurrent.futures.as_completed(futures): - files.extend(future.result()) - - return files + return parallel_execute( + download_file, + urls, + names, + project_path=project_context.path, + destination=destination, + extract=extract, + multiple=True, + ) diff --git a/renku/core/errors.py b/renku/core/errors.py index b4178629f5..f3f94b7a75 100644 --- a/renku/core/errors.py +++ b/renku/core/errors.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/gc.py b/renku/core/gc.py index 217d491f8d..93cfc8cfd5 100644 --- a/renku/core/gc.py +++ b/renku/core/gc.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/git.py b/renku/core/git.py index 8f9b1ce7dc..1debddeb08 100644 --- a/renku/core/git.py +++ b/renku/core/git.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/githooks.py b/renku/core/githooks.py index f144178d65..347956a412 100644 --- a/renku/core/githooks.py +++ b/renku/core/githooks.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/init.py b/renku/core/init.py index 71baf95d04..8594d53504 100644 --- a/renku/core/init.py +++ b/renku/core/init.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/login.py b/renku/core/login.py index 1074e159b2..7fb9058050 100644 --- a/renku/core/login.py +++ b/renku/core/login.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0003__1_jsonld.py b/renku/core/migration/m_0003__1_jsonld.py index 7a0d381ca4..380425eb7b 100644 --- a/renku/core/migration/m_0003__1_jsonld.py +++ b/renku/core/migration/m_0003__1_jsonld.py @@ -199,7 +199,7 @@ def _migrate_absolute_paths(data): def _migrate_doi_identifier(data): """If the dataset _id is doi, make it a UUID.""" from renku.core.util.doi import is_doi - from renku.core.util.uuid import is_uuid + from renku.core.util.util import is_uuid _id = data.get("_id", "") identifier = data.get("identifier", "") diff --git a/renku/core/migration/m_0009__new_metadata_storage.py b/renku/core/migration/m_0009__new_metadata_storage.py index 86df2d1e77..d1cdaad811 100644 --- a/renku/core/migration/m_0009__new_metadata_storage.py +++ b/renku/core/migration/m_0009__new_metadata_storage.py @@ -48,7 +48,8 @@ from renku.core.migration.utils.conversion import convert_dataset from renku.core.util import communication from renku.core.util.yaml import load_yaml -from renku.domain_model.entity import NON_EXISTING_ENTITY_CHECKSUM, Collection, Entity +from renku.domain_model.constant import NON_EXISTING_ENTITY_CHECKSUM +from renku.domain_model.entity import Collection, Entity from renku.domain_model.project_context import has_graph_files, project_context from renku.domain_model.provenance.activity import Activity, Association, Generation, Usage 
from renku.domain_model.provenance.agent import Person, SoftwareAgent diff --git a/renku/core/project.py b/renku/core/project.py index 94865b91d4..c852a31ecf 100644 --- a/renku/core/project.py +++ b/renku/core/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,7 +23,7 @@ from renku.command.view_model.project import ProjectViewModel from renku.core.interface.project_gateway import IProjectGateway from renku.core.util.metadata import construct_creator -from renku.core.util.util import NO_VALUE, NoValueType +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.domain_model.project_context import project_context from renku.domain_model.provenance.agent import Person diff --git a/renku/core/storage.py b/renku/core/storage.py index afd1cf0610..be02ae3c6f 100644 --- a/renku/core/storage.py +++ b/renku/core/storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,9 +34,8 @@ from renku.core.config import get_value from renku.core.constant import RENKU_LFS_IGNORE_PATH, RENKU_PROTECTED_PATHS from renku.core.util import communication -from renku.core.util.file_size import parse_file_size from renku.core.util.git import get_in_submodules, run_command -from renku.core.util.os import expand_directories +from renku.core.util.os import expand_directories, parse_file_size from renku.domain_model.project_context import project_context if TYPE_CHECKING: diff --git a/renku/core/util/__init__.py b/renku/core/util/__init__.py index 6c34437f92..425cca8b0a 100644 --- a/renku/core/util/__init__.py +++ b/renku/core/util/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/communication.py b/renku/core/util/communication.py index 893470ee04..0c7eb91bc4 100644 --- a/renku/core/util/communication.py +++ b/renku/core/util/communication.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/contexts.py b/renku/core/util/contexts.py index a6290b2899..25ac373d37 100644 --- a/renku/core/util/contexts.py +++ b/renku/core/util/contexts.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/datetime8601.py b/renku/core/util/datetime8601.py index b8a7729c5e..64c1665bfb 100644 --- a/renku/core/util/datetime8601.py +++ b/renku/core/util/datetime8601.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/doi.py b/renku/core/util/doi.py index 302b50290c..f7e944c4ac 100644 --- a/renku/core/util/doi.py +++ b/renku/core/util/doi.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/file_size.py b/renku/core/util/file_size.py deleted file mode 100644 index 5458cde61b..0000000000 --- a/renku/core/util/file_size.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Helper utilities for handling file size strings.""" - -import re -from typing import Optional - -units = { - "b": 1, - "kb": 1000, - "mb": 1000**2, - "gb": 1000**3, - "tb": 1000**4, - "m": 1000**2, - "g": 1000**3, - "t": 1000**4, - "p": 1000**5, - "e": 1000**6, - "z": 1000**7, - "y": 1000**8, - "ki": 1024, - "mi": 1024**2, - "gi": 1024**3, - "ti": 1024**4, - "pi": 1024**5, - "ei": 1024**6, - "zi": 1024**7, - "yi": 1024**8, -} - - -def parse_file_size(size_str): - """Parse a human readable file size to bytes.""" - res = re.search(r"([0-9.]+)([a-zA-Z]{1,2})", size_str) - if not res or res.group(2).lower() not in units: - raise ValueError( - "Supplied file size does not contain a unit. 
" "Valid units are: {}".format(", ".join(units.keys())) - ) - - value = float(res.group(1)) - unit = units[res.group(2).lower()] - - return int(value * unit) - - -def bytes_to_unit(size_in_bytes, unit: str) -> Optional[float]: - """Return size in the provided unit.""" - unit = unit.lower() - if unit not in units: - raise ValueError(f"Invalid unit '{unit}'. Valid units are: [{', '.join(units)}]") - return None if size_in_bytes is None else size_in_bytes / units[unit] diff --git a/renku/core/util/git.py b/renku/core/util/git.py index 8a886ead24..ff41a9d440 100644 --- a/renku/core/util/git.py +++ b/renku/core/util/git.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -388,7 +387,8 @@ def get_entity_from_revision( Entity: The Entity for the given path and revision. """ - from renku.domain_model.entity import NON_EXISTING_ENTITY_CHECKSUM, Collection, Entity + from renku.domain_model.constant import NON_EXISTING_ENTITY_CHECKSUM + from renku.domain_model.entity import Collection, Entity def get_directory_members(absolute_path: Path) -> List[Entity]: """Return first-level files/directories in a directory.""" diff --git a/renku/core/util/jwt.py b/renku/core/util/jwt.py index cc24c7bdb6..eb1375706b 100644 --- a/renku/core/util/jwt.py +++ b/renku/core/util/jwt.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/os.py b/renku/core/util/os.py index 6291531f29..2472d64535 100644 --- a/renku/core/util/os.py +++ b/renku/core/util/os.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -155,11 +154,11 @@ def is_path_empty(path: Union[Path, str]) -> bool: return not any(subpaths) -def create_symlink(path: Union[Path, str], symlink_path: Union[Path, str], overwrite: bool = True) -> None: - """Create a symlink that points from symlink_path to path.""" +def create_symlink(target: Union[Path, str], symlink_path: Union[Path, str], overwrite: bool = True) -> None: + """Create a symlink that points from symlink_path to target.""" # NOTE: Don't resolve symlink path absolute_symlink_path = get_absolute_path(symlink_path) - absolute_path = get_absolute_path(path, resolve_symlinks=True) + absolute_path = get_absolute_path(target, resolve_symlinks=True) Path(absolute_symlink_path).parent.mkdir(parents=True, exist_ok=True) @@ -168,7 +167,7 @@ def create_symlink(path: Union[Path, str], symlink_path: Union[Path, str], overw delete_path(absolute_symlink_path) os.symlink(absolute_path, absolute_symlink_path) except OSError: - raise errors.InvalidFileOperation(f"Cannot create symlink from '{symlink_path}' to '{path}'") + raise errors.InvalidFileOperation(f"Cannot create symlink from '{symlink_path}' to '{target}'") def delete_path(path: Union[Path, str]) -> None: @@ -213,7 +212,7 @@ def is_ascii(data): return len(data) == len(data.encode()) -def get_size(path: Union[Path, str], follow_symlinks: bool = True) -> Optional[int]: +def get_file_size(path: 
Union[Path, str], follow_symlinks: bool = True) -> Optional[int]: """Return size of a file in bytes.""" path = Path(path).resolve() if follow_symlinks else Path(path) try: @@ -340,3 +339,49 @@ def expand_directories(paths): else: processed_paths.add(matched_path) yield matched_path + + +UNITS = { + "b": 1, + "kb": 1000, + "mb": 1000**2, + "gb": 1000**3, + "tb": 1000**4, + "m": 1000**2, + "g": 1000**3, + "t": 1000**4, + "p": 1000**5, + "e": 1000**6, + "z": 1000**7, + "y": 1000**8, + "ki": 1024, + "mi": 1024**2, + "gi": 1024**3, + "ti": 1024**4, + "pi": 1024**5, + "ei": 1024**6, + "zi": 1024**7, + "yi": 1024**8, +} + + +def parse_file_size(size_str): + """Parse a human readable file size to bytes.""" + res = re.search(r"([0-9.]+)([a-zA-Z]{1,2})", size_str) + if not res or res.group(2).lower() not in UNITS: + raise ValueError( + "Supplied file size does not contain a unit. " "Valid units are: {}".format(", ".join(UNITS.keys())) + ) + + value = float(res.group(1)) + unit = UNITS[res.group(2).lower()] + + return int(value * unit) + + +def bytes_to_unit(size_in_bytes, unit: str) -> Optional[float]: + """Return size in the provided unit.""" + unit = unit.lower() + if unit not in UNITS: + raise ValueError(f"Invalid unit '{unit}'. Valid units are: [{', '.join(UNITS)}]") + return None if size_in_bytes is None else size_in_bytes / UNITS[unit] diff --git a/renku/core/util/requests.py b/renku/core/util/requests.py index 8bca2b36c9..5bf802f854 100644 --- a/renku/core/util/requests.py +++ b/renku/core/util/requests.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/shacl.py b/renku/core/util/shacl.py index 37c5b06a38..09e8405c81 100644 --- a/renku/core/util/shacl.py +++ b/renku/core/util/shacl.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/ssh.py b/renku/core/util/ssh.py index eb68d46859..ec73c36a63 100644 --- a/renku/core/util/ssh.py +++ b/renku/core/util/ssh.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -38,7 +37,7 @@ class SSHKeyPair(NamedTuple): def generate_ssh_keys() -> SSHKeyPair: - """Generate an SSH keypair. + """Generate an SSH key pair. Returns: Private Public key pair. diff --git a/renku/core/util/tabulate.py b/renku/core/util/tabulate.py index f0c69f87a5..84c4894e93 100644 --- a/renku/core/util/tabulate.py +++ b/renku/core/util/tabulate.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/template_vars.py b/renku/core/util/template_vars.py deleted file mode 100644 index 9ea77acaf1..0000000000 --- a/renku/core/util/template_vars.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Template variable utility methods.""" - -import datetime -from string import Formatter -from typing import Any, Iterable, Mapping, Tuple, Union - -from renku.core.errors import ParameterError -from renku.domain_model.workflow.parameter import CommandParameterBase - - -class TemplateVariableFormatter(Formatter): - """Template variable formatter for `CommandParameterBase`.""" - - RESERVED_KEYS = ["iter_index"] - - def __init__(self): - super().__init__() - - def apply(self, param: str, parameters: Mapping[str, Any] = {}) -> str: - """Renders the parameter template into its final value.""" - try: - return super().vformat(param, args=[datetime.datetime.now()], kwargs=parameters) - except KeyError as e: - raise ParameterError(f"Could not resolve the variable {str(e)}") - - def get_value(self, key, args, kwargs): - """Ignore some special keys when formatting the variable.""" - if key in self.RESERVED_KEYS: - return key - return super().get_value(key, args, kwargs) - - @staticmethod - def to_map(parameters: Iterable[Union[CommandParameterBase, Tuple[str, str]]]) -> Mapping[str, str]: - """Converts a list of `CommandParameterBase` into parameter name-value dictionary.""" - return dict( - map( - lambda x: (x.name, x.actual_value) if isinstance(x, CommandParameterBase) else (x[1], str(x[0])), - parameters, - ) - ) diff --git a/renku/core/util/urls.py b/renku/core/util/urls.py index e141fc7431..9a491d58af 100644 --- a/renku/core/util/urls.py +++ b/renku/core/util/urls.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/util.py b/renku/core/util/util.py index ab7e273788..7a817c06fc 100644 --- a/renku/core/util/util.py +++ b/renku/core/util/util.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,8 +15,13 @@ # limitations under the License. """General utility functions.""" -from typing import Any, NewType, Optional +import concurrent.futures +import os +import uuid +from pathlib import Path +from typing import Any, Callable, List, Optional, Tuple, Union +import deal from packaging.version import Version @@ -34,8 +38,74 @@ def to_semantic_version(value: str) -> Optional[Version]: return None -NoValueType = NewType("NoValueType", object) -"""Type to represent a value not being set in cases where ``None`` is a valid value.""" +def is_uuid(value): + """Check if value is UUID4. + + Copied from https://stackoverflow.com/questions/19989481/ + """ + try: + uuid_obj = uuid.UUID(value, version=4) + except ValueError: + return False + + return str(uuid_obj) == value + + +@deal.pre(lambda _: _.rate > 0, message="Rate must be positive") +def parallel_execute( + function: Callable[..., List[Any]], *data: Union[Tuple[Any, ...], List[Any]], rate: float = 1, **kwargs +) -> List[Any]: + """Execute the function using multiple threads. + + Args: + function(Callable[..., Any]): Function to parallelize. Must accept at least one parameter and returns a list. + data(Union[Tuple[Any], List[Any]]): List of data where each of its elements is passed to a function's execution. + rate(float): Number of executions per thread per second. 
+ + Returns: + List[Any]: A list of return results of all executions. + + """ + from renku.core.util import communication + from renku.core.util.contexts import wait_for + from renku.domain_model.project_context import project_context + + listeners = communication.get_listeners() + + def subscribe_communication_listeners(delay: float, path: Path, function, *data, **kwargs): + try: + for communicator in listeners: + communication.subscribe(communicator) + if not project_context.has_context(path): + project_context.push_path(path) + with wait_for(delay): + return function(*data, **kwargs) + finally: + for communicator in listeners: + communication.unsubscribe(communicator) + + # NOTE: Disable parallelization during tests for easier debugging + if is_test_session_running(): + max_workers = 1 + delay = 0.0 + else: + n_cpus = os.cpu_count() or 1 + max_workers = min(n_cpus + 4, 8) + delay = max_workers / rate if len(data[0]) > max_workers else 0 + + files = [] + with concurrent.futures.ThreadPoolExecutor(max_workers) as executor: + futures = { + executor.submit(subscribe_communication_listeners, delay, project_context.path, function, *d, **kwargs) + for d in zip(*data) + } + + for future in concurrent.futures.as_completed(futures): + files.extend(future.result()) + + return files + -NO_VALUE = NoValueType(object()) -"""Sentinel to represent a value not being set in cases where ``None`` is a valid value.""" +def is_test_session_running() -> bool: + """Return if the code is being executed in a test and not called by user.""" + return "RENKU_RUNNING_UNDER_TEST" in os.environ diff --git a/renku/core/util/yaml.py b/renku/core/util/yaml.py index bce66e7fb1..b40e0c8ac2 100644 --- a/renku/core/util/yaml.py +++ b/renku/core/util/yaml.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/plan.py b/renku/core/workflow/plan.py index fee577bb3a..5be92ecee3 100644 --- a/renku/core/workflow/plan.py +++ b/renku/core/workflow/plan.py @@ -35,9 +35,9 @@ from renku.core.util import communication from renku.core.util.git import get_git_user from renku.core.util.os import are_paths_related, get_relative_paths -from renku.core.util.util import NO_VALUE, NoValueType from renku.core.workflow.model.concrete_execution_graph import ExecutionGraph from renku.core.workflow.value_resolution import CompositePlanValueResolver, ValueResolver +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.domain_model.project_context import project_context from renku.domain_model.provenance.activity import Activity from renku.domain_model.provenance.agent import Person @@ -194,7 +194,6 @@ def show_workflow(name_or_id_or_path: str, activity_gateway: IActivityGateway, w touches_existing_files = _check_workflow_touches_existing_files(workflow, touches_files_cache, activity_map) if isinstance(workflow, Plan): - num_executions = 0 last_execution: Optional[datetime] = None diff --git a/renku/core/workflow/plan_factory.py b/renku/core/workflow/plan_factory.py index af2cabc100..2cd6e5095a 100644 --- a/renku/core/workflow/plan_factory.py +++ b/renku/core/workflow/plan_factory.py @@ -175,7 +175,6 @@ def add_inputs_and_parameters(self, arguments: List[str]): output_streams = {getattr(self, stream_name) for stream_name in ("stdout", "stderr")} for index, argument in enumerate(arguments): - if prefix: if argument.startswith("-"): position += 1 diff --git a/renku/core/workflow/value_resolution.py b/renku/core/workflow/value_resolution.py index cf3094293f..6a2025a402 100644 --- a/renku/core/workflow/value_resolution.py +++ 
b/renku/core/workflow/value_resolution.py @@ -16,17 +16,53 @@ # limitations under the License. """Resolution of ``Workflow`` execution values precedence.""" +import datetime from abc import ABC, abstractmethod from itertools import chain -from typing import Any, Dict, Optional, Set +from string import Formatter +from typing import Any, Dict, Iterable, Mapping, Optional, Set, Tuple, Union from renku.core import errors -from renku.core.util.template_vars import TemplateVariableFormatter from renku.domain_model.workflow.composite_plan import CompositePlan -from renku.domain_model.workflow.parameter import ParameterMapping +from renku.domain_model.workflow.parameter import CommandParameterBase, ParameterMapping from renku.domain_model.workflow.plan import AbstractPlan, Plan +class TemplateVariableFormatter(Formatter): + """Template variable formatter for `CommandParameterBase`.""" + + RESERVED_KEYS = ["iter_index"] + + def __init__(self): + super().__init__() + + def apply(self, param: str, parameters: Optional[Mapping[str, Any]] = None) -> str: + """Renders the parameter template into its final value.""" + if parameters is None: + parameters = {} + + try: + return super().vformat(param, args=[datetime.datetime.now()], kwargs=parameters) + except KeyError as e: + raise errors.ParameterError(f"Could not resolve the variable {str(e)}") + + def get_value(self, key, args, kwargs): + """Ignore some special keys when formatting the variable.""" + if key in self.RESERVED_KEYS: + return key + return super().get_value(key, args, kwargs) + + @staticmethod + def to_map(parameters: Iterable[Union[CommandParameterBase, Tuple[str, str]]]) -> Mapping[str, str]: + """Converts a list of `CommandParameterBase` into parameter name-value dictionary.""" + return dict( + map( + lambda x: (x.name, x.actual_value) if isinstance(x, CommandParameterBase) else (x[1], str(x[0])), + parameters, + ) + ) + + class ValueResolver(ABC): """Value resolution class for an ``AbstractPlan``.""" diff --git 
a/renku/data/__init__.py b/renku/data/__init__.py index f3182e0662..3fdd696779 100644 --- a/renku/data/__init__.py +++ b/renku/data/__init__.py @@ -1 +1,16 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. """Data files for the Renku package.""" diff --git a/renku/data/pre-commit.sh b/renku/data/pre-commit.sh index 77a568be5a..c5ffb2f772 100755 --- a/renku/data/pre-commit.sh +++ b/renku/data/pre-commit.sh @@ -1,8 +1,6 @@ #!/usr/bin/env bash -# -*- coding: utf-8 -*- -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/__init__.py b/renku/domain_model/__init__.py index 29794e35b1..d4b5f0b122 100644 --- a/renku/domain_model/__init__.py +++ b/renku/domain_model/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/util/uuid.py b/renku/domain_model/constant.py similarity index 55% rename from renku/core/util/uuid.py rename to renku/domain_model/constant.py index 15a35448cc..f9c79bdcae 100644 --- a/renku/core/util/uuid.py +++ b/renku/domain_model/constant.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,18 +13,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Helper utilities for handling UUIDs.""" -import uuid - +"""Renku domain models constants.""" -def is_uuid(value): - """Check if value is UUID4. 
+from typing import NewType - Copied from https://stackoverflow.com/questions/19989481/ - """ - try: - uuid_obj = uuid.UUID(value, version=4) - except ValueError: - return False +NoValueType = NewType("NoValueType", object) +"""Type to represent a value not being set in cases where ``None`` is a valid value.""" - return str(uuid_obj) == value +NO_VALUE = NoValueType(object()) +"""Sentinel to represent a value not being set in cases where ``None`` is a valid value.""" +NON_EXISTING_ENTITY_CHECKSUM = "0" * 40 diff --git a/renku/domain_model/dataset.py b/renku/domain_model/dataset.py index 03885b1ed8..b1c823734f 100644 --- a/renku/domain_model/dataset.py +++ b/renku/domain_model/dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -34,7 +33,7 @@ from renku.core.util.metadata import is_linked_file from renku.core.util.os import get_absolute_path from renku.core.util.urls import get_path, get_slug -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE, NON_EXISTING_ENTITY_CHECKSUM from renku.domain_model.project_context import project_context from renku.infrastructure.immutable import Immutable, Slots from renku.infrastructure.persistent import Persistent @@ -203,10 +202,10 @@ class RemoteEntity(Slots): __slots__ = ("checksum", "id", "path", "url") - def __init__(self, *, checksum: str, id: Optional[str] = None, path: Union[Path, str], url: str): + def __init__(self, *, checksum: Optional[str], id: Optional[str] = None, path: Union[Path, str], url: str): super().__init__() - self.checksum: str = checksum - self.id: str = id or RemoteEntity.generate_id(checksum=checksum, path=path, url=url) + self.checksum: str = checksum or NON_EXISTING_ENTITY_CHECKSUM + self.id: str = id or RemoteEntity.generate_id(checksum=self.checksum, path=path, url=url) self.path: str = str(path) self.url: str = url @@ -274,7 +273,7 @@ def from_path( size: Optional[int] = None, ) -> "DatasetFile": """Return an instance from a path.""" - from renku.domain_model.entity import NON_EXISTING_ENTITY_CHECKSUM, Entity + from renku.domain_model.entity import Entity # NOTE: Data is added from an external storage and isn't pulled yet if based_on and not (project_context.path / path).exists(): @@ -307,6 +306,9 @@ def from_dataset_file(cls, other: "DatasetFile") -> "DatasetFile": return self + def __repr__(self) -> str: + return f"" + def correct_linked_attribute(self): """Replace ``is_external`` attribute with ``linked`` for linked dataset files.""" if self.is_external and is_linked_file(self.entity.path, project_path=project_context.path): @@ -344,6 +346,22 @@ def is_removed(self) -> bool: """Return true if dataset is 
removed and should not be accessed.""" return self.date_removed is not None + def has_valid_checksum(self) -> bool: + """Return if file has a valid checksum.""" + return ( + bool(self.entity.checksum) + and self.entity.checksum != NON_EXISTING_ENTITY_CHECKSUM + and ( + self.based_on is None + or self.based_on.checksum != NON_EXISTING_ENTITY_CHECKSUM + or bool(self.based_on.checksum) + ) + ) + + def has_valid_size(self) -> bool: + """Return if file has a valid size.""" + return self.size is not None + class Dataset(Persistent): """Represent a dataset.""" diff --git a/renku/domain_model/dataset_provider.py b/renku/domain_model/dataset_provider.py index 87ab8aae06..a492508f7d 100644 --- a/renku/domain_model/dataset_provider.py +++ b/renku/domain_model/dataset_provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/datastructures.py b/renku/domain_model/datastructures.py index 02ac8bcb74..d3f273ae9d 100644 --- a/renku/domain_model/datastructures.py +++ b/renku/domain_model/datastructures.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/entity.py b/renku/domain_model/entity.py index 2ccb1cf7f6..02b5365909 100644 --- a/renku/domain_model/entity.py +++ b/renku/domain_model/entity.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,8 +22,6 @@ from renku.infrastructure.immutable import Immutable -NON_EXISTING_ENTITY_CHECKSUM = "0" * 40 - class Entity(Immutable): """Represent a file.""" diff --git a/renku/domain_model/enums.py b/renku/domain_model/enums.py index a762296270..8c53d25506 100644 --- a/renku/domain_model/enums.py +++ b/renku/domain_model/enums.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2020- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/git.py b/renku/domain_model/git.py index d1fe32022e..56f729f004 100644 --- a/renku/domain_model/git.py +++ b/renku/domain_model/git.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/project.py b/renku/domain_model/project.py index 16060a1710..397fadda22 100644 --- a/renku/domain_model/project.py +++ b/renku/domain_model/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,7 +26,7 @@ from renku.core.util.datetime8601 import fix_datetime, local_now, parse_date from renku.core.util.git import get_git_user from renku.core.util.os import normalize_to_ascii -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE from renku.domain_model.provenance.agent import Person from renku.domain_model.provenance.annotation import Annotation from renku.version import __minimum_project_version__ diff --git a/renku/domain_model/project_context.py b/renku/domain_model/project_context.py index 535870759b..c545b40752 100644 --- a/renku/domain_model/project_context.py +++ b/renku/domain_model/project_context.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -221,9 +220,9 @@ def _top(self) -> "ProjectProperties": raise errors.ProjectContextError("No project context was pushed") - def has_context(self) -> bool: - """Return if at least one context is pushed.""" - return bool(self._context_stack) + def has_context(self, path: Optional[Union[Path, str]] = None) -> bool: + """Return if at least one context which is equal to path (if not None) is pushed.""" + return True if self._context_stack and (path is None or self.path == Path(path).resolve()) else False def clear(self) -> None: """Remove all contexts and reset the state without committing intermediate changes. diff --git a/renku/domain_model/provenance/__init__.py b/renku/domain_model/provenance/__init__.py index 444894dec0..fd91c86ffb 100644 --- a/renku/domain_model/provenance/__init__.py +++ b/renku/domain_model/provenance/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/provenance/activity.py b/renku/domain_model/provenance/activity.py index 8fe69ff7f2..1026fcc645 100644 --- a/renku/domain_model/provenance/activity.py +++ b/renku/domain_model/provenance/activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/provenance/agent.py b/renku/domain_model/provenance/agent.py index 6d617a921e..c44c64a2b5 100644 --- a/renku/domain_model/provenance/agent.py +++ b/renku/domain_model/provenance/agent.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/provenance/annotation.py b/renku/domain_model/provenance/annotation.py index 8a232626d1..8059bb5d27 100644 --- a/renku/domain_model/provenance/annotation.py +++ b/renku/domain_model/provenance/annotation.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/provenance/parameter.py b/renku/domain_model/provenance/parameter.py index 8b5b3d9b1f..8265919f38 100644 --- a/renku/domain_model/provenance/parameter.py +++ b/renku/domain_model/provenance/parameter.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/session.py b/renku/domain_model/session.py index 06c15cf2f3..c853b79277 100644 --- a/renku/domain_model/session.py +++ b/renku/domain_model/session.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/sort.py b/renku/domain_model/sort.py index 4307eb95aa..85203083ac 100644 --- a/renku/domain_model/sort.py +++ b/renku/domain_model/sort.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/template.py b/renku/domain_model/template.py index d6204fa5e8..b074e59073 100644 --- a/renku/domain_model/template.py +++ b/renku/domain_model/template.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/__init__.py b/renku/domain_model/workflow/__init__.py index 967fdf0a77..ed18ed9217 100644 --- a/renku/domain_model/workflow/__init__.py +++ b/renku/domain_model/workflow/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/composite_plan.py b/renku/domain_model/workflow/composite_plan.py index 19ea44b337..6468d61be3 100644 --- a/renku/domain_model/workflow/composite_plan.py +++ b/renku/domain_model/workflow/composite_plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/converters/__init__.py b/renku/domain_model/workflow/converters/__init__.py index a6b2765016..522f83fb89 100644 --- a/renku/domain_model/workflow/converters/__init__.py +++ b/renku/domain_model/workflow/converters/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/parameter.py b/renku/domain_model/workflow/parameter.py index 630ad507a3..dbcae441d8 100644 --- a/renku/domain_model/workflow/parameter.py +++ b/renku/domain_model/workflow/parameter.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/plan.py b/renku/domain_model/workflow/plan.py index bb534a1329..a115b5be5f 100644 --- a/renku/domain_model/workflow/plan.py +++ b/renku/domain_model/workflow/plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/provider.py b/renku/domain_model/workflow/provider.py index 8380301828..d9da43910b 100644 --- a/renku/domain_model/workflow/provider.py +++ b/renku/domain_model/workflow/provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/domain_model/workflow/workflow_file.py b/renku/domain_model/workflow/workflow_file.py index a0d91b90b3..2f5633ed6d 100644 --- a/renku/domain_model/workflow/workflow_file.py +++ b/renku/domain_model/workflow/workflow_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/git_merger.py b/renku/infrastructure/git_merger.py index 07b74257a1..48bfc870ae 100644 --- a/renku/infrastructure/git_merger.py +++ b/renku/infrastructure/git_merger.py @@ -301,7 +301,6 @@ def merge_projects(self, local: Project, remote: Project, base: Optional[Project # NOTE: Merge description if local.description != remote.description: - if base is None or (local.description != base.description and remote.description != base.description): local.description = communication.prompt( f"Project description was modified in local and remote branch.\n" diff --git a/renku/infrastructure/repository.py b/renku/infrastructure/repository.py index 4935a88479..32fb4def21 100644 --- a/renku/infrastructure/repository.py +++ b/renku/infrastructure/repository.py @@ -1188,7 +1188,6 @@ def __iter__(self): raise errors.ParameterError("Repository not set.") for s in self._repository.submodules: - yield self._get_submodule(s) def __len__(self) -> int: diff --git a/renku/infrastructure/storage/rclone.py b/renku/infrastructure/storage/rclone.py index b8974a9f0f..a0be2cffa4 100644 --- a/renku/infrastructure/storage/rclone.py +++ b/renku/infrastructure/storage/rclone.py @@ -25,7 +25,7 @@ from renku.core import errors from renku.core.interface.storage import 
FileHash, IStorage -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE class RCloneStorage(IStorage): diff --git a/renku/ui/cli/dataset.py b/renku/ui/cli/dataset.py index 2adf83e1c9..7359b28ad1 100644 --- a/renku/ui/cli/dataset.py +++ b/renku/ui/cli/dataset.py @@ -518,7 +518,7 @@ from renku.command.format.dataset_files import DATASET_FILES_COLUMNS, DATASET_FILES_FORMATS from renku.command.format.dataset_tags import DATASET_TAGS_FORMATS from renku.command.format.datasets import DATASETS_COLUMNS, DATASETS_FORMATS -from renku.core.util.util import NO_VALUE, NoValueType +from renku.domain_model.constant import NO_VALUE, NoValueType def _complete_datasets(ctx, param, incomplete): diff --git a/renku/ui/cli/project.py b/renku/ui/cli/project.py index 51426ed6aa..695f9e6447 100644 --- a/renku/ui/cli/project.py +++ b/renku/ui/cli/project.py @@ -37,7 +37,7 @@ import click import renku.ui.cli.utils.color as color -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE from renku.ui.cli.utils.callback import ClickCallback diff --git a/renku/ui/cli/utils/click.py b/renku/ui/cli/utils/click.py index 08a60ad0ac..716104d6cd 100644 --- a/renku/ui/cli/utils/click.py +++ b/renku/ui/cli/utils/click.py @@ -50,7 +50,6 @@ def __init__(self, *args, **kwargs): self.mutually_exclusive.add(mutex[0]) self.mutually_exclusive_names.append(mutex[1]) else: - self.mutually_exclusive.add(mutex) self.mutually_exclusive_names.append(mutex) diff --git a/renku/ui/cli/workflow.py b/renku/ui/cli/workflow.py index dcb28194c6..532bbc7fdc 100644 --- a/renku/ui/cli/workflow.py +++ b/renku/ui/cli/workflow.py @@ -730,7 +730,7 @@ from renku.command.util import ERROR from renku.command.view_model.activity_graph import ACTIVITY_GRAPH_COLUMNS from renku.core import errors -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE from renku.ui.cli.utils.callback import ClickCallback from 
renku.ui.cli.utils.plugins import available_workflow_providers, get_supported_formats from renku.ui.cli.utils.terminal import print_workflow_file, show_text_with_pager diff --git a/renku/ui/service/controllers/api/mixins.py b/renku/ui/service/controllers/api/mixins.py index 7cd973b19b..0f72c21951 100644 --- a/renku/ui/service/controllers/api/mixins.py +++ b/renku/ui/service/controllers/api/mixins.py @@ -158,7 +158,6 @@ def execute_op(self): return self.remote() elif "git_url" in self.context and "user_id" in self.user_data: - try: project = Project.get( (Project.user_id == self.user_data["user_id"]) & (Project.git_url == self.context["git_url"]) diff --git a/renku/ui/service/controllers/cache_files_upload.py b/renku/ui/service/controllers/cache_files_upload.py index 9fb00d6781..08a2ae0947 100644 --- a/renku/ui/service/controllers/cache_files_upload.py +++ b/renku/ui/service/controllers/cache_files_upload.py @@ -23,7 +23,7 @@ from patoolib.util import PatoolError from renku.core.errors import RenkuException -from renku.core.util.file_size import bytes_to_unit +from renku.core.util.os import bytes_to_unit from renku.ui.service.config import CACHE_UPLOADS_PATH, MAX_CONTENT_LENGTH, SUPPORTED_ARCHIVES from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOperationMixin diff --git a/renku/ui/service/controllers/datasets_edit.py b/renku/ui/service/controllers/datasets_edit.py index 2d74dc3521..ed50999b7b 100644 --- a/renku/ui/service/controllers/datasets_edit.py +++ b/renku/ui/service/controllers/datasets_edit.py @@ -20,7 +20,7 @@ from renku.command.dataset import edit_dataset_command from renku.core.dataset.request_model import ImageRequestModel from renku.core.util.metadata import construct_creators -from renku.core.util.util import NO_VALUE, NoValueType +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.domain_model.provenance.agent import Person from 
renku.ui.service.cache.models.job import Job from renku.ui.service.config import CACHE_UPLOADS_PATH, MESSAGE_PREFIX diff --git a/renku/ui/service/controllers/project_edit.py b/renku/ui/service/controllers/project_edit.py index 1512d34605..a07a14e9b7 100644 --- a/renku/ui/service/controllers/project_edit.py +++ b/renku/ui/service/controllers/project_edit.py @@ -18,7 +18,7 @@ from typing import Dict, cast from renku.command.project import edit_project_command -from renku.core.util.util import NO_VALUE +from renku.domain_model.constant import NO_VALUE from renku.ui.service.cache.models.job import Job from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOpSyncMixin diff --git a/tests/core/commands/test_serialization.py b/tests/core/commands/test_serialization.py index c93c103800..a3dc77c42a 100644 --- a/tests/core/commands/test_serialization.py +++ b/tests/core/commands/test_serialization.py @@ -22,7 +22,7 @@ import pytest from renku.core.migration.models import v9 as old_datasets -from renku.core.util.uuid import is_uuid +from renku.core.util.util import is_uuid from tests.utils import get_dataset_with_injection diff --git a/tests/core/fixtures/core_workflow.py b/tests/core/fixtures/core_workflow.py index a052c54eb4..d6025329ab 100644 --- a/tests/core/fixtures/core_workflow.py +++ b/tests/core/fixtures/core_workflow.py @@ -33,7 +33,6 @@ def composite_plan(): """Fixture for a basic CompositePlan.""" def create_run(name: str) -> Plan: - run_id = Plan.generate_id() input1 = CommandInput( id=CommandInput.generate_id(run_id, 1), diff --git a/tests/service/fixtures/service_integration.py b/tests/service/fixtures/service_integration.py index ffc2aaaa8d..5cbaf5b908 100644 --- a/tests/service/fixtures/service_integration.py +++ b/tests/service/fixtures/service_integration.py @@ -236,7 +236,6 @@ def _mock_owner(self, data, **kwargs): # NOTE: init "remote" repo runner = RenkuRunner() with 
chdir(remote_repo_checkout_path): - result = runner.invoke( cli, ["init", ".", "--template-id", "python-minimal", "--force"], "\n", catch_exceptions=False ) diff --git a/tests/service/views/test_dataset_views.py b/tests/service/views/test_dataset_views.py index 444b19a25c..d22f217a52 100644 --- a/tests/service/views/test_dataset_views.py +++ b/tests/service/views/test_dataset_views.py @@ -1343,9 +1343,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): assert_rpc_response(response) assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) - assert {"keywords": [], "custom_metadata": None, "images": [],} == response.json[ - "result" - ]["edited"] + assert {"keywords": [], "custom_metadata": None, "images": []} == response.json["result"]["edited"] params_list = { "project_id": project_id, diff --git a/tests/service/views/test_project_views.py b/tests/service/views/test_project_views.py index 1eed21f9e0..925ac92b87 100644 --- a/tests/service/views/test_project_views.py +++ b/tests/service/views/test_project_views.py @@ -143,9 +143,7 @@ def test_edit_project_view_unset(svc_client_with_repo): assert_rpc_response(response) assert {"warning", "edited", "remote_branch"} == set(response.json["result"]) - assert {"keywords": None, "custom_metadata": None,} == response.json[ - "result" - ]["edited"] + assert {"keywords": None, "custom_metadata": None} == response.json["result"]["edited"] @pytest.mark.service