diff --git a/.github/workflows/acceptance-tests.yml b/.github/workflows/acceptance-tests.yml
index 12e30f17b0..e1f70034d2 100644
--- a/.github/workflows/acceptance-tests.yml
+++ b/.github/workflows/acceptance-tests.yml
@@ -31,7 +31,7 @@ jobs:
       persist: ${{ steps.deploy-comment.outputs.persist}}
     steps:
       - id: deploy-comment
-        uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.4.2
+        uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.4.5
         with:
           string: /deploy
           pr_ref: ${{ github.event.number }}
@@ -43,7 +43,7 @@ jobs:
       name: renku-ci-rp-${{ github.event.number }}
     steps:
       - name: deploy-pr
-        uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.4.1
+        uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.4.5
         env:
           DOCKER_PASSWORD: ${{ secrets.RENKU_DOCKER_PASSWORD }}
           DOCKER_USERNAME: ${{ secrets.RENKU_DOCKER_USERNAME }}
@@ -88,7 +88,7 @@ jobs:
     if: ${{ github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-enabled == 'true' }}
     needs: [check-deploy, deploy-pr]
     steps:
-      - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.4.1
+      - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.4.5
        with:
          kubeconfig: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }}
          renku-release: renku-ci-rp-${{ github.event.number }}
@@ -103,7 +103,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: renku teardown
-        uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.4.1
+        uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.4.5
        env:
          HELM_RELEASE_REGEX: "^renku-ci-rp-${{ github.event.number }}$"
          GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }}
diff --git a/.github/workflows/test_deploy.yml b/.github/workflows/test_deploy.yml
index a3b7a1018b..1a975ce513 100644
--- a/.github/workflows/test_deploy.yml
+++ b/.github/workflows/test_deploy.yml
@@ -614,7 +614,7 @@ jobs:
           echo "GIT_USER=Renku Bot" >> $GITHUB_ENV
           echo "GIT_EMAIL=renku@datascience.ch" >> $GITHUB_ENV
       - name: Push chart and images
-        uses: SwissDataScienceCenter/renku-actions/publish-chart@v1.4.1
+        uses: SwissDataScienceCenter/renku-actions/publish-chart@v1.4.5
         env:
           CHART_NAME: renku-core
           GITHUB_TOKEN: ${{ secrets.RENKUBOT_GITHUB_TOKEN }}
@@ -623,7 +623,7 @@
       - name: Wait for chart to be available
         run: sleep 120
       - name: Update component version
-        uses: SwissDataScienceCenter/renku-actions/update-component-version@v1.4.1
+        uses: SwissDataScienceCenter/renku-actions/update-component-version@v1.4.5
         env:
           CHART_NAME: renku-core
           GITHUB_TOKEN: ${{ secrets.RENKUBOT_GITHUB_TOKEN }}
diff --git a/CHANGES.rst b/CHANGES.rst
index a87df6d267..d8106745dd 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -18,6 +18,38 @@ Changes
 =======
 
+`2.5.0 `__ (2023-06-02)
+-------------------------------------------------------------------------------------------------------
+
+Bug Fixes
+~~~~~~~~~
+
+- **cli:** fix dataset update with external files
+  (`#3379 `__)
+  (`e02e5bf `__)
+- **cli:** fix special paths in workflow files and bump `toil` / `cwltool`
+  (`#3489 `__)
+  (`28086cf `__)
+- **service:** fix working with branches
+  (`#3472 `__)
+  (`0eaf204 `__)
+- **service:** return proper errors on migrations check endpoint
+  (`#3334 `__)
+  (`6237dc7 `__)
+
+Features
+~~~~~~~~
+
+- **core:** pass docker run args to session start
+  (`#3487 `__)
+  (`9cbb465 `__)
+- **core:** shell completion for sessions
+  (`#3450 `__)
+  (`9fa63dd `__)
+- **dataset:** export dataset keywords
+  (`#3454 `__)
+  (`feb3f14 `__)
+
 `2.4.1 `__ (2023-05-12)
 -------------------------------------------------------------------------------------------------------
diff --git a/conftest.py b/conftest.py
index 737dd58937..ee6ca10e91 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2017-2023 Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/design/003-interactive-session/003-interactive-session.md b/design/003-interactive-session/003-interactive-session.md
index dc9c99abf5..bf673bd177 100644
--- a/design/003-interactive-session/003-interactive-session.md
+++ b/design/003-interactive-session/003-interactive-session.md
@@ -160,10 +160,9 @@ class ISessionProvider:
         """
         pass
 
-    def session_list(self, project_name: str, config: Optional[Dict[str, Any]]) -> List[Session]:
+    def session_list(self, project_name: str) -> List[Session]:
         """Lists all the sessions currently running by the given session provider.
 
         :param project_name: Renku project name.
-        :param config: Path to the session provider specific configuration YAML.
         :returns: a list of sessions.
         """
         pass
diff --git a/docs/how-to-guides/shell-integration.rst b/docs/how-to-guides/shell-integration.rst
index 03c58547a2..c5d565b6f7 100644
--- a/docs/how-to-guides/shell-integration.rst
+++ b/docs/how-to-guides/shell-integration.rst
@@ -35,6 +35,7 @@ To activate tab completion for your supported shell run the following command af
 
     $ eval "$(_RENKU_COMPLETE=zsh_source renku)"
 
+You can put the same command in your shell's startup script to enable completion by default.
 After this not only sub-commands of ``renku`` will be auto-completed using tab, but for example in case of ``renku workflow execute`` the available ``Plans`` are going to be listed.
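For context on the ``ISessionProvider.session_list`` change above: the provider-specific ``config`` argument is dropped, so a provider is expected to resolve its own configuration internally. Below is a minimal sketch of the new signature in use; the ``DummySessionProvider`` class, its ``Session`` stand-in, and the hard-coded session data are hypothetical and for illustration only, not part of this diff.

```python
from typing import Dict, List


class Session:
    """Hypothetical stand-in for the Session model referenced in the design document."""

    def __init__(self, id: str, status: str, url: str) -> None:
        self.id = id
        self.status = status
        self.url = url


class DummySessionProvider:
    """Hypothetical provider sketch implementing the slimmed-down interface."""

    def __init__(self) -> None:
        # Provider-specific configuration is resolved here, since the
        # ``config`` parameter was removed from ``session_list``.
        self._sessions: Dict[str, List[Session]] = {
            "my-project": [Session("session-1", "running", "https://example.test/sessions/1")],
        }

    def session_list(self, project_name: str) -> List[Session]:
        """Lists all the sessions currently running for the given project."""
        return self._sessions.get(project_name, [])


# Usage sketch: returns the list of running sessions for the project, or [] if none.
sessions = DummySessionProvider().session_list("my-project")
```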
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index f4dfd9a11a..eef8849ab2 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -26,6 +26,7 @@ blog
 BMP
 bugfix
 Calamus
+cgroup
 chartpress
 Chartpress
 checksum
@@ -51,9 +52,9 @@ CWL
 datadir
 dataset
 datasets
-datetimes
 dataverse
 Dataverse
+datetimes
 deployer
 deserialization
 deserialize
@@ -61,6 +62,7 @@ Deserialize
 deserialized
 Deserialized
 deserializing
+dev
 discoverable
 Dockerfile
 dockerfiles
@@ -79,10 +81,6 @@ filesystem
 FilterFlights
 findable
 Fortran
-GitLab
-GitPython
-GraphQL
-graphviz
 gapped
 git-lfs
 gitattributes
@@ -91,8 +89,12 @@ github
 gitignore
 gitignored
 gitkeep
+GitLab
 gitlab
 gitlabClientSecret
+GitPython
+GraphQL
+graphviz
 hexsha
 Homebrew
 hostname
@@ -138,6 +140,7 @@ Matlab
 md5
 mergetool
 metadata
+metavar
 microservices
 middleware
 migrationscheck
@@ -162,11 +165,12 @@ OpenID
 openssl
 papermill
 param
-params
 parameterizable
 parametrization
 parametrize
 parametrized
+params
+PIDs
 pipenv
 PNG
 Postgresql
@@ -193,8 +197,8 @@ refactored
 Renga
 renku
 Renku
-renkulab
 renku-mls
+renkulab
 renv
 repo
 reproducibility
@@ -218,6 +222,7 @@ scala
 serializer
 sha
 shacl
+shm
 Slurm
 Snyk
 SPARQL
@@ -252,6 +257,7 @@ subsubcommands
 sudo
 supertype
 supertypes
+swappiness
 symlink
 symlinks
 templated
@@ -260,11 +266,11 @@ Tensorflow
 timestamp
 tinkerpop
 toolchain
+TTY
 tutorialLink
 txt
 typesystem
 Ubuntu
-Unmount
 ui
 Unescape
 unhandled
@@ -274,12 +280,14 @@ Unlink
 unlinking
 unmapped
 unmerged
+Unmount
+unmount
 unpushed
 untracked
 untracked
 updatable
-url
 uri
+url
 urls
 username
 validator
diff --git a/helm-chart/renku-core/Chart.yaml b/helm-chart/renku-core/Chart.yaml
index c80c81f88b..1e48464c37 100644
--- a/helm-chart/renku-core/Chart.yaml
+++ b/helm-chart/renku-core/Chart.yaml
@@ -3,4 +3,4 @@ appVersion: "1.0"
 description: A Helm chart for Kubernetes
 name: renku-core
 icon: https://avatars0.githubusercontent.com/u/53332360?s=400&u=a4311d22842343604ef61a8c8a1e5793209a67e9&v=4
-version: 2.4.1
+version: 2.5.0
diff --git a/helm-chart/renku-core/values.yaml b/helm-chart/renku-core/values.yaml
index 546f17a103..e346f00149 100644
--- a/helm-chart/renku-core/values.yaml
+++ b/helm-chart/renku-core/values.yaml
@@ -97,7 +97,7 @@ versions:
     fullnameOverride: ""
     image:
       repository: renku/renku-core
-      tag: "v2.4.1"
+      tag: "v2.5.0"
       pullPolicy: IfNotPresent
   v9:
     name: v9
diff --git a/poetry.lock b/poetry.lock
index 74da79367a..4654027909 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -38,28 +38,46 @@ files = [
 
 [[package]]
 name = "apispec"
-version = "5.2.2"
+version = "6.3.0"
 description = "A pluggable API specification generator. Currently supports the OpenAPI Specification (f.k.a. the Swagger specification)."
category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "apispec-5.2.2-py3-none-any.whl", hash = "sha256:f5f0d6b452c3e4a0e0922dce8815fac89dc4dbc758acef21fb9e01584d6602a5"}, - {file = "apispec-5.2.2.tar.gz", hash = "sha256:6ea6542e1ebffe9fd95ba01ef3f51351eac6c200a974562c7473059b9cd20aa7"}, + {file = "apispec-6.3.0-py3-none-any.whl", hash = "sha256:95a0b9355785df998bb0e9b939237a30ee4c7428fd6ef97305eae3da06b9b339"}, + {file = "apispec-6.3.0.tar.gz", hash = "sha256:6cb08d92ce73ff0b3bf46cb2ea5c00d57289b0f279fb0256a3df468182ba5344"}, ] [package.dependencies] +packaging = ">=21.3" PyYAML = {version = ">=3.10", optional = true, markers = "extra == \"yaml\""} [package.extras] -dev = ["PyYAML (>=3.10)", "flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "marshmallow (>=3.13.0)", "mock", "mypy (==0.950)", "prance[osv] (>=0.11)", "pre-commit (>=2.4,<3.0)", "pytest", "tox", "types-PyYAML"] -docs = ["marshmallow (>=3.13.0)", "pyyaml (==6.0)", "sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "sphinx-rtd-theme (==1.0.0)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "mypy (==0.950)", "pre-commit (>=2.4,<3.0)", "types-PyYAML"] -marshmallow = ["marshmallow (>=3.13.0)"] -tests = ["PyYAML (>=3.10)", "marshmallow (>=3.13.0)", "mock", "prance[osv] (>=0.11)", "pytest"] -validation = ["prance[osv] (>=0.11)"] +dev = ["PyYAML (>=3.10)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.23)", "marshmallow (>=3.13.0)", "mypy (==0.982)", "openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)", "pre-commit (>=2.4,<3.0)", "pytest", "tox", "types-PyYAML"] +docs = ["marshmallow (>=3.13.0)", "pyyaml (==6.0)", "sphinx (==5.2.3)", "sphinx-issues (==3.0.1)", "sphinx-rtd-theme (==1.0.0)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.23)", "mypy (==0.982)", "pre-commit (>=2.4,<3.0)", "types-PyYAML"] +marshmallow = ["marshmallow (>=3.18.0)"] +tests = ["PyYAML (>=3.10)", "marshmallow (>=3.13.0)", "openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)", "pytest"] +validation = ["openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)"] yaml = ["PyYAML (>=3.10)"] +[[package]] +name = "apispec-oneofschema" +version = "3.0.0" +description = "Plugin for apispec providing support for Marshmallow-OneOfSchema schemas" +category = "main" +optional = true +python-versions = "*" +files = [ + {file = "apispec-oneofschema-3.0.0.tar.gz", hash = "sha256:56a84492d2105340df059d477f28e67f63e38d14e40364229b68d7d512495664"}, + {file = "apispec_oneofschema-3.0.0-py2.py3-none-any.whl", hash = "sha256:8f106bcb394e764d532f1d43c658034e986aaa526a6ec1d20d1d7063aa1f0a86"}, +] + +[package.dependencies] +apispec = ">=3.0.0" +marshmallow = "<4.0.0" +marshmallow-oneofschema = "*" + [[package]] name = "apispec-webframeworks" version = "0.5.2" @@ -328,14 +346,14 @@ redis = ["redis (>=2.10.5)"] [[package]] name = "cachetools" -version = "5.3.0" +version = "5.3.1" description = "Extensible memoizing collections and decorators" category = "main" optional = false -python-versions = "~=3.7" +python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.0-py3-none-any.whl", hash = "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4"}, - {file = "cachetools-5.3.0.tar.gz", hash = "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14"}, + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, ] [[package]] @@ 
-462,19 +480,89 @@ files = [ [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + 
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "circus" version = "0.18.0" @@ -657,35 +745,31 @@ files = [ [[package]] name = "cryptography" -version = "39.0.2" +version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06"}, - {file = "cryptography-39.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011"}, - {file = "cryptography-39.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536"}, - {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5"}, - {file = "cryptography-39.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0"}, - {file = "cryptography-39.0.2-cp36-abi3-win32.whl", hash = "sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480"}, - {file = "cryptography-39.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1"}, - {file = "cryptography-39.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3"}, - {file = "cryptography-39.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3"}, - {file = "cryptography-39.0.2.tar.gz", hash = "sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, ] [package.dependencies] @@ -694,88 +778,91 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = 
["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] test-randomorder = ["pytest-randomly"] tox = ["tox"] [[package]] name = "cwl-upgrader" -version = "1.2.7" +version = "1.2.8" description = "Common Workflow Language standalone document upgrader" category = "main" optional = false python-versions = ">=3.6, <4" files = [ - {file = "cwl-upgrader-1.2.7.tar.gz", hash = "sha256:b5c63e4387aad7260ed294cf69a5f513109bcb90f712db6f3617afc0a096c8a3"}, - {file = "cwl_upgrader-1.2.7-py3-none-any.whl", hash = "sha256:a39cbc537b64b773ccb563301bd6f595d7813eba76b1f4e9d3243b09876ac841"}, + {file = "cwl-upgrader-1.2.8.tar.gz", hash = "sha256:d0ca216df461653b324aa42f5b16fb2403ec88729ad42c3ee2e3264b4811029f"}, + {file = "cwl_upgrader-1.2.8-py3-none-any.whl", hash = "sha256:b953460294677e7c18c610f0e4c07d5cd064032f5db4a09252fb2330c12a9595"}, ] [package.dependencies] "ruamel.yaml" = [ - {version = ">=0.16.0,<0.17.27", markers = "python_version >= \"3.10\""}, - {version = ">=0.15.78,<0.17.27", markers = "python_version >= \"3.8\""}, - {version = ">=0.15.98,<0.17.27", markers = "python_version >= \"3.9\""}, + {version = ">=0.16.0,<0.18", markers = "python_version >= \"3.10\""}, + {version = ">=0.15.78,<0.18", markers = "python_version >= \"3.8\""}, + {version = ">=0.15.98,<0.18", markers = "python_version >= \"3.9\""}, ] schema-salad = "*" setuptools = "*" [[package]] name = "cwl-utils" -version = "0.17" +version = "0.27" description = "" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "cwl-utils-0.17.tar.gz", hash = "sha256:479b7d189dc16e03b607dfe17046e943fc695ebeefba51408f37ca11292fe469"}, - {file = "cwl_utils-0.17-py3-none-any.whl", hash = "sha256:0ce3fb5b91eeb89d007870ca93b717a7321f33b89195663139fb02a64f9b4072"}, + {file = "cwl-utils-0.27.tar.gz", hash = "sha256:9b7e34665c36804793cbcaeb9fe0f2d70958b60ccc4e20d0666fefe016b5ee5f"}, + {file = "cwl_utils-0.27-py3-none-any.whl", hash = "sha256:59c9d1da842257ebbeb22622e5c5e0c3f63287460dfd99a33d8478f500b46014"}, ] [package.dependencies] -CacheControl = "*" +CacheControl = "<0.13" cwl-upgrader = ">=1.2.3" packaging = "*" rdflib = "*" requests = "*" +"ruamel.yaml" = {version = ">=0.17.6,<0.17.27", markers = "python_version >= \"3.7\""} schema-salad = ">=8.3.20220825114525,<9" +urllib3 = "<2" [package.extras] pretty = ["cwlformat"] [[package]] name = "cwltool" -version = "3.1.20220628170238" +version = "3.1.20230425144158" description = "Common workflow language reference implementation" category = "main" optional = false python-versions = ">=3.6, <4" files = [ - {file = "cwltool-3.1.20220628170238-py3-none-any.whl", hash = "sha256:37b8ce96a72a40a232a04c0f49e8053703aafbe885a239eff57512c66fcfb4c3"}, - {file = "cwltool-3.1.20220628170238.tar.gz", hash = "sha256:fbac3fa443109fcbb48e7174e02963fddb16542f1b43fa54c72dd000e102296b"}, + {file = "cwltool-3.1.20230425144158-py3-none-any.whl", hash = "sha256:a4fcf698924d8108be4c0ab459923080b3ad42b27e2c9e8d77ee602b48dd8e0b"}, + {file = "cwltool-3.1.20230425144158.tar.gz", hash = "sha256:b6307c536ec94a94cb9215a26057ea792b907144ca57b015699cb180ca309086"}, ] [package.dependencies] argcomplete = "*" bagit = ">=1.6.4" coloredlogs = "*" +cwl-utils = ">=0.22" mypy-extensions = "*" prov = "1.5.1" psutil = ">=5.6.6" pydot = ">=1.4.1" pyparsing 
= "!=3.0.2" -rdflib = ">=4.2.2,<6.2.0" +rdflib = ">=4.2.2,<6.4.0" requests = ">=2.6.1" "ruamel.yaml" = ">=0.15,<0.17.22" -schema-salad = ">=8.2.20211104054942,<9" +schema-salad = ">=8.4,<9" setuptools = "*" shellescape = ">=3.4.1,<3.9" typing-extensions = "*" [package.extras] -deps = ["galaxy-tool-util (>=21.1.0)"] +deps = ["galaxy-tool-util (>=22.1.2,<23)"] [[package]] name = "deal" @@ -889,14 +976,14 @@ files = [ [[package]] name = "dunamai" -version = "1.16.1" +version = "1.17.0" description = "Dynamic version generation" category = "main" optional = false python-versions = ">=3.5,<4.0" files = [ - {file = "dunamai-1.16.1-py3-none-any.whl", hash = "sha256:b9f169183147f6f1d3a5b3d913ffdd67247d90948006e205cbc499fe98d45554"}, - {file = "dunamai-1.16.1.tar.gz", hash = "sha256:4f3bc2c5b0f9d83fa9c90b943100273bb087167c90a0519ac66e9e2e0d2a8210"}, + {file = "dunamai-1.17.0-py3-none-any.whl", hash = "sha256:5aa4ac1085de10691269af021b10497261a5dd644f277e2a21822212604d877b"}, + {file = "dunamai-1.17.0.tar.gz", hash = "sha256:459381b585a1e78e4070f0d38a6afb4d67de2ee95064bf6b0438ec620dde0820"}, ] [package.dependencies] @@ -1145,13 +1232,14 @@ gitdb = ">=4.0.1,<5" [[package]] name = "grandalf" -version = "0.7" +version = "0.8" description = "Graph and drawing algorithms framework" category = "main" optional = false python-versions = "*" files = [ - {file = "grandalf-0.7-py3-none-any.whl", hash = "sha256:0ba234b8962420a093af39de82e89b22e9152d54b05d2fa30953ce39fa52aea3"}, + {file = "grandalf-0.8-py3-none-any.whl", hash = "sha256:793ca254442f4a79252ea9ff1ab998e852c1e071b863593e5383afee906b4185"}, + {file = "grandalf-0.8.tar.gz", hash = "sha256:2813f7aab87f0d20f334a3162ccfbcbf085977134a17a5b516940a93a77ea974"}, ] [package.dependencies] @@ -2038,44 +2126,6 @@ files = [ [package.dependencies] setuptools = "*" -[[package]] -name = "numpy" -version = "1.24.3" -description = "Fundamental package for array computing in Python" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = 
"numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, -] - [[package]] name = "ordered-set" version = "4.1.0" @@ -2219,77 +2269,82 @@ ptyprocess = ">=0.5" [[package]] name = "pillow" -version = "9.3.0" +version = "9.5.0" description = "Python Imaging Library (Fork)" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, - {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, - {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, - {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, - {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, - {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, - {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, - {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, - {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, - 
{file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, - {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, - {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, - {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, - {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = 
"Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, + {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, + {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, + {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, + {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, + {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, + {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, + {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, + {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, + {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, + {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, + {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, + {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = 
"sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, + {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, + {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, + {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, + {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, + {file = 
"Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, + {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -2398,14 +2453,14 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "pre-commit" -version = "3.3.1" +version = "3.3.2" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.1-py2.py3-none-any.whl", hash = "sha256:218e9e3f7f7f3271ebc355a15598a4d3893ad9fc7b57fe446db75644543323b9"}, - {file = "pre_commit-3.3.1.tar.gz", hash = "sha256:733f78c9a056cdd169baa6cd4272d51ecfda95346ef8a89bf93712706021b907"}, + {file = "pre_commit-3.3.2-py2.py3-none-any.whl", hash = "sha256:8056bc52181efadf4aac792b1f4f255dfd2fb5a350ded7335d251a68561e8cb6"}, + {file = "pre_commit-3.3.2.tar.gz", hash = "sha256:66e37bec2d882de1f17f88075047ef8962581f83c234ac08da21a0c58953d1f0"}, ] [package.dependencies] @@ -2579,52 +2634,52 @@ files = [ [[package]] name = "pydantic" -version = "1.10.2" +version = "1.10.7" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, - {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, - {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, - {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, - {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, - {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, - {file = 
"pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, - {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, - {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, - {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, - {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, - {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, - {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, - {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, - {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, - {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, - {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, - {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, - {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, - {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", 
hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, - {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, - {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, - {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, - {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, + {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, + {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, + {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, + {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, + {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, + {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, + {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, + {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, + {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, + {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, + {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, + {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, + {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, + {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, ] [package.dependencies] -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -2778,6 +2833,17 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pypubsub" +version = "4.0.3" +description = "Python Publish-Subscribe Package" +category = "main" +optional = false +python-versions = ">=3.3, <4" +files = [ + {file = "Pypubsub-4.0.3-py3-none-any.whl", hash = "sha256:7f716bae9388afe01ff82b264ba8a96a8ae78b42bb1f114f2716ca8f9e404e2a"}, +] + [[package]] name = "pyreadline3" version = "3.4.1" @@ -2792,23 +2858,24 @@ files = [ [[package]] name = "pyshacl" -version = "0.18.1" +version = "0.19.1" description = "Python SHACL Validator" category = "main" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pyshacl-0.18.1-py3-none-any.whl", hash = 
"sha256:a8aabc9261a07c825ae38fb34276e0e70fa599790bc717d672c676275fc2c2fb"}, - {file = "pyshacl-0.18.1.tar.gz", hash = "sha256:cb76d782163e9b1b3c5f048882f7675e5711c86691438e7f58cc4c3a49f07f95"}, + {file = "pyshacl-0.19.1-py3-none-any.whl", hash = "sha256:262a4528f26ece139537816effda24505561e6aa962097749602913d4f072fdd"}, + {file = "pyshacl-0.19.1.tar.gz", hash = "sha256:eba29e38f6a08033ca579502140a688c0e36795b2b89f169da977197e942e369"}, ] [package.dependencies] owlrl = ">=6.0.2,<7" +packaging = ">=21.3" prettytable = ">=2.2.1,<3.0.0" rdflib = ">=6.1.1,<8" [package.extras] -dev-lint = ["black (==21.9b0)", "flake8 (>=3.8.0,<4.0.0)", "isort (>=5.7.0,<6.0.0)"] +dev-lint = ["black (==22.6.0)", "flake8 (>=3.8.0,<4.0.0)", "isort (>=5.7.0,<6.0.0)"] dev-type-checking = ["mypy (>=0.800,<0.801)", "types-setuptools"] js = ["pyduktape2 (>=0.4.1,<0.5.0)"] jsonld = ["rdflib-jsonld (>=0.4.0,<0.6)"] @@ -2918,22 +2985,6 @@ files = [ flake8 = ">=4.0" pytest = ">=7.0" -[[package]] -name = "pytest-forked" -version = "1.6.0" -description = "run tests in isolated forked subprocesses" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f"}, - {file = "pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0"}, -] - -[package.dependencies] -py = "*" -pytest = ">=3.10" - [[package]] name = "pytest-lazy-fixture" version = "0.6.3" @@ -2985,14 +3036,14 @@ pytest-cache = "*" [[package]] name = "pytest-recording" -version = "0.12.1" +version = "0.12.2" description = "A pytest plugin that allows you recording of network interactions via VCR.py" category = "dev" optional = false python-versions = ">=3.5" files = [ - {file = "pytest-recording-0.12.1.tar.gz", hash = "sha256:0d1f36d10dea5090cab8ecd230e5dc937c97b9fed193874b330d2926ddea028f"}, - {file = "pytest_recording-0.12.1-py3-none-any.whl", hash = "sha256:6b5546b822b270b8d7338f70950453be45e4aa5bfd884d97583dfa47288380f9"}, + {file = "pytest-recording-0.12.2.tar.gz", hash = "sha256:7c8949c24e5546a699f8fbbff0c5d6896cd09463378ac3d3f1ebb110d2186847"}, + {file = "pytest_recording-0.12.2-py3-none-any.whl", hash = "sha256:f055f97eb98bbefd0453a7796aa3a6833502c173421928b9d878cf1420b36406"}, ] [package.dependencies] @@ -3017,20 +3068,19 @@ pytest = ">=5.0.0" [[package]] name = "pytest-xdist" -version = "2.5.0" -description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +version = "3.3.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, - {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, + {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, ] [package.dependencies] execnet = ">=1.1" pytest = ">=6.2.0" -pytest-forked = "*" [package.extras] psutil = ["psutil (>=3.0)"] @@ -3186,89 +3236,89 @@ files = [ [[package]] name = "pyzmq" -version = "25.0.2" +version = "25.1.0" description = "Python bindings for 0MQ" category = 
"main" optional = true python-versions = ">=3.6" files = [ - {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ac178e666c097c8d3deb5097b58cd1316092fc43e8ef5b5fdb259b51da7e7315"}, - {file = "pyzmq-25.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:659e62e1cbb063151c52f5b01a38e1df6b54feccfa3e2509d44c35ca6d7962ee"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8280ada89010735a12b968ec3ea9a468ac2e04fddcc1cede59cb7f5178783b9c"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b5eeb5278a8a636bb0abdd9ff5076bcbb836cd2302565df53ff1fa7d106d54"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a2e5fe42dfe6b73ca120b97ac9f34bfa8414feb15e00e37415dbd51cf227ef6"}, - {file = "pyzmq-25.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:827bf60e749e78acb408a6c5af6688efbc9993e44ecc792b036ec2f4b4acf485"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b504ae43d37e282301da586529e2ded8b36d4ee2cd5e6db4386724ddeaa6bbc"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb1f69a0a2a2b1aae8412979dd6293cc6bcddd4439bf07e4758d864ddb112354"}, - {file = "pyzmq-25.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b9c9cc965cdf28381e36da525dcb89fc1571d9c54800fdcd73e3f73a2fc29bd"}, - {file = "pyzmq-25.0.2-cp310-cp310-win32.whl", hash = "sha256:24abbfdbb75ac5039205e72d6c75f10fc39d925f2df8ff21ebc74179488ebfca"}, - {file = "pyzmq-25.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a821a506822fac55d2df2085a52530f68ab15ceed12d63539adc32bd4410f6e"}, - {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:9af0bb0277e92f41af35e991c242c9c71920169d6aa53ade7e444f338f4c8128"}, - {file = "pyzmq-25.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:54a96cf77684a3a537b76acfa7237b1e79a8f8d14e7f00e0171a94b346c5293e"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88649b19ede1cab03b96b66c364cbbf17c953615cdbc844f7f6e5f14c5e5261c"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:715cff7644a80a7795953c11b067a75f16eb9fc695a5a53316891ebee7f3c9d5"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b3f0f066b4f1d17383aae509bacf833ccaf591184a1f3c7a1661c085063ae"}, - {file = "pyzmq-25.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d488c5c8630f7e782e800869f82744c3aca4aca62c63232e5d8c490d3d66956a"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:38d9f78d69bcdeec0c11e0feb3bc70f36f9b8c44fc06e5d06d91dc0a21b453c7"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3059a6a534c910e1d5d068df42f60d434f79e6cc6285aa469b384fa921f78cf8"}, - {file = "pyzmq-25.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6526d097b75192f228c09d48420854d53dfbc7abbb41b0e26f363ccb26fbc177"}, - {file = "pyzmq-25.0.2-cp311-cp311-win32.whl", hash = "sha256:5c5fbb229e40a89a2fe73d0c1181916f31e30f253cb2d6d91bea7927c2e18413"}, - {file = "pyzmq-25.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed15e3a2c3c2398e6ae5ce86d6a31b452dfd6ad4cd5d312596b30929c4b6e182"}, - {file = "pyzmq-25.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:032f5c8483c85bf9c9ca0593a11c7c749d734ce68d435e38c3f72e759b98b3c9"}, - {file = 
"pyzmq-25.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:374b55516393bfd4d7a7daa6c3b36d6dd6a31ff9d2adad0838cd6a203125e714"}, - {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08bfcc21b5997a9be4fefa405341320d8e7f19b4d684fb9c0580255c5bd6d695"}, - {file = "pyzmq-25.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1a843d26a8da1b752c74bc019c7b20e6791ee813cd6877449e6a1415589d22ff"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b48616a09d7df9dbae2f45a0256eee7b794b903ddc6d8657a9948669b345f220"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d4427b4a136e3b7f85516c76dd2e0756c22eec4026afb76ca1397152b0ca8145"}, - {file = "pyzmq-25.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:26b0358e8933990502f4513c991c9935b6c06af01787a36d133b7c39b1df37fa"}, - {file = "pyzmq-25.0.2-cp36-cp36m-win32.whl", hash = "sha256:c8fedc3ccd62c6b77dfe6f43802057a803a411ee96f14e946f4a76ec4ed0e117"}, - {file = "pyzmq-25.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2da6813b7995b6b1d1307329c73d3e3be2fd2d78e19acfc4eff2e27262732388"}, - {file = "pyzmq-25.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a35960c8b2f63e4ef67fd6731851030df68e4b617a6715dd11b4b10312d19fef"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2a0b880ab40aca5a878933376cb6c1ec483fba72f7f34e015c0f675c90b20"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:85762712b74c7bd18e340c3639d1bf2f23735a998d63f46bb6584d904b5e401d"}, - {file = "pyzmq-25.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:64812f29d6eee565e129ca14b0c785744bfff679a4727137484101b34602d1a7"}, - {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:510d8e55b3a7cd13f8d3e9121edf0a8730b87d925d25298bace29a7e7bc82810"}, - {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b164cc3c8acb3d102e311f2eb6f3c305865ecb377e56adc015cb51f721f1dda6"}, - {file = "pyzmq-25.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28fdb9224a258134784a9cf009b59265a9dde79582fb750d4e88a6bcbc6fa3dc"}, - {file = "pyzmq-25.0.2-cp37-cp37m-win32.whl", hash = "sha256:dd771a440effa1c36d3523bc6ba4e54ff5d2e54b4adcc1e060d8f3ca3721d228"}, - {file = "pyzmq-25.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:9bdc40efb679b9dcc39c06d25629e55581e4c4f7870a5e88db4f1c51ce25e20d"}, - {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1f82906a2d8e4ee310f30487b165e7cc8ed09c009e4502da67178b03083c4ce0"}, - {file = "pyzmq-25.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:21ec0bf4831988af43c8d66ba3ccd81af2c5e793e1bf6790eb2d50e27b3c570a"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbce982a17c88d2312ec2cf7673985d444f1beaac6e8189424e0a0e0448dbb3"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e1d2f2d86fc75ed7f8845a992c5f6f1ab5db99747fb0d78b5e4046d041164d2"}, - {file = "pyzmq-25.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e92ff20ad5d13266bc999a29ed29a3b5b101c21fdf4b2cf420c09db9fb690e"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edbbf06cc2719889470a8d2bf5072bb00f423e12de0eb9ffec946c2c9748e149"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:77942243ff4d14d90c11b2afd8ee6c039b45a0be4e53fb6fa7f5e4fd0b59da39"}, - {file = "pyzmq-25.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ab046e9cb902d1f62c9cc0eca055b1d11108bdc271caf7c2171487298f229b56"}, - {file = "pyzmq-25.0.2-cp38-cp38-win32.whl", hash = "sha256:ad761cfbe477236802a7ab2c080d268c95e784fe30cafa7e055aacd1ca877eb0"}, - {file = "pyzmq-25.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8560756318ec7c4c49d2c341012167e704b5a46d9034905853c3d1ade4f55bee"}, - {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:ab2c056ac503f25a63f6c8c6771373e2a711b98b304614151dfb552d3d6c81f6"}, - {file = "pyzmq-25.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cca8524b61c0eaaa3505382dc9b9a3bc8165f1d6c010fdd1452c224225a26689"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb9f7eae02d3ac42fbedad30006b7407c984a0eb4189a1322241a20944d61e5"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5eaeae038c68748082137d6896d5c4db7927e9349237ded08ee1bbd94f7361c9"}, - {file = "pyzmq-25.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a31992a8f8d51663ebf79df0df6a04ffb905063083d682d4380ab8d2c67257c"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6a979e59d2184a0c8f2ede4b0810cbdd86b64d99d9cc8a023929e40dce7c86cc"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f124cb73f1aa6654d31b183810febc8505fd0c597afa127c4f40076be4574e0"}, - {file = "pyzmq-25.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:65c19a63b4a83ae45d62178b70223adeee5f12f3032726b897431b6553aa25af"}, - {file = "pyzmq-25.0.2-cp39-cp39-win32.whl", hash = "sha256:83d822e8687621bed87404afc1c03d83fa2ce39733d54c2fd52d8829edb8a7ff"}, - {file = "pyzmq-25.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:24683285cc6b7bf18ad37d75b9db0e0fefe58404e7001f1d82bf9e721806daa7"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a4b4261eb8f9ed71f63b9eb0198dd7c934aa3b3972dac586d0ef502ba9ab08b"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:62ec8d979f56c0053a92b2b6a10ff54b9ec8a4f187db2b6ec31ee3dd6d3ca6e2"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:affec1470351178e892121b3414c8ef7803269f207bf9bef85f9a6dd11cde264"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc71111433bd6ec8607a37b9211f4ef42e3d3b271c6d76c813669834764b248"}, - {file = "pyzmq-25.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6fadc60970714d86eff27821f8fb01f8328dd36bebd496b0564a500fe4a9e354"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:269968f2a76c0513490aeb3ba0dc3c77b7c7a11daa894f9d1da88d4a0db09835"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f7c8b8368e84381ae7c57f1f5283b029c888504aaf4949c32e6e6fb256ec9bf0"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25e6873a70ad5aa31e4a7c41e5e8c709296edef4a92313e1cd5fc87bbd1874e2"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b733076ff46e7db5504c5e7284f04a9852c63214c74688bdb6135808531755a3"}, - {file = "pyzmq-25.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:a6f6ae12478fdc26a6d5fdb21f806b08fa5403cd02fd312e4cb5f72df078f96f"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67da1c213fbd208906ab3470cfff1ee0048838365135a9bddc7b40b11e6d6c89"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531e36d9fcd66f18de27434a25b51d137eb546931033f392e85674c7a7cea853"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34a6fddd159ff38aa9497b2e342a559f142ab365576284bc8f77cb3ead1f79c5"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b491998ef886662c1f3d49ea2198055a9a536ddf7430b051b21054f2a5831800"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5d496815074e3e3d183fe2c7fcea2109ad67b74084c254481f87b64e04e9a471"}, - {file = "pyzmq-25.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:56a94ab1d12af982b55ca96c6853db6ac85505e820d9458ac76364c1998972f4"}, - {file = "pyzmq-25.0.2.tar.gz", hash = "sha256:6b8c1bbb70e868dc88801aa532cae6bd4e3b5233784692b786f17ad2962e5149"}, + {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, + {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, + {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, + {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, + {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, + {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, + {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, + {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, + {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, + {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, + {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, + {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, + {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, + {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, + {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, + {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, + {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, + {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, + {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, + {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, + {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, + {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, + {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, + {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, + {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, + {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, + {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, + {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, + {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, + {file = 
"pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, + {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, + {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, + {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, + {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, ] [package.dependencies] @@ -3276,25 +3326,25 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "rdflib" -version = "6.1.1" +version = "6.3.2" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.7,<4.0" files = [ - {file = "rdflib-6.1.1-py3-none-any.whl", hash = "sha256:fc81cef513cd552d471f2926141396b633207109d0154c8e77926222c70367fe"}, - {file = "rdflib-6.1.1.tar.gz", hash = "sha256:8dbfa0af2990b98471dacbc936d6494c997ede92fd8ed693fb84ee700ef6f754"}, + {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, + {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, ] [package.dependencies] -isodate = "*" -pyparsing = "*" -setuptools = "*" +isodate = ">=0.6.0,<0.7.0" +pyparsing = ">=2.1.0,<4" [package.extras] -docs = ["sphinx (<5)", "sphinxcontrib-apidoc"] -html = ["html5lib"] -tests = ["berkeleydb", "html5lib", "networkx", "pytest", "pytest-cov", "pytest-subtests"] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5lib (>=1.0,<2.0)"] +lxml = ["lxml (>=4.3.0,<5.0.0)"] +networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "redis" @@ -3338,21 +3388,21 @@ tests = ["check-manifest (>=0.25)", "isort (>=4.2.2)", "pkginfo (<1.9)", "pydocs [[package]] name = "requests" -version = "2.28.1" +version = "2.31.0" description = "Python HTTP for Humans." category = "main" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -3416,14 +3466,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rq" -version = "1.13.0" +version = "1.14.1" description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." 
category = "main" optional = true python-versions = ">=3.6" files = [ - {file = "rq-1.13.0-py2.py3-none-any.whl", hash = "sha256:621966d7cbf96d5609557a4bd3fd77f749d6d10997d2e353a3e89a14e08eea16"}, - {file = "rq-1.13.0.tar.gz", hash = "sha256:5bb0380a17597200520731686766bb72faf16ebffb602663560d91ea2c9e7103"}, + {file = "rq-1.14.1-py2.py3-none-any.whl", hash = "sha256:37e003db1da205e08db6cc4653b7c6ccfd9292000954240308abfce2ebde43ba"}, + {file = "rq-1.14.1.tar.gz", hash = "sha256:5fb86038922ddd76eb2d9aa0adeec6dcf64f159dbbe730b26358b1417120dd44"}, ] [package.dependencies] @@ -3515,64 +3565,63 @@ files = [ [[package]] name = "schema-salad" -version = "8.4.20230511084951" +version = "8.4.20230601112322" description = "Schema Annotations for Linked Avro Data (SALAD)" category = "main" optional = false python-versions = ">=3.6,<3.12" files = [ - {file = "schema-salad-8.4.20230511084951.tar.gz", hash = "sha256:c96b6a81a40f4ee94b7c49416bdb41808428af368494053a33c168e33fbe484d"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6cad9815fe212aafcceaa31953689695e6a6d8000785d0a6c974af8ed4f8b25d"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db5e4cee821d0189ca4187be064b812db5ed2ee7b3ab544fa7ee8d34257e3f1e"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00c07378da76faa2478605d46999560781e1afd83b80366554143d9b77f66b53"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbdd23bb33b3a9cc2d2ecf2a3674641a462f47f9a8bc0ed2c6f6f2b8e72c0583"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:58971d5e51be3e46a3326955bcb08ced57666e500a395f2ce3895ff15d0b1ab7"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c5af4a029a58b267b866716a602af9e45c09d0f228ddfcca4c4f1b1273ec835"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19165473e512b7e6b1cd55389232233c0d6fdc08dea00d578a843f36d15bb59e"}, - {file = "schema_salad-8.4.20230511084951-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:339b5ff7801ff33883b44fb206c2eaafb3a4f077abd8e48ee3e36ef0c8d063be"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5bcef21a270cf82be4b222e7385d665c8d24f4ab2b44160cf3350b06419cc102"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:38087671b85baf288d9014bc31cc97093faec387009c5984a8cf1509ff57c408"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86733e2207f601f255ff04084bbab717a469f32a437f6c18ffd5867bb4e5cb72"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6d6cdcf5b09626a3b2e5d90abcd7d21a52889c3c6bfb87a335e501ca193dbdf7"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:90975cc53b99d8df98c31107db6b618c33aead191d603adde78333f2418684cb"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069cdfec88f5b57664673a2fb75078f8e1417f66e73a54cc4da5e7d10cc6446d"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b398f9c5d94209862b51af3f27f45b8a96d76533b814068e225dfb2a27e96837"}, - {file = "schema_salad-8.4.20230511084951-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2fb2c6d38ad417a67eed3e80b4336201a87977e335f4447d3bebef4d5778d87"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e9b3e1962dec2a4b143aa71417d2af471c85ff79736ce2e8dedeede111717fae"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95e1de8de192b33736efa5c6652ff4987b053f4435cbb3fc0a4f715fa09303e"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3707bd9b6cbc38ce92deb6ca5ff18b1d13f6126ccf50263d837fe2b542c8665"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:30a04d2152203a4487bb9d16ce3bca07a02c735ccc711c03f11af3bd31324bc6"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2dfd27015fe87536c1979b1a24171a812d2ee5f7ef220f852200f6f226e831c9"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752504b4ee374c587eedbd7e2770a4ad59b05e0e282a3873ceea0ff76302810e"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:befdbd3a80c4be84fda21df215f1c1b5cd3dc044b79bd978669386fa2b295d32"}, - {file = "schema_salad-8.4.20230511084951-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4d4aa7a8b92c249862fe60c89113f7e6677dfcd3c994c80c49c2a4abe3ca8e8f"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:c0d69e9e9ca4a8e6f816d7169b1a9dc5fb6d5d3b91cd36f9ebe03df6bec87e7b"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d318e6b97ef6f312f90991ed21365c4e95af62da6f7b8fce5bff1ebca6e9c75"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a9277e311026893e1d180dc22f5d7e6085953ed53d11d89930c62633026d67"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f5bba0b4a220185cbb96c04478131f13d9d7100c099bbfd5e8eec298ed5b02df"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:765aa183d4dd0989bd485f5bd2700363522255aab4ccca6a4bcb1eb84a30e395"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b55acd992b07a639944b5d86b1d48b2ff57749aa351d1cbba1c1377a9842ec46"}, - {file = "schema_salad-8.4.20230511084951-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bbb776cf57a54ce900f354e2730f8e55f674a4e78fdb7773fd981c2f7df2cf9b"}, - {file = 
"schema_salad-8.4.20230511084951-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:16bc7c19cea3c69280473d971e680f16a03b4fa593e61588eb3bd149637f1ae2"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:dcf82a3393c92f48d8c41f05e5423a0d745490d6a3c460bca84aa44df9d58af7"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8aa816447c3c0c87e10dcd214967d706f0d3fee7bb0e646c89d414a0f85c801b"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92417f6ef542a4b9bd5f73f866563d6629e8a4a61f76ff490001914fd0d079a"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c01d81d5c4af780d24032f9554b5de8fd57a86342d49a59d78e86158a11c6650"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2cf9684ae63f3c7cbebea07d6e1b945f8e439a5930bc6eaa3618cf5734e2af6e"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6deaa7798c7ebaea355dfbb2dda835fbc8bb0ae3bfa88ca2ef4505793d0f7f0"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c64f461c934abeb46a0803c101677d7a163555c499df9ad4b77303978879d21a"}, - {file = "schema_salad-8.4.20230511084951-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cf966b084eb45330471f24ce090438988368b0440084fc40f740967c2f402137"}, - {file = "schema_salad-8.4.20230511084951-py3-none-any.whl", hash = "sha256:82d45f86ebdd9e0d9b34a98fdb819691e68914ebd823d40994dc85308ac9ff98"}, + {file = "schema-salad-8.4.20230601112322.tar.gz", hash = "sha256:8d2c8ac3caf2eb404bdd94a4c2a0e31345c5cc0884801d1c5dc5ca86d18040b4"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5b52e0199c7e02835e808dae114a9aaad603f42962efb9850fe9693c980a11ce"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f9edafac3c2b27584a24ab3be98e09cdda38448b10755b87c20f3ce518c97fd"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:301a5686ec6142dfc36f51530f297764a422e12c7a99b981c6d92552852cbd39"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:178db41bbc46d68594399b7435969f1ebaba64d96fa9efb08400b16861c08c72"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4cadab0f20817a1a80ad89b98099657339e553c41ea07f7ac102603e8f73d648"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08da37193385254bca7fdb4748ef6c08cb283dd669f0a56a05a265688463856f"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:59d9373f7576e011fb885e4c452a3c1402cb3fa529488198a20951f611ca2d25"}, + {file = "schema_salad-8.4.20230601112322-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e30644406bb7446531e4cd52f3c6bb60086ccaf6beb091be1660f39468b0fb18"}, + {file = 
"schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b24fd886b15634ea2819fd89b47972867b48beb33307d919e0860f9d3fdb37fe"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4d53cfbc3d0ba983f2c977e0e1e99e6207453ccfcf4ade393a29afdce32a88e"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2d35b578a882364596af0dc0a46aa4b77af913f992bd56da1efb591b0e6fc"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b9787319549edb4d36e44504f36f4a025fbae7cbf1eba2ebe1a647bfde0d7991"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9090f654b1ce0fb98be419340d488fb539fe98bb3ac4a23fefd7dc71f173bf90"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c10de96857d24efc7b755667ba16f219e042ddb123ba6f4a8c4b429a14d9c8"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ef8a227d974e87bcdb4ec98c32a9354881586a0520985e3fa9fa509123615c2a"}, + {file = "schema_salad-8.4.20230601112322-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54ee3b017c42c3f442d39e16979d9f18b30e02db7817ecb73682fe75ea0810b6"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6ada405a5cbbecd43c73bbc067abb080e29c93eea8ba0a3f30efdb420f52006a"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fa2fa08fcded0b0bedc46f4d3582ab9366eaedadc48417e3f67fd1836f300aa7"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:249e249f07f14f47e59f1b47fd35de661089896e2055754ee9d5dbec71ab6413"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5d979dea308cc90d6a1cd974f0a4f94cd30c75edaced6b520c507047891c68ae"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:368e419e85ab85661680d40b3b9ab1efcdfb43ad12a44f797ac68418053c5baf"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b107e6ef58698e7953b4eb2ed0fa1da25ba07f470f209a2aaa6512f86745c8c7"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:033f6c5dca6603d7ba12a09081cb7cd7ece8ebf0caa6ba3cf3d1af8b075ac321"}, + {file = "schema_salad-8.4.20230601112322-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ee55dd6d8a0fc08881c1c312510dc9afbf5ddf4c0271958f1b29345512fbb183"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5aaf0d240d93b5bcc99817168fe523a73bb0e9fc0daf90703656209bfbfa3cf3"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07880fbda95c07128e7058da605766fb79d75e61aef3ef0c022316a302f1c625"}, + {file = 
"schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ec4fb9c8c307202a4c394557ccf131e00f57d9c50bc64957046d302d6ca432b"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:af210dbd0fdf68596007026ed2cabd33b54227e49b19549e1fee7963a8381390"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2381319d3908b21afff3b162a8399d61daa28aabe50b1c6ca7e9ed1ddef9e884"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a230d1a1c344712f212c74d046da78c630fd32a422caa5d1f588acff43ec1fc"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:330e04111a1d24b4ac4283e50309d35716e65682a7d6917cee259c5ddcd9271c"}, + {file = "schema_salad-8.4.20230601112322-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85e069e711364fd65883b7571ce7e9c007e455063ba5fa60e47f0e16d7b5d9f6"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:be42d6ae55c0fd95e15d7fb60bb2caa13b2461eb29a7531ed36c3ba086a6fcf5"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91eb43c02f2f3af248b35bbe04963e9437fc5f1c8b4cf7b94021ea2dc2428fda"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282b4603d293529692c67f3d1e12c9299e97ff9f76ce58ee5462f18e8f463df"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a26c4d9afd044054f6a4deef9236b278c103bcb85313d6da38b149b93d59e902"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1177cc97bdd4131b389b9104c3d87470b9a0a3ed9bead3d4877c0650b5c870c6"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6bd461b0053307278bc3a30c6c6277e4cfdad63ba865c6cf6a3d97e43ba296b"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edf61fbbfc1358699a986df7f7632fb25f1892b0a0e1fb805fdd163e78a037ed"}, + {file = "schema_salad-8.4.20230601112322-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f3e154304f054169d88872e749334b83476e3dc7a686d4599486b430e96775b2"}, + {file = "schema_salad-8.4.20230601112322-py3-none-any.whl", hash = "sha256:0e531245757e4ff5fbda6a0fe4749f95f2ed3818870cd2e09417f9bee93cf730"}, ] [package.dependencies] -CacheControl = {version = ">=0.11.7,<0.13", extras = ["filecache"]} +CacheControl = {version = ">=0.11.7,<0.14", extras = ["filecache"]} mistune = ">=2.0.3,<2.1" mypy-extensions = "*" rdflib = ">=4.2.2,<7.0.0" requests = ">=1.0" -"ruamel.yaml" = {version = ">=0.17.6,<0.17.27", markers = "python_version >= \"3.7\""} -urllib3 = "<2" +"ruamel.yaml" = {version = ">=0.17.6,<0.18", markers = "python_version >= \"3.7\""} [package.extras] docs = ["pytest (<8)", "sphinx (>=2.2)", "sphinx-autoapi", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-autoprogram", "typed-ast"] @@ -3580,21 +3629,22 @@ pycodegen = ["black"] [[package]] name = "sentry-sdk" -version = "1.20.0" +version = "1.23.1" description = "Python client for Sentry 
(https://sentry.io)" category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.20.0.tar.gz", hash = "sha256:a3410381ae769a436c0852cce140a5e5e49f566a07fb7c2ab445af1302f6ad89"}, - {file = "sentry_sdk-1.20.0-py2.py3-none-any.whl", hash = "sha256:0ad6bbbe78057b8031a07de7aca6d2a83234e51adc4d436eaf8d8c697184db71"}, + {file = "sentry-sdk-1.23.1.tar.gz", hash = "sha256:0300fbe7a07b3865b3885929fb863a68ff01f59e3bcfb4e7953d0bf7fd19c67f"}, + {file = "sentry_sdk-1.23.1-py2.py3-none-any.whl", hash = "sha256:a884e2478e0b055776ea2b9234d5de9339b4bae0b3a5e74ae43d131db8ded27e"}, ] [package.dependencies] blinker = {version = ">=1.1", optional = true, markers = "extra == \"flask\""} certifi = "*" flask = {version = ">=0.11", optional = true, markers = "extra == \"flask\""} -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} +markupsafe = {version = "*", optional = true, markers = "extra == \"flask\""} +urllib3 = {version = ">=1.26.11,<2.0.0", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -3606,10 +3656,11 @@ chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] grpcio = ["grpcio (>=1.21.1)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] @@ -3624,19 +3675,19 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.7.2" +version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, + {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv 
(>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -3946,14 +3997,13 @@ files = [ [[package]] name = "toil" -version = "5.7.1" +version = "5.10.0" description = "Pipeline management software for clusters." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "toil-5.7.1-py3-none-any.whl", hash = "sha256:81041ba89867a3a37e265a5e073c60a6dfe9ed03342fb3a53bb48f7c04ef1b4b"}, - {file = "toil-5.7.1.tar.gz", hash = "sha256:99a11bb1d138068861013f78f72489ff6ed9af02ceed8a551879a73738c9ebe7"}, + {file = "toil-5.10.0-py3-none-any.whl", hash = "sha256:3a56046c456e6c9c7814c2ae677d9b47987a2ce041f09395fb8f00fa29a1cd1a"}, ] [package.dependencies] @@ -3963,21 +4013,24 @@ docker = ">=3.7.2,<6" enlighten = ">=1.5.2,<2" psutil = ">=3.0.1,<6" py-tes = ">=0.4.2,<1" +PyPubSub = ">=4.0.3,<5" python-dateutil = "*" pytz = ">=2012" requests = ">=2,<3" +typing-extensions = "*" +urllib3 = ">=1.26.0,<2.0.0" [package.extras] -all = ["apache-libcloud (>=2.2.1,<3)", "boto (>=2.48.0,<3)", "boto3 (>=1.20.46,<2)", "boto3-stubs[iam,s3,sdb] (==1.24.0)", "celery (>=5.1.0,<6)", "connexion[swagger-ui] (>=2.10.0,<3)", "cwltool (==3.1.20220628170238)", "flask (>=2.0,<3)", "flask-cors (==3.0.10)", "galaxy-tool-util", "google-cloud-storage (>=1.6.0,<2)", "gunicorn (==20.1.0)", "idna (>=2)", "kubernetes (>=12.0.1,<22)", "mypy-boto3-iam (==1.24.0)", "mypy-boto3-s3 (==1.24.0)", "mypy-boto3-sdb (==1.24.0)", "networkx (>=2,<2.8.5)", "pymesos (>=0.3.15,<0.4)", "pynacl (>=1.4.0,<2)", "ruamel.yaml (>=0.15,<0.17.22)", "ruamel.yaml (>=0.15,<=0.17.21)", "ruamel.yaml.clib (>=0.2.6)", "wdlparse (==0.1.0)", "werkzeug (>=2.0,<3)", "wes-service (>=4.0.0,<5)"] -aws = ["boto (>=2.48.0,<3)", "boto3 (>=1.20.46,<2)", "boto3-stubs[iam,s3,sdb] (==1.24.0)", "mypy-boto3-iam (==1.24.0)", "mypy-boto3-s3 (==1.24.0)", "mypy-boto3-sdb (==1.24.0)"] -cwl = ["cwltool (==3.1.20220628170238)", "galaxy-tool-util", "networkx (>=2,<2.8.5)", "ruamel.yaml (>=0.15,<=0.17.21)", "ruamel.yaml.clib (>=0.2.6)"] +all = ["CacheControl[filecache]", "apache-libcloud (>=2.2.1,<3)", "boto (>=2.48.0,<3)", "boto3-stubs[boto3,iam,s3,sdb,sts] (>=1.20.46,<2)", "celery (>=5.1.0,<6)", "connexion[swagger-ui] (>=2.10.0,<3)", "cwltool (==3.1.20230425144158)", "flask (>=2.0,<3)", "flask-cors (==3.0.10)", "galaxy-tool-util", "google-cloud-storage (>=2,<=2.8.0)", "gunicorn (==20.1.0)", "idna (>=2)", "kubernetes (>=12.0.1,<22)", "kubernetes-stubs (==v22.6.0post1)", "miniwdl (==1.9.1)", "networkx (>=2,<2.8.9)", "pymesos (>=0.3.15,<0.4)", "pynacl (>=1.4.0,<2)", "ruamel.yaml (>=0.15,<0.17.22)", "ruamel.yaml (>=0.15,<=0.17.21)", "ruamel.yaml.clib (>=0.2.6)", "schema-salad (>=8.4.20230128170514,<9)", "types-PyYAML", "types-urllib3", "wdlparse (==0.1.0)", "werkzeug (>=2.0,<3)", "wes-service (>=4.0.0,<5)"] +aws = ["boto (>=2.48.0,<3)", "boto3-stubs[boto3,iam,s3,sdb,sts] (>=1.20.46,<2)"] +cwl = ["CacheControl[filecache]", "cwltool (==3.1.20230425144158)", "galaxy-tool-util", "networkx (>=2,<2.8.9)", "ruamel.yaml (>=0.15,<=0.17.21)", "ruamel.yaml.clib (>=0.2.6)", "schema-salad (>=8.4.20230128170514,<9)"] encryption = ["pynacl (>=1.4.0,<2)"] -google = ["apache-libcloud (>=2.2.1,<3)", "google-cloud-storage (>=1.6.0,<2)"] -htcondor = ["htcondor (>=8.6.0,<9)"] -kubernetes = ["idna (>=2)", "kubernetes (>=12.0.1,<22)"] +google = ["apache-libcloud (>=2.2.1,<3)", "google-cloud-storage (>=2,<=2.8.0)"] 
+htcondor = ["htcondor (>=10.2.0.post1,<11)"] +kubernetes = ["idna (>=2)", "kubernetes (>=12.0.1,<22)", "kubernetes-stubs (==v22.6.0post1)", "types-PyYAML", "types-urllib3"] mesos = ["pymesos (>=0.3.15,<0.4)"] server = ["celery (>=5.1.0,<6)", "connexion[swagger-ui] (>=2.10.0,<3)", "flask (>=2.0,<3)", "flask-cors (==3.0.10)", "gunicorn (==20.1.0)", "ruamel.yaml (>=0.15,<0.17.22)", "werkzeug (>=2.0,<3)", "wes-service (>=4.0.0,<5)"] -wdl = ["wdlparse (==0.1.0)"] +wdl = ["miniwdl (==1.9.1)", "wdlparse (==0.1.0)"] [[package]] name = "toml" @@ -4090,14 +4143,14 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.9" +version = "6.0.12.10" description = "Typing stubs for PyYAML" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, - {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, + {file = "types-PyYAML-6.0.12.10.tar.gz", hash = "sha256:ebab3d0700b946553724ae6ca636ea932c1b0868701d4af121630e78d695fc97"}, + {file = "types_PyYAML-6.0.12.10-py3-none-any.whl", hash = "sha256:662fa444963eff9b68120d70cda1af5a5f2aa57900003c2006d7626450eaae5f"}, ] [[package]] @@ -4129,14 +4182,14 @@ types-urllib3 = "<1.27" [[package]] name = "types-tabulate" -version = "0.8.9" +version = "0.9.0.2" description = "Typing stubs for tabulate" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-tabulate-0.8.9.tar.gz", hash = "sha256:2fc3fa4fe1853ac987cf50e8d4599e3fe446dd53064fe86a46a407a98e9fc04f"}, - {file = "types_tabulate-0.8.9-py3-none-any.whl", hash = "sha256:7971ed0cd40454eb18d82c01e2f18bcd09ca23cc9eb901c62d2b04e5d1f57f84"}, + {file = "types-tabulate-0.9.0.2.tar.gz", hash = "sha256:1dd4322a3a146e9073169c74278b8f14a58eb9905ca9db0d2588df408f27cac9"}, + {file = "types_tabulate-0.9.0.2-py3-none-any.whl", hash = "sha256:a2e41cc41b6b46bfaec78f8fd8e03058fda7a31af6f203a4b235f5482f571f6f"}, ] [[package]] @@ -4153,26 +4206,26 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.6.2" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, + {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, ] [[package]] name = "urllib3" -version = "1.26.15" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] @@ -4182,19 +4235,20 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "vcrpy" -version = "4.2.1" +version = "4.3.1" description = "Automatically mock your HTTP interactions to simplify and speed up testing" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "vcrpy-4.2.1-py2.py3-none-any.whl", hash = "sha256:efac3e2e0b2af7686f83a266518180af7a048619b2f696e7bad9520f5e2eac09"}, - {file = "vcrpy-4.2.1.tar.gz", hash = "sha256:7cd3e81a2c492e01c281f180bcc2a86b520b173d2b656cb5d89d99475423e013"}, + {file = "vcrpy-4.3.1-py2.py3-none-any.whl", hash = "sha256:35398f1b373f32340f39d735ea45f40d679ace316f3dddf8cbcbc2f120e6d1d0"}, + {file = "vcrpy-4.3.1.tar.gz", hash = "sha256:24e2d450bf1c2f9f9b4246ee91beb7d58f862a9f2f030514b14783b83c5146ec"}, ] [package.dependencies] PyYAML = "*" six = ">=1.5" +urllib3 = {version = "<2", markers = "python_version < \"3.10\""} wrapt = "*" yarl = "*" @@ -4247,14 +4301,14 @@ files = [ [[package]] name = "websocket-client" -version = "1.5.1" +version = "1.5.2" description = "WebSocket client for Python with low level API options" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"}, - {file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"}, + {file = "websocket-client-1.5.2.tar.gz", hash = "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b"}, + {file = "websocket_client-1.5.2-py3-none-any.whl", hash = "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1"}, ] [package.extras] @@ -4753,9 +4807,9 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [extras] -service = ["apispec", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "rq-scheduler", "sentry-sdk", "walrus"] +service = ["apispec", "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "rq-scheduler", "sentry-sdk", "walrus"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "5fd9703ff48c7fdc1a6eab028b41494001406638de820830a4617341cc26968a" +content-hash = "ea8f64af7de18978e6821f6db008c030830efa8574e83b5e5dbc0452fe6b61a4" diff --git a/pyproject.toml b/pyproject.toml index 964b169ece..56669e28ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,22 +60,21 @@ click = ">=8.0,<8.1.4" click-option-group = "<0.6.0,>=0.5.2" click-plugins = "==1.1.1" coverage = { version = "<6.5,>=4.5.3", extras=["toml"], optional = true } -cryptography = ">=38.0.0,<40.0.0" -cwl-utils = ">=0.12,<0.18" -cwltool = 
"==3.1.20220628170238" +cryptography = ">=38.0.0,<41.0.0" +cwl-utils = ">=0.27,<0.28" +cwltool = "==3.1.20230425144158" deal = ">=4.24.0,<5.0.0" deepdiff = ">=5.8,<7.0" deepmerge = "==1.0.1" docker = "<6,>=3.7.2" filelock = ">=3.3.0,<3.12.1" gitpython = "==3.1.27" -grandalf = "==0.7" +grandalf = "==0.8" humanize = ">=3.0.0,<4.1.0" importlib-resources = ">=5.12.0,<5.13.0" inject = "<4.4.0,>=4.3.0" jinja2 = { version = ">=2.11.3,<3.1.3" } networkx = ">=2.6.0,<3.2" -numpy = ">=1.24.0,<1.25.0" packaging = "<24.0,>=23.0" pathspec = "<1.0.0,>=0.8.0" patool = "==1.12" @@ -83,21 +82,21 @@ pluggy = "==1.0.0" portalocker = ">=2.2.1,<2.8" poetry-dynamic-versioning = "0.21.5" psutil = ">=5.4.7,<5.9.2" -pydantic = "==1.10.2" +pydantic = "==1.10.7" pyjwt = ">=2.1.0,<2.5.0" pyld = "==2.0.3" pyopenssl = ">=19.0.0,<22.1.0" -pyshacl = ">=0.17.2,<0.18.2" +pyshacl = ">=0.17.2,<0.19.2" python-dateutil = "<2.8.3,>=2.6.1" python-editor = "==1.0.4" python-gitlab = ">=2.10.1,<3.8.2" pyyaml = "<6.1.0,>=5.4" rdflib = "<7.0,>=6.0.0" -requests = ">=2.23.0,<2.28.2" +requests = ">=2.23.0,<2.31.1" rich = ">=9.3.0,<13.4.0" shellingham = "1.5.0.post1" tabulate = ">=0.7.7,<0.9.1" -toil = "==5.7.1" +toil = "==5.10.0" tqdm = "<4.62.4,>=4.48.1" werkzeug = ">=1.0.0,<2.2.4" yagup = ">=0.1.1" @@ -107,19 +106,20 @@ zodb = "==5.8.0" zstandard = ">=0.16.0,<0.22.0" # service dependencies: -apispec = { version = ">=4.0.0,<5.3.0", optional = true } +apispec = { version = ">=6.3.0,<6.4.0", optional = true } +apispec-oneofschema = { version = ">=3.0.0,<4.0.0", optional = true} apispec-webframeworks = { version = "<0.6,>=0.5.2", optional = true } circus = { version = "==0.18.0", optional = true } flask = { version = "==2.2.5", optional = true } gunicorn = { version = "*", optional = true } marshmallow = { version = ">=3.18.0,<3.20.0", optional = true } marshmallow-oneofschema = { version = ">=3.0.1,<4.0.0", optional = true } -pillow = { version = ">=9.0.0,<9.4", optional = true } +pillow = { version = ">=9.0.0,<9.6", optional = true } python-dotenv = { version = ">=0.19.0,<0.21.0", optional = true } redis = { version = ">=3.5.3,<4.6.0", optional = true } -rq = { version = "==1.13.0", optional = true } +rq = { version = "==1.14.1", optional = true } rq-scheduler = { version = "==0.13.1", optional = true } -sentry-sdk = { version = ">=1.5.11,<1.20.1", extras = ["flask"], optional = true } +sentry-sdk = { version = ">=1.5.11,<1.24.0", extras = ["flask"], optional = true } walrus = { version = ">=0.8.2,<0.10.0", optional = true } [tool.poetry.group.dev.dependencies] @@ -134,7 +134,7 @@ types-PyYAML = "<6.1.0,>=5.4" types-python-dateutil = "^2.8.10" types-redis = ">=3.5.3,<4.1.0" types-requests = ">=2.23.0,<2.28.12" -types-tabulate = "<0.8.10,>=0.7.7" +types-tabulate = ">=0.7.7,<0.9.1" [tool.poetry.group.tests] optional = true @@ -154,9 +154,9 @@ pytest-flake8 = ">=1.0.6,<1.1.2" pytest-lazy-fixture = ">=0.6.3,<0.7.0" pytest-mock = ">=3.2.0,<3.11.0" pytest-pep8 = "==1.0.6" -pytest-recording = "==0.12.1" +pytest-recording = "==0.12.2" pytest-timeout = "==2.1.0" -pytest-xdist = ">=1.34.0,<2.6.0" +pytest-xdist = ">=1.34.0,<3.4.0" responses = ">=0.22.0,<0.24.0" [tool.poetry.group.docs] @@ -173,6 +173,7 @@ sphinxcontrib-spelling = ">=7,<9" [tool.poetry.extras] service = [ "apispec", + "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", @@ -244,11 +245,11 @@ pattern = """(?x) (?# ignore whitespace """ [tool.pytest.ini_options] -addopts = "--flake8 --black --doctest-glob=\"*.rst\" --doctest-modules --cov --cov-report=term-missing 
--ignore=docs/cheatsheet/" +addopts = "--doctest-glob=\"*.rst\" --doctest-modules --cov --cov-report=term-missing --ignore=docs/cheatsheet/" doctest_optionflags = "ALLOW_UNICODE" flake8-ignore = ["*.py", "E121", "E126", "E203", "E226", "E231", "W503", "W504", "docs/conf.py", "docs/cheatsheet/conf.py", "ALL"] flake8-max-line-length = 120 -testpaths = ["docs", "tests", "renku", "conftest.py"] +testpaths = ["docs", "tests", "conftest.py"] markers = [ "integration: mark a test as a integration.", "jobs: mark a test as a job test.", @@ -282,6 +283,7 @@ check_untyped_defs = true [[tool.mypy.overrides]] module = [ "apispec.*", + "apispec_oneofschema.*", "apispec_webframeworks.*", "appdirs", "BTrees.*", diff --git a/renku/command/checks/activities.py b/renku/command/checks/activities.py index 4cadd48c70..b220ececf7 100644 --- a/renku/command/checks/activities.py +++ b/renku/command/checks/activities.py @@ -58,7 +58,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_): wrong_activities = [] if not wrong_activities: - return True, None + return True, False, None problems = ( WARNING @@ -68,7 +68,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_): + "\n" ) - return False, problems + return False, True, problems @inject.autoparams("activity_gateway") @@ -81,7 +81,8 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): _: keyword arguments. Returns: - Tuple[bool, Optional[str]]: Tuple of whether there are activities with invalid dates a string of the problem. + Tuple[bool, Optional[str]]: Tuple of whether there are activities with invalid dates, if they can be + automatically fixed and a string of the problem. """ invalid_activities = [] @@ -95,7 +96,7 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): invalid_activities.append(activity) if not invalid_activities: - return True, None + return True, False, None if not fix: ids = [a.id for a in invalid_activities] message = ( @@ -104,13 +105,13 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): + "\n\t" + "\n\t".join(ids) ) - return False, message + return False, True, message fix_activity_dates(activities=invalid_activities) project_context.database.commit() communication.info("Activity dates were fixed") - return True, None + return True, False, None def fix_activity_dates(activities): diff --git a/renku/command/checks/datasets.py b/renku/command/checks/datasets.py index e0fc35bc3a..511bf9405a 100644 --- a/renku/command/checks/datasets.py +++ b/renku/command/checks/datasets.py @@ -38,12 +38,13 @@ def check_dataset_old_metadata_location(**_): _: keyword arguments. Returns: - Tuple of whether dataset metadata location is valid and string of found problems. + Tuple of whether dataset metadata location is valid, if an automated fix is available and string of + found problems. """ old_metadata = get_pre_0_3_4_datasets_metadata() if not old_metadata: - return True, None + return True, False, None problems = ( WARNING + "There are metadata files in the old location." @@ -52,7 +53,7 @@ def check_dataset_old_metadata_location(**_): + "\n" ) - return False, problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -64,7 +65,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple of whether all dataset files are there and string of found problems. + Tuple of whether all dataset files are there, if an automated fix is available and string of found problems. 
""" missing = defaultdict(list) @@ -79,7 +80,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): missing[dataset.name].append(file_.entity.path) if not missing: - return True, None + return True, False, None problems = WARNING + "There are missing files in datasets." @@ -91,7 +92,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): + "\n\t ".join(click.style(path, fg="red") for path in files) ) - return False, problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -104,7 +105,7 @@ def check_invalid_datasets_derivation(fix, dataset_gateway: IDatasetGateway, **_ _: keyword arguments. Returns: - Tuple of whether dataset derivations are valid and string of found problems. + Tuple of whether dataset derivations are valid, if an automated fix is available and string of found problems. """ invalid_datasets = [] @@ -130,7 +131,7 @@ def fix_or_report(dataset): break if not invalid_datasets: - return True, None + return True, False, None problems = ( WARNING @@ -140,7 +141,7 @@ def fix_or_report(dataset): + "\n" ) - return False, problems + return False, True, problems @inject.autoparams("dataset_gateway") @@ -193,9 +194,9 @@ def check_dataset_files_outside_datadir(fix, dataset_gateway: IDatasetGateway, * + "\n\t".join(click.style(file.entity.path, fg="yellow") for file in invalid_files) + "\n" ) - return False, problems + return False, True, problems - return True, None + return True, False, None @inject.autoparams("dataset_gateway") @@ -208,7 +209,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple of whether no external files are found and string of found problems. + Tuple of whether no external files are found, if an automated fix is available and string of found problems. """ from renku.core.dataset.dataset import file_unlink @@ -222,7 +223,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): datasets[dataset.name].append(file) if not external_files: - return True, None + return True, False, None external_files_str = "\n\t".join(sorted(external_files)) @@ -232,7 +233,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): "Use 'renku dataset rm' or rerun 'renku doctor' with '--fix' flag to remove them:\n\t" f"{external_files_str}\n" ) - return False, problems + return False, True, problems communication.info( "The following external files were deleted from the project. You need to add them later manually using a " @@ -242,4 +243,4 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): for name, files in datasets.items(): file_unlink(name=name, yes=True, dataset_files=files) - return True, None + return True, False, None diff --git a/renku/command/checks/githooks.py b/renku/command/checks/githooks.py index 524ba59ce7..c630261eef 100644 --- a/renku/command/checks/githooks.py +++ b/renku/command/checks/githooks.py @@ -41,7 +41,7 @@ def check_git_hooks_installed(**_): hook_path = get_hook_path(name=hook, path=project_context.path) if not hook_path.exists(): message = WARNING + "Git hooks are not installed. " 'Use "renku githooks install" to install them. 
\n' - return False, message + return False, False, message with hook_path.open() as file_: actual_hook = _extract_renku_hook(file_) @@ -50,16 +50,16 @@ def check_git_hooks_installed(**_): if not expected_hook: message = WARNING + "Cannot check for existence of Git hooks.\n" - return False, message + return False, False, message if actual_hook != expected_hook: message = ( WARNING + "Git hooks are outdated or not installed.\n" ' (use "renku githooks install --force" to update them) \n' ) - return False, message + return False, False, message - return True, None + return True, False, None def _extract_renku_hook(file): diff --git a/renku/command/checks/migration.py b/renku/command/checks/migration.py index 9e81af9e31..5ac8d692e3 100644 --- a/renku/command/checks/migration.py +++ b/renku/command/checks/migration.py @@ -26,7 +26,7 @@ def check_migration(**_): _: keyword arguments. Returns: - Tuple of whether project metadata is up to date and string of found problems. + Tuple of whether project metadata is up to date, if an automated fix is available and string of found problems. """ if is_migration_required(): problems = WARNING + "Project requires migration.\n" + ' (use "renku migrate" to fix this issue)\n' @@ -35,6 +35,6 @@ def check_migration(**_): ERROR + "Project version is not supported by your version of Renku.\n" + " (upgrade your Renku version)\n" ) else: - return True, None + return True, False, None - return False, problems + return False, False, problems diff --git a/renku/command/checks/project.py b/renku/command/checks/project.py index eb79d5f0bd..85485c01ee 100644 --- a/renku/command/checks/project.py +++ b/renku/command/checks/project.py @@ -33,7 +33,7 @@ def check_project_id_group(fix, project_gateway: IProjectGateway, **_): _: keyword arguments. Returns: - Tuple of whether project id is valid. + Tuple of whether project id is valid, if an automated fix is available and string of found problems. """ current_project = project_gateway.get_project() @@ -42,21 +42,25 @@ def check_project_id_group(fix, project_gateway: IProjectGateway, **_): ) if namespace is None or name is None: - return True, None + return True, False, None generated_id = Project.generate_id(namespace=namespace, name=name) if generated_id == current_project.id: - return True, None + return True, False, None if fix: communication.info(f"Fixing project id '{current_project.id}' -> '{generated_id}'") current_project.id = generated_id project_gateway.update_project(current_project) - return True, None + return True, False, None - return True, ( - WARNING - + "Project id doesn't match id created based on the current Git remote (use 'renku doctor --fix' to fix it):" - f"\n\t'{current_project.id}' -> '{generated_id}'" + return ( + False, + True, + ( + WARNING + + "Project id doesn't match id based on the current Git remote (use 'renku doctor --fix' to fix it):" + f"\n\t'{current_project.id}' -> '{generated_id}'" + ), ) diff --git a/renku/command/checks/storage.py b/renku/command/checks/storage.py index 709507db2d..7deb79d548 100644 --- a/renku/command/checks/storage.py +++ b/renku/command/checks/storage.py @@ -26,22 +26,22 @@ def check_lfs_info(**_): _: keyword arguments. Returns: - Tuple of whether project structure is valid and string of found problems. + Tuple of whether project structure is valid, if an automated fix is available and string of found problems. 
""" if not check_external_storage(): - return True, None + return True, False, None files = check_lfs_migrate_info() if not files: - return True, None + return True, False, None message = ( WARNING + "Git history contains large files - consider moving them " - + "to external storage like git LFS\n\t" + + "to external storage like git LFS using 'renku storage migrate'\n\t" + "\n\t".join(files) + "\n" ) - return False, message + return False, False, message diff --git a/renku/command/checks/validate_shacl.py b/renku/command/checks/validate_shacl.py index e08c1900da..69c031a024 100644 --- a/renku/command/checks/validate_shacl.py +++ b/renku/command/checks/validate_shacl.py @@ -16,7 +16,6 @@ """Check KG structure using SHACL.""" import pyld -import yaml from renku.command.command_builder import inject from renku.command.schema.dataset import dump_dataset_as_jsonld @@ -24,7 +23,6 @@ from renku.command.util import WARNING from renku.core.interface.dataset_gateway import IDatasetGateway from renku.core.util.shacl import validate_graph -from renku.core.util.yaml import NoDatesSafeLoader from renku.domain_model.project_context import project_context @@ -71,18 +69,18 @@ def check_project_structure(**_): _: keyword arguments. Returns: - Tuple of whether project structure is valid and string of found problems. + Tuple of whether project structure is valid, if an automated fix is available and string of found problems. """ data = ProjectSchema().dump(project_context.project) conform, graph, t = _check_shacl_structure(data) if conform: - return True, None + return True, False, None problems = f"{WARNING}Invalid structure of project metadata\n\t{_shacl_graph_to_string(graph)}" - return False, problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -94,7 +92,8 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple[bool, str]: Tuple of whether structure is valid and of problems that might have been found. + Tuple[bool, str]: Tuple of whether structure is valid, if an automated fix is available and of problems + that might have been found. """ ok = True @@ -116,16 +115,9 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): problems.append(f"{dataset.name}\n\t{_shacl_graph_to_string(graph)}\n") if ok: - return True, None + return True, False, None - return False, "\n".join(problems) - - -def _check_shacl_structure_for_path(path): - with path.open(mode="r") as fp: - data = yaml.load(fp, Loader=NoDatesSafeLoader) or {} - - return _check_shacl_structure(data) + return False, False, "\n".join(problems) def _check_shacl_structure(data): diff --git a/renku/command/checks/workflow.py b/renku/command/checks/workflow.py index b4cf5407c0..8a8b4b968f 100644 --- a/renku/command/checks/workflow.py +++ b/renku/command/checks/workflow.py @@ -26,7 +26,7 @@ from renku.infrastructure.gateway.activity_gateway import reindex_catalog -def check_activity_catalog(fix, force, **_) -> Tuple[bool, Optional[str]]: +def check_activity_catalog(fix, force, **_) -> Tuple[bool, bool, Optional[str]]: """Check if the activity-catalog needs to be rebuilt. Args: @@ -35,7 +35,8 @@ def check_activity_catalog(fix, force, **_) -> Tuple[bool, Optional[str]]: _: keyword arguments. Returns: - Tuple of whether the activity-catalog needs to be rebuilt and a string of found problems. + Tuple of whether the activity-catalog needs to be rebuilt, if an automated fix is available and a string of + found problems. 
""" database = project_context.database activity_catalog = database["activity-catalog"] @@ -44,25 +45,25 @@ def check_activity_catalog(fix, force, **_) -> Tuple[bool, Optional[str]]: # NOTE: If len(activity_catalog) > 0 then either the project is fixed or it used a fixed Renku version but still has # broken metadata. ``force`` allows to rebuild the metadata in the latter case. if (len(relations) == 0 or len(activity_catalog) > 0) and not (force and fix): - return True, None + return True, False, None if not fix: problems = ( WARNING + "The project's workflow metadata needs to be rebuilt (use 'renku doctor --fix' to rebuild it).\n" ) - return False, problems + return False, True, problems with communication.busy("Rebuilding workflow metadata ..."): reindex_catalog(database=database) communication.info("Workflow metadata was rebuilt") - return True, None + return True, False, None @inject.autoparams("plan_gateway") -def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[bool, Optional[str]]: +def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[bool, bool, Optional[str]]: """Check if all plans have modification date set for them. Args: @@ -71,7 +72,8 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ _: keyword arguments. Returns: - Tuple[bool, Optional[str]]: Tuple of whether there are plans without modification date and a string of their IDs + Tuple[bool, Optional[str]]: Tuple of whether there are plans without modification date, if an automated fix is + available and a string of their IDs """ plans: List[AbstractPlan] = plan_gateway.get_all_plans() @@ -81,7 +83,7 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ to_be_processed.append(plan) if not to_be_processed: - return True, None + return True, False, None if not fix: ids = [plan.id for plan in to_be_processed] message = ( @@ -89,13 +91,13 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ + "The following workflows have incorrect modification date (use 'renku doctor --fix' to fix them):\n\t" + "\n\t".join(ids) ) - return False, message + return False, True, message fix_plan_dates(plans=to_be_processed, plan_gateway=plan_gateway) project_context.database.commit() communication.info("Workflow modification dates were fixed") - return True, None + return True, False, None def fix_plan_dates(plans: List[AbstractPlan], plan_gateway): diff --git a/renku/command/doctor.py b/renku/command/doctor.py index cb211edf48..9150352934 100644 --- a/renku/command/doctor.py +++ b/renku/command/doctor.py @@ -44,22 +44,25 @@ def _doctor_check(fix: bool, force: bool): from renku.command import checks is_ok = True + fixes_available = False problems = [] for check in checks.__all__: try: - ok, problems_ = getattr(checks, check)(fix=fix, force=force) + ok, has_fix, problems_ = getattr(checks, check)(fix=fix, force=force) except Exception: ok = False + has_fix = False tb = "\n\t".join(traceback.format_exc().split("\n")) problems_ = f"{ERROR}Exception raised when running {check}\n\t{tb}" is_ok &= ok + fixes_available |= has_fix if problems_: problems.append(problems_) - return is_ok, "\n".join(problems) + return is_ok, fixes_available, "\n".join(problems) def doctor_check_command(with_fix): diff --git a/renku/command/migrate.py b/renku/command/migrate.py index c04ce1e9e8..df57d07dcc 100644 --- a/renku/command/migrate.py +++ b/renku/command/migrate.py @@ -16,11 +16,14 @@ # limitations under the License. 
"""Migrate project to the latest Renku version.""" -from typing import List +from dataclasses import dataclass +from typing import List, Optional, Tuple, Union from pydantic import validate_arguments from renku.command.command_builder.command import Command +from renku.core.errors import MinimumVersionError +from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION from renku.domain_model.project_context import project_context SUPPORTED_RENKU_PROJECT = 1 @@ -32,12 +35,89 @@ DOCKERFILE_UPDATE_POSSIBLE = 64 +@dataclass +class CoreStatusResult: + """Core migration status.""" + + migration_required: bool + project_metadata_version: Optional[int] + current_metadata_version: int + + +@dataclass +class DockerfileStatusResult: + """Docker migration status.""" + + automated_dockerfile_update: bool + newer_renku_available: Optional[bool] + dockerfile_renku_version: Optional[str] + latest_renku_version: str + + +@dataclass +class TemplateStatusResult: + """Template migration status.""" + + automated_template_update: bool + newer_template_available: bool + project_template_version: Optional[str] + latest_template_version: Optional[str] + template_source: Optional[str] + template_ref: Optional[str] + template_id: Optional[str] + ssh_supported: bool + + +@dataclass +class MigrationCheckResult: + """Migration check output.""" + + project_supported: bool + core_renku_version: str + project_renku_version: Optional[str] + core_compatibility_status: Union[CoreStatusResult, Exception] + dockerfile_renku_status: Union[DockerfileStatusResult, Exception] + template_status: Union[TemplateStatusResult, Exception] + + @staticmethod + def from_minimum_version_error(minimum_version_error: MinimumVersionError) -> "MigrationCheckResult": + """Create a migration check when the project isn't supported yet.""" + from renku import __version__ + + return MigrationCheckResult( + project_supported=False, + core_renku_version=str(minimum_version_error.current_version), + project_renku_version=f">={minimum_version_error.minimum_version}", + core_compatibility_status=CoreStatusResult( + migration_required=False, + project_metadata_version=None, + current_metadata_version=SUPPORTED_PROJECT_VERSION, + ), + dockerfile_renku_status=DockerfileStatusResult( + dockerfile_renku_version="unknown", + latest_renku_version=__version__, + newer_renku_available=False, + automated_dockerfile_update=False, + ), + template_status=TemplateStatusResult( + automated_template_update=False, + newer_template_available=False, + template_source="unknown", + template_ref="unknown", + template_id="unknown", + project_template_version="unknown", + latest_template_version="unknown", + ssh_supported=False, + ), + ) + + def migrations_check(): """Return a command for a migrations check.""" return Command().command(_migrations_check).with_database(write=False) -def _migrations_check(): +def _migrations_check() -> MigrationCheckResult: """Check migration status of project. 
Returns: @@ -47,14 +127,29 @@ def _migrations_check(): core_version, latest_version = _migrations_versions() - return { - "project_supported": not is_project_unsupported(), - "core_renku_version": core_version, - "project_renku_version": latest_version, - "core_compatibility_status": _metadata_migration_check(), - "dockerfile_renku_status": _dockerfile_migration_check(), - "template_status": _template_migration_check(), - } + try: + core_compatibility_status: Union[CoreStatusResult, Exception] = _metadata_migration_check() + except Exception as e: + core_compatibility_status = e + + try: + docker_status: Union[DockerfileStatusResult, Exception] = _dockerfile_migration_check() + except Exception as e: + docker_status = e + + try: + template_status: Union[TemplateStatusResult, Exception] = _template_migration_check() + except Exception as e: + template_status = e + + return MigrationCheckResult( + project_supported=not is_project_unsupported(), + core_renku_version=core_version, + project_renku_version=latest_version, + core_compatibility_status=core_compatibility_status, + dockerfile_renku_status=docker_status, + template_status=template_status, + ) def migrations_versions(): @@ -62,7 +157,7 @@ def migrations_versions(): return Command().command(_migrations_versions).lock_project().with_database() -def _migrations_versions(): +def _migrations_versions() -> Tuple[str, Optional[str]]: """Return source and destination migration versions. Returns: @@ -81,7 +176,7 @@ def _migrations_versions(): return __version__, latest_agent -def _template_migration_check(): +def _template_migration_check() -> TemplateStatusResult: """Return template migration status. Returns: @@ -90,33 +185,26 @@ def _template_migration_check(): from renku.core.config import get_value from renku.core.template.usecase import check_for_template_update - try: - project = project_context.project - template_source = project.template_metadata.template_source - template_ref = project.template_metadata.template_ref - template_id = project.template_metadata.template_id - ssh_supported = project.template_metadata.ssh_supported - except (ValueError, AttributeError): - project = None - template_source = None - template_ref = None - template_id = None - ssh_supported = False + project = project_context.project + template_source = project.template_metadata.template_source + template_ref = project.template_metadata.template_ref + template_id = project.template_metadata.template_id + ssh_supported = project.template_metadata.ssh_supported ssh_supported = get_value("renku", "ssh_supported") == "true" or ssh_supported update_available, update_allowed, current_version, new_version = check_for_template_update(project) - return { - "automated_template_update": update_allowed, - "newer_template_available": update_available, - "project_template_version": current_version, - "latest_template_version": new_version, - "template_source": template_source, - "template_ref": template_ref, - "template_id": template_id, - "ssh_supported": ssh_supported, - } + return TemplateStatusResult( + automated_template_update=update_allowed, + newer_template_available=update_available, + project_template_version=current_version, + latest_template_version=new_version, + template_source=template_source, + template_ref=template_ref, + template_id=template_id, + ssh_supported=ssh_supported, + ) def dockerfile_migration_check(): @@ -124,7 +212,7 @@ def dockerfile_migration_check(): return Command().command(_dockerfile_migration_check) -def _dockerfile_migration_check(): 
+def _dockerfile_migration_check() -> DockerfileStatusResult: """Return Dockerfile migration status. Returns: @@ -135,12 +223,12 @@ def _dockerfile_migration_check(): automated_dockerfile_update, newer_renku_available, dockerfile_renku_version = update_dockerfile(check_only=True) - return { - "automated_dockerfile_update": automated_dockerfile_update, - "newer_renku_available": newer_renku_available, - "dockerfile_renku_version": dockerfile_renku_version, - "latest_renku_version": __version__, - } + return DockerfileStatusResult( + automated_dockerfile_update=automated_dockerfile_update, + newer_renku_available=newer_renku_available, + dockerfile_renku_version=dockerfile_renku_version, + latest_renku_version=__version__, + ) def metadata_migration_check(): @@ -148,7 +236,7 @@ def metadata_migration_check(): return Command().command(_metadata_migration_check) -def _metadata_migration_check(): +def _metadata_migration_check() -> CoreStatusResult: """Return metadata migration status. Returns: @@ -156,11 +244,11 @@ def _metadata_migration_check(): """ from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION, get_project_version, is_migration_required - return { - "migration_required": is_migration_required(), - "project_metadata_version": get_project_version(), - "current_metadata_version": SUPPORTED_PROJECT_VERSION, - } + return CoreStatusResult( + migration_required=is_migration_required(), + project_metadata_version=get_project_version(), + current_metadata_version=SUPPORTED_PROJECT_VERSION, + ) def migrate_project_command(): diff --git a/renku/command/schema/agent.py b/renku/command/schema/agent.py index 4f5ca2ceb0..2f394e1f5c 100644 --- a/renku/command/schema/agent.py +++ b/renku/command/schema/agent.py @@ -16,7 +16,7 @@ """Agents JSON-LD schemes.""" from calamus.schema import JsonLDSchema -from marshmallow import EXCLUDE +from marshmallow import EXCLUDE, pre_load from renku.command.schema.calamus import StringList, fields, prov, schema from renku.domain_model.provenance.agent import Person, SoftwareAgent @@ -32,6 +32,27 @@ class Meta: model = Person unknown = EXCLUDE + @pre_load + def fix_affiliation(self, data, **kwargs): + """Fix affiliation to be a string.""" + affiliations = [] + affiliation = data.get("http://schema.org/affiliation") + if affiliation: + if not isinstance(affiliation, list): + affiliation = [affiliation] + for a in affiliation: + if isinstance(a, dict): + name = a.get("http://schema.org/name", "") + if isinstance(name, list): + name = name[0] + else: + name = str(a) + affiliations.append(name) + + data["http://schema.org/affiliation"] = affiliations + + return data + affiliation = StringList(schema.affiliation, load_default=None) alternate_name = StringList(schema.alternateName, load_default=None) email = fields.String(schema.email, load_default=None) diff --git a/renku/command/session.py b/renku/command/session.py index 802624ea16..824ad9be58 100644 --- a/renku/command/session.py +++ b/renku/command/session.py @@ -1,7 +1,6 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
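The `migrate.py` changes above replace dict results with the `CoreStatusResult`, `DockerfileStatusResult`, `TemplateStatusResult`, and `MigrationCheckResult` dataclasses, and `_migrations_check` now catches exceptions per sub-check, so each status field holds either a result object or the exception that was raised. A hedged sketch of how a caller might consume such a result; the field accesses mirror the dataclasses in the diff, while the commented invocation via the command builder is an assumption about the surrounding API:

```python
from renku.command.migrate import MigrationCheckResult, migrations_check


def summarize(check: MigrationCheckResult) -> None:
    """Report each sub-status, treating Exception-valued fields as partial failures."""
    print("project supported:", check.project_supported)
    print("core renku version:", check.core_renku_version)

    core = check.core_compatibility_status
    if isinstance(core, Exception):
        print("metadata check failed:", core)
    else:
        print("migration required:", core.migration_required)

    docker = check.dockerfile_renku_status
    if isinstance(docker, Exception):
        print("Dockerfile check failed:", docker)
    else:
        print("newer renku available:", docker.newer_renku_available)

    template = check.template_status
    if isinstance(template, Exception):
        print("template check failed:", template)
    else:
        print("newer template available:", template.newer_template_available)


# Assumed invocation pattern (inside a Renku project):
# result = migrations_check().build().execute()
# summarize(result.output)
```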
@@ -16,9 +15,26 @@ # limitations under the License. """Renku session commands.""" - from renku.command.command_builder.command import Command -from renku.core.session.session import session_list, session_open, session_start, session_stop, ssh_setup +from renku.core.session.session import ( + search_session_providers, + search_sessions, + session_list, + session_open, + session_start, + session_stop, + ssh_setup, +) + + +def search_sessions_command(): + """Get all the session names that match a pattern.""" + return Command().command(search_sessions).require_migration().with_database(write=False) + + +def search_session_providers_command(): + """Get all the session provider names that match a pattern.""" + return Command().command(search_session_providers).require_migration().with_database(write=False) def session_list_command(): diff --git a/renku/command/view_model/activity_graph.py b/renku/command/view_model/activity_graph.py index 6aaf924818..d8e3c18739 100644 --- a/renku/command/view_model/activity_graph.py +++ b/renku/command/view_model/activity_graph.py @@ -339,8 +339,14 @@ def text_representation( max_y = max(max_y, local_max_y) xy = node.view.xy + split = node.data[0].splitlines() + max_height = len(split) + max_width = max(len(line) for line in split) node_shape = NodeShape( - node.data[0], Point(xy[0], xy[1] + min_y), double_border=node.data[1], color=node_color + node.data[0], + Point(round(xy[0] - max_width / 2), round(xy[1] + min_y - max_height)), + double_border=node.data[1], + color=node_color, ) canvas.add_shape(node_shape, layer=1) max_y = max(max_y, node_shape.extent[0][1]) diff --git a/renku/command/view_model/graph.py b/renku/command/view_model/graph.py index 0dd4e5a7a2..6c2ae108a7 100644 --- a/renku/command/view_model/graph.py +++ b/renku/command/view_model/graph.py @@ -20,11 +20,12 @@ import io import json from enum import Enum -from typing import Dict, List, Optional +from typing import Dict, Iterator, List, Optional, cast import pyld import rdflib from rdflib import ConjunctiveGraph, Graph +from rdflib.query import ResultRow from rdflib.tools.rdf2dot import LABEL_PROPERTIES, NODECOLOR, rdf2dot @@ -189,7 +190,7 @@ def color(p): } """ - for s, p, o in graph.query(sparql): + for s, p, o in cast(Iterator[ResultRow], graph.query(sparql)): sn = node(s) if p == rdflib.RDFS.label: continue diff --git a/renku/command/view_model/text_canvas.py b/renku/command/view_model/text_canvas.py index 9bb7feeaae..984ccf01c5 100644 --- a/renku/command/view_model/text_canvas.py +++ b/renku/command/view_model/text_canvas.py @@ -15,17 +15,16 @@ # limitations under the License. 
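In the `activity_graph.py` hunk above, a node's layout coordinate is no longer passed straight to `NodeShape`; the label's width and height are measured first so the coordinate can be treated (roughly) as the bottom-center of the text and converted to the top-left corner that the reworked `NodeShape` now expects. A small worked example of that conversion, with a made-up label and coordinates:

```python
# Made-up label and layout coordinate, to illustrate the new placement arithmetic.
label = "renku run\npython script.py"  # two-line node label
xy = (20.0, 12.0)                      # layout position produced for the node
min_y = 0                              # vertical offset applied to the whole graph

lines = label.splitlines()
max_height = len(lines)                       # 2
max_width = max(len(line) for line in lines)  # 16 ("python script.py")

top_left = (round(xy[0] - max_width / 2), round(xy[1] + min_y - max_height))
print(top_left)  # (12, 10): the corner handed to NodeShape, which now takes the box's top-left
```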
"""Activity graph view model.""" +import abc +import math from collections import defaultdict from copy import deepcopy -from io import StringIO -from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Optional, Tuple +from dataclasses import dataclass +from functools import partial +from typing import Any, Callable, Dict, Generator, NamedTuple, Optional, Tuple -import numpy as np from click import style -if TYPE_CHECKING: - import numpy.typing as npt - class Point(NamedTuple): """A point with coordinates for rendering.""" @@ -34,23 +33,162 @@ class Point(NamedTuple): y: int +class Size(NamedTuple): + """A size for a two dimensional matrix.""" + + width: int + height: int + + +class EdgeSet(NamedTuple): + """A collection of edge characters for rendering boxes.""" + + top_left: str + bottom_left: str + top_right: str + bottom_right: str + + +@dataclass +class CharacterSet: + """A rendering character set for rendering Ascii content.""" + + edge: EdgeSet + horizontal: str + vertical: str + + def style(self, transform: Callable[[str], str]) -> None: + """Apply a style to this character set (like bold). + + Args: + transform(Callable[[str], str]): Function that applies a style to a string. + """ + self.horizontal = transform(self.horizontal) + self.vertical = transform(self.vertical) + + self.edge = EdgeSet( + transform(self.edge.top_left), + transform(self.edge.bottom_left), + transform(self.edge.top_right), + transform(self.edge.bottom_right), + ) + + MAX_NODE_LENGTH = 40 -class Shape: - """basic shape class that all shapes inherit from.""" +def line_points(start: Point, end: Point) -> Generator[Point, None, None]: + """Return all (discrete) points of a line from start to end. + + Args: + start(Point): Starting point of the line. + end(Point): End point of the line. + Returns: + Generator[Point, None, None]: All discrete points part of the line. + """ + x_extent = end.x - start.x + y_extent = end.y - start.y + + length = max(abs(x_extent), abs(y_extent)) + 1 + assert length > 0 + for i in range(length): + position = i / length + yield Point(math.ceil(start.x + position * x_extent), math.ceil(start.y + position * y_extent)) + + +class TextMatrix: + """A two dimensional matrix of strings. - pass + Indices need to be positive. To support handling negative indices, you can set x/y-offset to the lowest + negative index you want to pass and indices will be shifted by this amount internally. + + Args: + size(Tuple[int,int]): The width and height of the matrix. + x_offset(int): Offset to apply to x indices. + y_offset(int): Offset to apply to y indices. 
+ """ + + def __init__(self, size: Size, x_offset: int = 0, y_offset: int = 0) -> None: + assert size.width > 0 + assert size.height > 0 + self._size = size + self.x_offset = x_offset + self.y_offset = y_offset + self._content = [] + + for _ in range(self.size.height): + row = [] + for _ in range(self.size.width): + row.append(" ") + self._content.append(row) + + @property + def size(self) -> Size: + """The size of the matrix.""" + return self._size + + def __getitem__(self, index: Tuple[int, int]) -> str: + """Get element at width,height position.""" + pos = (index[0] + self.x_offset, index[1] + self.y_offset) + assert 0 <= pos[0] < self.size.width + assert 0 <= pos[1] < self.size.height + return self._content[pos[1]][pos[0]] + + def __setitem__(self, index: Tuple[int, int], value: str) -> None: + """Set element at width,height position.""" + pos = (index[0] + self.x_offset, index[1] + self.y_offset) + assert 0 <= pos[0] < self.size.width + assert 0 <= pos[1] < self.size.height + self._content[pos[1]][pos[0]] = value + + def __str__(self) -> str: + return "\n".join(["".join(row) for row in self._content]) + + def draw_line(self, start: Point, end: Point, value: str) -> None: + """Fill a line from start to end with value. + + Args: + start(Point): The starting point. + end(Point): The end point. + value(str): The value (character) to use for representing the line. + """ + for point in line_points(start, end): + self[point.x, point.y] = value + + def draw(self, start: Point, value: "TextMatrix") -> None: + """Write content from value with start being upper-left corner. + + Args: + start(Point): The upper-left corner for where to place the content. + value(TextMatrix): The content text matrix to place into this one. + """ + for y in range(start.y, start.y + value.size.height): + for x in range(start.x, start.x + value.size.width): + self[x, y] = value[x - start.x, y - start.y] + + +class Shape(abc.ABC): + """Basic shape class that all shapes inherit from.""" + + def draw(self, matrix: TextMatrix, color: bool = True, ascii=False) -> None: + """Draw self onto a text matrix.. + + Args: + matrix(TextMatrix): The text matrix to draw on. + color(bool, optional): Whether or not to render in color (Default value = True). + ascii: Whether to use ascii characters only or with UTF8 (Default value = False). + """ + raise NotImplementedError() class RectangleShape(Shape): """A rectangle shape.""" - ASCII_CHARACTERS = {"edge": ["+"] * 4, "horizontal": "-", "vertical": "|"} + ASCII_CHARACTERS = CharacterSet(edge=EdgeSet("+", "+", "+", "+"), horizontal="-", vertical="|") - UNICODE_CHARACTERS = {"edge": ["┌", "└", "┐", "┘"], "horizontal": "─", "vertical": "│"} + UNICODE_CHARACTERS = CharacterSet(edge=EdgeSet("┌", "└", "┐", "┘"), horizontal="─", vertical="│") - UNICODE_CHARACTERS_DOUBLE = {"edge": ["╔", "╚", "╗", "╝"], "horizontal": "═", "vertical": "║"} + UNICODE_CHARACTERS_DOUBLE = CharacterSet(edge=EdgeSet("╔", "╚", "╗", "╝"), horizontal="═", vertical="║") def __init__(self, start: Point, end: Point, double_border=False, color: Optional[str] = None): self.start = start @@ -58,19 +196,13 @@ def __init__(self, start: Point, end: Point, double_border=False, color: Optiona self.double_border = double_border self.color = color - def draw( - self, color: bool = True, ascii=False - ) -> Tuple["npt.NDArray[np.int32]", "npt.NDArray[np.int32]", "npt.NDArray[np.str_]"]: - """Return the indices and values to draw this shape onto the canvas. 
+ def draw(self, matrix: TextMatrix, color: bool = True, ascii=False) -> None: + """Draw self onto a text matrix.. Args: + matrix(TextMatrix): The text matrix to draw on. color(bool, optional): Whether or not to render in color (Default value = True). ascii: Whether to use ascii characters only or with UTF8 (Default value = False). - - Returns: - Tuple[List[Tuple[int]],List[str]]: Tuple of list of coordinates and list if characters - at those coordinates. - """ if not ascii and self.double_border: characters = deepcopy(self.UNICODE_CHARACTERS_DOUBLE) @@ -79,44 +211,26 @@ def draw( if color and self.color: # NOTE: Add color to border characters - for key, value in list(characters.items()): - if isinstance(value, list): - characters[key] = [style(v, fg=self.color) for v in value] - else: - characters[key] = style(value, fg=self.color) - - # first set corners - xs = np.array([self.start.x, self.start.x, self.end.x - 1, self.end.x - 1]) - ys = np.array([self.start.y, self.end.y, self.start.y, self.end.y]) - vals = np.array(characters["edge"]) - + characters.style(partial(style, fg=self.color)) # horizontal lines - line_xs = np.arange(self.start.x + 1, self.end.x - 1) - xs = np.append(xs, line_xs) - ys = np.append(ys, np.array([self.start.y] * line_xs.size)) - vals = np.append(vals, np.array([characters["horizontal"]] * line_xs.size)) - xs = np.append(xs, line_xs) - ys = np.append(ys, np.array([self.end.y] * line_xs.size)) - vals = np.append(vals, np.array([characters["horizontal"]] * line_xs.size)) + matrix.draw_line( + Point(self.start.x + 1, self.start.y), Point(self.end.x - 1, self.start.y), characters.horizontal + ) + matrix.draw_line(Point(self.start.x + 1, self.end.y), Point(self.end.x - 1, self.end.y), characters.horizontal) # vertical lines - line_ys = np.arange(self.start.y + 1, self.end.y) - xs = np.append(xs, np.array([self.start.x] * line_ys.size)) - ys = np.append(ys, line_ys) - vals = np.append(vals, np.array([characters["vertical"]] * line_ys.size)) - xs = np.append(xs, np.array([self.end.x - 1] * line_ys.size)) - ys = np.append(ys, line_ys) - vals = np.append(vals, np.array([characters["vertical"]] * line_ys.size)) + matrix.draw_line(Point(self.start.x, self.start.y + 1), Point(self.start.x, self.end.y), characters.vertical) + matrix.draw_line(Point(self.end.x, self.start.y + 1), Point(self.end.x, self.end.y), characters.vertical) - # fill with whitespace to force overwriting underlying text - fill_xs, fill_ys = np.meshgrid( - np.arange(self.start.x + 1, self.end.x - 1), np.arange(self.start.y + 1, self.end.y - 1) - ) - xs = np.append(xs, fill_xs) - ys = np.append(ys, fill_ys) - vals = np.append(vals, np.array([" "] * fill_xs.size)) + # set corners + matrix[self.start.x, self.start.y] = characters.edge.top_left + matrix[self.start.x, self.end.y] = characters.edge.bottom_left + matrix[self.end.x, self.start.y] = characters.edge.top_right + matrix[self.end.x, self.end.y] = characters.edge.bottom_right - return xs.astype(int), ys.astype(int), vals + # fill with whitespace to force overwriting underlying text + content = TextMatrix(Size(self.end.x - self.start.x - 1, self.end.y - self.start.y - 1)) + matrix.draw(Point(self.start.x + 1, self.start.y + 1), content) @property def extent(self) -> Tuple[Point, Point]: @@ -137,30 +251,17 @@ def __init__(self, text: str, point: Point, bold: bool = False, color: Optional[ self.bold = bold self.color = color - def draw( - self, color: bool = True, ascii=False - ) -> Tuple["npt.NDArray[np.int32]", "npt.NDArray[np.int32]", 
"npt.NDArray[np.str_]"]: - """Return the indices and values to draw this shape onto the canvas. + def draw(self, matrix: TextMatrix, color: bool = True, ascii=False) -> None: + """Draw self onto a text matrix.. Args: + matrix(TextMatrix): The text matrix to draw on. color(bool, optional): Whether or not to render in color (Default value = True). ascii: Whether to use ascii characters only or with UTF8 (Default value = False). - - Returns: - Tuple[List[Tuple[int]],List[str]]: Tuple of list of coordinates and list if characters - at those coordinates. """ - xs: List[int] = [] - ys: List[int] = [] - vals = [] - - current_x = self.point.x - current_y = self.point.y - for line in self.text: - for char in line: - xs.append(current_x) - ys.append(current_y) + for y, line in enumerate(self.text): + for x, char in enumerate(line): kwargs: Dict[str, Any] = dict() if self.bold: kwargs["bold"] = True @@ -168,15 +269,9 @@ def draw( kwargs["fg"] = self.color if kwargs: - vals.append(style(char, **kwargs)) - else: - vals.append(char) + char = style(char, **kwargs) - current_x += 1 - current_x = self.point.x - current_y += 1 - - return np.array(xs), np.array(ys), np.array(vals) + matrix[self.point.x + x, self.point.y + y] = char @property def extent(self) -> Tuple[Point, Point]: @@ -187,47 +282,34 @@ def extent(self) -> Tuple[Point, Point]: """ max_line_len = max(len(line) for line in self.text) num_lines = len(self.text) - return (self.point, Point(self.point.x + max_line_len, self.point.y + num_lines - 1)) + return (self.point, Point(self.point.x + max_line_len - 1, self.point.y + num_lines - 1)) class NodeShape(Shape): """An activity node shape.""" def __init__(self, text: str, point: Point, double_border=False, color: Optional[str] = None): - self.point = Point(round(point.x), round(point.y - len(text.splitlines()))) - self.text_shape = TextShape(text, self.point, bold=double_border, color=color) - + self.point = point + self.text_shape = TextShape(text, Point(self.point.x + 1, self.point.y + 1), bold=double_border, color=color) text_extent = self.text_shape.extent self.box_shape = RectangleShape( - Point(text_extent[0].x - 1, text_extent[0].y - 1), + self.point, Point(text_extent[1].x + 1, text_extent[1].y + 1), double_border=double_border, color=color, ) - # move width/2 to the left to center on coordinate - self.x_offset = round((text_extent[1].x - text_extent[0].x) / 2) - - def draw( - self, color: bool = True, ascii=False - ) -> Tuple["npt.NDArray[np.int32]", "npt.NDArray[np.int32]", "npt.NDArray[np.str_]"]: - """Return the indices and values to draw this shape onto the canvas. + def draw(self, matrix: TextMatrix, color: bool = True, ascii=False) -> None: + """Draw self onto a text matrix.. Args: + matrix(TextMatrix): The text matrix to draw on. color(bool, optional): Whether or not to render in color (Default value = True). ascii: Whether to use ascii characters only or with UTF8 (Default value = False). - - Returns: - Tuple[List[Tuple[int]],List[str]]: Tuple of list of coordinates and list if characters - at those coordinates. 
""" - xs, ys, vals = self.box_shape.draw(color, ascii) - - text_xs, text_ys, text_vals = self.text_shape.draw(color, ascii) - - self.actual_extent = (Point(xs.min() - self.x_offset, ys.min()), Point(xs.max() - self.x_offset, ys.max())) - - return np.append(xs, text_xs) - self.x_offset, np.append(ys, text_ys), np.append(vals, text_vals) + self.actual_extent = self.extent + self.box_shape.draw(matrix, color=color, ascii=ascii) + self.text_shape.draw(matrix, color=color, ascii=ascii) @property def extent(self) -> Tuple[Point, Point]: @@ -237,8 +319,8 @@ def extent(self) -> Tuple[Point, Point]: Bounds of this shape. """ box_extent = self.box_shape.extent - return Point(box_extent[0].x - self.x_offset, box_extent[0].y), Point( - box_extent[1].x - self.x_offset, + return Point(box_extent[0].x, box_extent[0].y), Point( + box_extent[1].x, box_extent[1].y, ) @@ -253,7 +335,6 @@ def __init__(self, start: Point, end: Point, color: str): self.start = Point(round(start.x), round(start.y)) self.end = Point(round(end.x), round(end.y)) self.color = color - self.line_indices = self._line_indices(start, end) @staticmethod def next_color() -> str: @@ -265,30 +346,6 @@ def next_color() -> str: EdgeShape.CURRENT_COLOR = (EdgeShape.CURRENT_COLOR + 1) % len(EdgeShape.COLORS) return EdgeShape.COLORS[EdgeShape.CURRENT_COLOR] - def _line_indices(self, start: Point, end: Point): - """Interpolate a line. - - Args: - start(Point): Starting point of line. - end(Point): Ending point of line. - - Returns: - Tuple of all x,y coordinates of points in this line. - """ - if abs(end.y - start.y) < abs(end.x - start.x): - # swap x and y, then swap back - xs, ys = self._line_indices(Point(start.y, start.x), Point(end.y, end.x)) - return (ys, xs) - - if start.y > end.y: - # swap start and end - return self._line_indices(end, start) - - x = np.arange(start.y, end.y + 1, dtype=float) - y = x * (end.x - start.x) / (end.y - start.y) + (end.y * start.x - start.y * end.x) / (end.y - start.y) - - return (np.floor(y).astype(int), x.astype(int)) - def intersects_with(self, other_edge: "EdgeShape") -> bool: """Checks whether this edge intersects with other edges. @@ -298,32 +355,22 @@ def intersects_with(self, other_edge: "EdgeShape") -> bool: Returns: bool: True if this edge intersects ``other_edge``, False otherwise. """ - coordinates = set(map(tuple, np.column_stack(self.line_indices))) - other_coordinates = set(map(tuple, np.column_stack(other_edge.line_indices))) + return not set(line_points(self.start, self.end)).intersection(set(line_points(self.start, self.end))) - return coordinates.intersection(other_coordinates) is not None - - def draw( - self, color: bool = True, ascii=False - ) -> Tuple["npt.NDArray[np.int32]", "npt.NDArray[np.int32]", "npt.NDArray[np.str_]"]: - """Return the indices and values to draw this shape onto the canvas. + def draw(self, matrix: TextMatrix, color: bool = True, ascii=False) -> None: + """Draw self onto a text matrix.. Args: + matrix(TextMatrix): The text matrix to draw on. color(bool, optional): Whether or not to render in color (Default value = True). ascii: Whether to use ascii characters only or with UTF8 (Default value = False). - - Returns: - Tuple[List[Tuple[int]],List[str]]: Tuple of list of coordinates and list if characters - at those coordinates. 
- """ - xs, ys = self.line_indices char = "*" if color: char = style(char, fg=self.color) - return xs, ys, np.array(len(xs) * [char]) + matrix.draw_line(self.start, self.end, char) @property def extent(self) -> Tuple[Point, Point]: @@ -362,7 +409,7 @@ def get_coordinates(self, point: Point) -> Point: Returns: Point: Coordinates in parent coordinate system. """ - return Point(point.x - self.offset[1], point.y - self.offset[0]) + return Point(point.x + self.x_offset, point.y) def render(self, color: bool = True, ascii=False): """Render contained shapes onto canvas. @@ -379,19 +426,20 @@ def render(self, color: bool = True, ascii=False): for shape in self.shapes[layer]: shape_extent = shape.extent extent = ( - Point(min(extent[0].x, shape_extent[0].x), min(extent[0].y, shape_extent[0].y)), - Point(max(extent[1].x, shape_extent[1].x), max(extent[1].y, shape_extent[1].y)), + Point( + math.floor(min(extent[0].x, shape_extent[0].x)), math.floor(min(extent[0].y, shape_extent[0].y)) + ), + Point( + math.ceil(max(extent[1].x, shape_extent[1].x)), math.ceil(max(extent[1].y, shape_extent[1].y)) + ), ) - - self.offset = (extent[0].y, extent[0].x) - size = (extent[1].y - extent[0].y + 2, extent[1].x - extent[0].x + 2) - self._canvas = np.chararray(size, unicode=True, itemsize=10) - self._canvas[:] = " " + self.x_offset = -extent[0].x + size = Size(extent[1].x - extent[0].x + 30, extent[1].y - extent[0].y + 2) + self._canvas = TextMatrix(size, x_offset=self.x_offset, y_offset=-extent[0].y) for layer in layers: for shape in self.shapes[layer]: - xs, ys, vals = shape.draw(color=color, ascii=ascii) - self._canvas[ys - self.offset[0], xs - self.offset[1]] = vals + shape.draw(self._canvas, color=color, ascii=ascii) @property def text(self) -> str: @@ -402,6 +450,5 @@ def text(self) -> str: """ if self._canvas is None: raise ValueError("Call render() before getting text.") - string_buffer = StringIO() - np.savetxt(string_buffer, self._canvas, fmt="%1s", delimiter="") - return string_buffer.getvalue() + + return str(self._canvas) diff --git a/renku/core/dataset/dataset.py b/renku/core/dataset/dataset.py index e6f2ec0af4..c8456ee8e8 100644 --- a/renku/core/dataset/dataset.py +++ b/renku/core/dataset/dataset.py @@ -18,6 +18,7 @@ import os import shutil import urllib +from collections import defaultdict from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast @@ -29,22 +30,19 @@ from renku.core import errors from renku.core.config import get_value, remove_value, set_value from renku.core.dataset.datasets_provenance import DatasetsProvenance -from renku.core.dataset.pointer_file import ( - create_external_file, - delete_external_file, - is_linked_file_updated, - update_linked_file, -) +from renku.core.dataset.pointer_file import delete_external_file, is_linked_file_updated, update_linked_file +from renku.core.dataset.providers.api import AddProviderInterface, ProviderApi from renku.core.dataset.providers.factory import ProviderFactory -from renku.core.dataset.providers.models import ProviderDataset +from renku.core.dataset.providers.git import GitProvider +from renku.core.dataset.providers.models import DatasetUpdateAction, ProviderDataset from renku.core.dataset.request_model import ImageRequestModel from renku.core.dataset.tag import get_dataset_by_tag, prompt_access_token, prompt_tag_selection from renku.core.interface.dataset_gateway import IDatasetGateway -from renku.core.storage import check_external_storage, pull_paths_from_storage, track_paths_in_storage +from 
renku.core.storage import check_external_storage, track_paths_in_storage from renku.core.util import communication from renku.core.util.datetime8601 import local_now -from renku.core.util.git import clone_repository, get_cache_directory_for_repository, get_git_user -from renku.core.util.metadata import is_linked_file, prompt_for_credentials, read_credentials, store_credentials +from renku.core.util.git import get_git_user +from renku.core.util.metadata import prompt_for_credentials, read_credentials, store_credentials from renku.core.util.os import ( create_symlink, delete_dataset_file, @@ -72,7 +70,6 @@ if TYPE_CHECKING: from renku.core.interface.storage import IStorage - from renku.infrastructure.repository import Repository @validate_arguments(config=dict(arbitrary_types_allowed=True)) @@ -571,6 +568,7 @@ def remove_files(dataset): total_size=calculate_total_size(importer.provider_dataset_files), clear_files_before=True, datadir=datadir, + storage=provider_dataset.storage, ) new_dataset.update_metadata_from(provider_dataset) @@ -714,19 +712,32 @@ def update_datasets( raise errors.ParameterError("No files matched the criteria.") return imported_dataset_updates_view_models, [] - git_files = [] + provider_files: Dict[AddProviderInterface, List[DynamicProxy]] = defaultdict(list) unique_remotes = set() linked_files = [] - local_files = [] for file in records: - if file.based_on: - git_files.append(file) - unique_remotes.add(file.based_on.url) - elif file.linked: + if file.linked: linked_files.append(file) else: - local_files.append(file) + if not getattr(file, "provider", None): + if file.based_on: + uri = file.dataset.same_as.value if file.dataset.same_as else file.based_on.url + else: + uri = file.source + try: + file.provider = cast( + AddProviderInterface, + ProviderFactory.get_add_provider(uri), + ) + except errors.DatasetProviderNotFound: + communication.warn(f"Couldn't find provider for file {file.path} in dataset {file.dataset.name}") + continue + + provider_files[file.provider].append(file) + + if isinstance(file.provider, GitProvider): + unique_remotes.add(file.based_on.url) if ref and len(unique_remotes) > 1: raise errors.ParameterError( @@ -741,18 +752,24 @@ def update_datasets( updated = update_linked_files(linked_files, dry_run=dry_run) updated_files.extend(updated) - if git_files and not no_remote: - updated, deleted = update_dataset_git_files(files=git_files, ref=ref, delete=delete, dry_run=dry_run) - updated_files.extend(updated) - deleted_files.extend(deleted) + provider_context: Dict[str, Any] = {} + + for provider, files in provider_files.items(): + if (no_remote and cast(ProviderApi, provider).is_remote) or ( + no_local and not cast(ProviderApi, provider).is_remote + ): + continue - if local_files and not no_local: - updated, deleted, new = update_dataset_local_files( - records=local_files, check_data_directory=check_data_directory + results = provider.update_files( + files=files, + dry_run=dry_run, + delete=delete, + context=provider_context, + ref=ref, + check_data_directory=check_data_directory, ) - updated_files.extend(updated) - deleted_files.extend(deleted) - updated_files.extend(new) + updated_files.extend(r.entity for r in results if r.action == DatasetUpdateAction.UPDATE) + deleted_files.extend(r.entity for r in results if r.action == DatasetUpdateAction.DELETE) if not dry_run: if deleted_files and not delete: @@ -974,62 +991,6 @@ def move_files(dataset_gateway: IDatasetGateway, files: Dict[Path, Path], to_dat datasets_provenance.add_or_update(to_dataset, 
creator=creator) -def update_dataset_local_files( - records: List[DynamicProxy], check_data_directory: bool -) -> Tuple[List[DynamicProxy], List[DynamicProxy], List[DynamicProxy]]: - """Update files metadata from the git history. - - Args: - records(List[DynamicProxy]): File records to update. - check_data_directory(bool): Whether to check the dataset's data directory for new files. - Returns: - Tuple[List[DynamicProxy], List[DynamicProxy]]: Tuple of updated and deleted file records. - """ - updated_files: List[DynamicProxy] = [] - deleted_files: List[DynamicProxy] = [] - new_files: List[DynamicProxy] = [] - progress_text = "Checking for local updates" - - try: - communication.start_progress(progress_text, len(records)) - check_paths = [] - records_to_check = [] - - for file in records: - communication.update_progress(progress_text, 1) - - if file.based_on or file.linked: - continue - - if not (project_context.path / file.entity.path).exists(): - deleted_files.append(file) - continue - - check_paths.append(file.entity.path) - records_to_check.append(file) - - checksums = project_context.repository.get_object_hashes(check_paths) - - for file in records_to_check: - current_checksum = checksums.get(file.entity.path) - if not current_checksum: - deleted_files.append(file) - elif current_checksum != file.entity.checksum: - updated_files.append(file) - elif check_data_directory and not any(file.entity.path == f.entity.path for f in file.dataset.files): - datadir = file.dataset.get_datadir() - try: - get_safe_relative_path(file.entity.path, datadir) - except ValueError: - continue - - new_files.append(file) - finally: - communication.finalize_progress(progress_text) - - return updated_files, deleted_files, new_files - - def _update_datasets_files_metadata(updated_files: List[DynamicProxy], deleted_files: List[DynamicProxy], delete: bool): modified_datasets = {} checksums = project_context.repository.get_object_hashes([file.entity.path for file in updated_files]) @@ -1037,12 +998,16 @@ def _update_datasets_files_metadata(updated_files: List[DynamicProxy], deleted_f new_file = DatasetFile.from_path( path=file.entity.path, based_on=file.based_on, source=file.source, checksum=checksums.get(file.entity.path) ) - modified_datasets[file.dataset.name] = file.dataset + modified_datasets[file.dataset.name] = ( + file.dataset._subject if isinstance(file.dataset, DynamicProxy) else file.dataset + ) file.dataset.add_or_update_files(new_file) if delete: for file in deleted_files: - modified_datasets[file.dataset.name] = file.dataset + modified_datasets[file.dataset.name] = ( + file.dataset._subject if isinstance(file.dataset, DynamicProxy) else file.dataset + ) file.dataset.unlink_file(file.entity.path) datasets_provenance = DatasetsProvenance() @@ -1050,78 +1015,6 @@ def _update_datasets_files_metadata(updated_files: List[DynamicProxy], deleted_f datasets_provenance.add_or_update(dataset, creator=get_git_user(repository=project_context.repository)) -def update_dataset_git_files( - files: List[DynamicProxy], ref: Optional[str], delete: bool, dry_run: bool -) -> Tuple[List[DynamicProxy], List[DynamicProxy]]: - """Update files and dataset metadata according to their remotes. - - Args: - files(List[DynamicProxy]): List of files to be updated. - ref(Optional[str]): Reference to use for update. - delete(bool, optional): Indicates whether to delete files or not (Default value = False). - dry_run(bool): Whether to perform update or only print changes. 
- - Returns: - Tuple[List[DynamicProxy], List[DynamicProxy]]: Tuple of updated and deleted file records. - """ - visited_repos: Dict[str, "Repository"] = {} - updated_files: List[DynamicProxy] = [] - deleted_files: List[DynamicProxy] = [] - - progress_text = "Checking files for updates" - - try: - communication.start_progress(progress_text, len(files)) - for file in files: - communication.update_progress(progress_text, 1) - if not file.based_on: - continue - - based_on = file.based_on - url = based_on.url - if url in visited_repos: - remote_repository = visited_repos[url] - else: - communication.echo(msg="Cloning remote repository...") - path = get_cache_directory_for_repository(url=url) - remote_repository = clone_repository(url=url, path=path, checkout_revision=ref) - visited_repos[url] = remote_repository - - checksum = remote_repository.get_object_hash(path=based_on.path, revision="HEAD") - found = checksum is not None - changed = found and based_on.checksum != checksum - - src = remote_repository.path / based_on.path - dst = project_context.metadata_path.parent / file.entity.path - - if not found: - if not dry_run and delete: - delete_dataset_file(dst, follow_symlinks=True) - project_context.repository.add(dst, force=True) - deleted_files.append(file) - elif changed: - if not dry_run: - # Fetch file if it is tracked by Git LFS - pull_paths_from_storage(remote_repository, remote_repository.path / based_on.path) - if is_linked_file(path=src, project_path=remote_repository.path): - delete_dataset_file(dst, follow_symlinks=True) - create_external_file(target=src.resolve(), path=dst) - else: - shutil.copy(src, dst) - file.based_on = RemoteEntity( - checksum=checksum, path=based_on.path, url=based_on.url # type: ignore - ) - updated_files.append(file) - finally: - communication.finalize_progress(progress_text) - - if not updated_files and (not delete or not deleted_files): - # Nothing to commit or update - return [], deleted_files - - return updated_files, deleted_files - - def update_linked_files(records: List[DynamicProxy], dry_run: bool) -> List[DynamicProxy]: """Update files linked to other files in the project. 
@@ -1230,7 +1123,7 @@ def should_include(filepath: Path) -> bool: continue record = DynamicProxy(file) - record.dataset = dataset + record.dataset = DynamicProxy(dataset) records.append(record) if not check_data_directory: diff --git a/renku/core/dataset/providers/api.py b/renku/core/dataset/providers/api.py index c4c44f405e..02ab716315 100644 --- a/renku/core/dataset/providers/api.py +++ b/renku/core/dataset/providers/api.py @@ -16,25 +16,32 @@ """API for providers.""" import abc -from collections import UserDict +from collections import UserDict, defaultdict from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Protocol, Tuple, Type, Union from renku.core import errors from renku.core.constant import ProviderPriority from renku.core.plugin import hookimpl +from renku.core.util import communication +from renku.core.util.os import delete_dataset_file +from renku.core.util.urls import is_uri_subfolder, resolve_uri from renku.domain_model.constant import NO_VALUE, NoValueType +from renku.domain_model.dataset import RemoteEntity from renku.domain_model.dataset_provider import IDatasetProviderPlugin +from renku.domain_model.project_context import project_context if TYPE_CHECKING: from renku.core.dataset.providers.models import ( DatasetAddMetadata, + DatasetUpdateMetadata, ProviderDataset, ProviderDatasetFile, ProviderParameter, ) from renku.core.interface.storage import IStorage from renku.domain_model.dataset import Dataset, DatasetTag + from renku.infrastructure.immutable import DynamicProxy class ProviderApi(IDatasetProviderPlugin): @@ -42,12 +49,13 @@ class ProviderApi(IDatasetProviderPlugin): priority: Optional[ProviderPriority] = None name: Optional[str] = None + is_remote: Optional[bool] = None def __init__(self, uri: str, **kwargs): self._uri: str = uri or "" def __init_subclass__(cls, **kwargs): - for required_property in ("priority", "name"): + for required_property in ("priority", "name", "is_remote"): if getattr(cls, required_property, None) is None: raise NotImplementedError(f"{required_property} must be set for {cls}") @@ -85,6 +93,13 @@ def get_metadata(self, uri: str, destination: Path, **kwargs) -> List["DatasetAd """Get metadata of files that will be added to a dataset.""" raise NotImplementedError + @abc.abstractmethod + def update_files( + self, files: List["DynamicProxy"], dry_run: bool, delete: bool, context: Dict[str, Any], **kwargs + ) -> List["DatasetUpdateMetadata"]: + """Update dataset files from the remote provider.""" + raise NotImplementedError + class ExportProviderInterface(abc.ABC): """Interface defining export providers.""" @@ -143,6 +158,82 @@ def supports_storage(uri: str) -> bool: """Whether or not this provider supports a given URI storage.""" raise NotImplementedError + def update_files( + self, + files: List["DynamicProxy"], + dry_run: bool, + delete: bool, + context: Dict[str, Any], + **kwargs, + ) -> List["DatasetUpdateMetadata"]: + """Update dataset files from the remote provider.""" + from renku.core.dataset.providers.models import DatasetUpdateAction, DatasetUpdateMetadata + + progress_text = f"Checking remote files for updates in dataset {files[0].dataset.name}" + + results: List[DatasetUpdateMetadata] = [] + + try: + communication.start_progress(progress_text, len(files)) + + storage = self.get_storage() + + # group files by storage to efficiently compute hashes + storage_files_dict: Dict[str, List["DynamicProxy"]] = defaultdict(list) + + for file in files: + if file.dataset.storage: + 
storage_files_dict[file.dataset.storage].append(file) + elif file.based_on: + if not self.supports_storage(file.based_on.url): + raise ValueError( + f"Called {getattr(self, 'name', 'Storage')} provider with file {file.entity.path} " + "which is not supported by this provider" + ) + storage_files_dict[file.based_on.url].append(file) + + for file_storage, files in storage_files_dict.items(): + hashes = storage.get_hashes(uri=file_storage) + for file in files: + communication.update_progress(progress_text, 1) + if not file.based_on: + continue + + dst = project_context.metadata_path.parent / file.entity.path + + hash = next((h for h in hashes if h.uri == file.based_on.url), None) + + if hash: + if not dry_run and ( + not file.dataset.storage + or not is_uri_subfolder(resolve_uri(file.dataset.storage), file.based_on.url) + ): + # Redownload downloaded (not mounted) file + download_storage = self.get_storage() + download_storage.download(file.based_on.url, dst) + file.based_on = RemoteEntity( + checksum=hash.hash if hash.hash else "", url=hash.uri, path=hash.path + ) + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.UPDATE)) + else: + if ( + not dry_run + and delete + and ( + not file.dataset.storage + or not is_uri_subfolder(resolve_uri(file.dataset.storage), file.based_on.url) + ) + ): + # Delete downloaded (not mounted) file + delete_dataset_file(dst, follow_symlinks=True) + project_context.repository.add(dst, force=True) + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + + finally: + communication.finalize_progress(progress_text) + + return results + class CloudStorageProviderType(Protocol): """Intersection type for ``mypy`` hinting in storage classes.""" diff --git a/renku/core/dataset/providers/azure.py b/renku/core/dataset/providers/azure.py index 985a03bae2..e7bc4fcbc5 100644 --- a/renku/core/dataset/providers/azure.py +++ b/renku/core/dataset/providers/azure.py @@ -45,6 +45,7 @@ class AzureProvider(ProviderApi, StorageProviderInterface, AddProviderInterface) priority = ProviderPriority.HIGHEST name = "Azure" + is_remote = True def __init__(self, uri: str): super().__init__(uri=uri) diff --git a/renku/core/dataset/providers/dataverse.py b/renku/core/dataset/providers/dataverse.py index 193da80232..22b938d43e 100644 --- a/renku/core/dataset/providers/dataverse.py +++ b/renku/core/dataset/providers/dataverse.py @@ -37,6 +37,7 @@ AUTHOR_METADATA_TEMPLATE, CONTACT_METADATA_TEMPLATE, DATASET_METADATA_TEMPLATE, + KEYWORDS_METADATA_TEMPLATE, ) from renku.core.dataset.providers.doi import DOIProvider from renku.core.dataset.providers.repository import RepositoryImporter, make_request @@ -81,6 +82,7 @@ class DataverseProvider(ProviderApi, ExportProviderInterface, ImportProviderInte priority = ProviderPriority.HIGH name = "Dataverse" + is_remote = True def __init__(self, uri: str, is_doi: bool = False): super().__init__(uri=uri) @@ -311,6 +313,7 @@ def __init__( name=None, parent_url=None, type=None, + encoding_format=None, ): self.content_size = content_size self.content_url = content_url @@ -321,6 +324,7 @@ def __init__( self.name = name self.parent_url = parent_url self.type = type + self.encoding_format = encoding_format @property def remote_url(self): @@ -384,6 +388,7 @@ def export(self, **kwargs): def _get_dataset_metadata(self): authors, contacts = self._get_creators() subject = self._get_subject() + keywords = self._get_keywords() metadata_template = Template(DATASET_METADATA_TEMPLATE) metadata = 
metadata_template.substitute( name=_escape_json_string(self.dataset.title), @@ -391,6 +396,7 @@ def _get_dataset_metadata(self): contacts=json.dumps(contacts), description=_escape_json_string(self.dataset.description), subject=subject, + keywords=json.dumps(keywords), ) return json.loads(metadata) @@ -425,6 +431,16 @@ def _get_creators(self): return authors, contacts + def _get_keywords(self): + keywords = [] + + for keyword in self.dataset.keywords: + keyword_template = Template(KEYWORDS_METADATA_TEMPLATE) + keyword_str = keyword_template.substitute(keyword=_escape_json_string(keyword)) + keywords.append(json.loads(keyword_str)) + + return keywords + class _DataverseDeposition: """Dataverse record for deposit.""" diff --git a/renku/core/dataset/providers/dataverse_metadata_templates.py b/renku/core/dataset/providers/dataverse_metadata_templates.py index 8338eb515e..06f7a3f667 100644 --- a/renku/core/dataset/providers/dataverse_metadata_templates.py +++ b/renku/core/dataset/providers/dataverse_metadata_templates.py @@ -38,6 +38,12 @@ "multiple": true, "typeName": "datasetContact" }, + { + "value": ${keywords}, + "typeClass": "compound", + "multiple": true, + "typeName": "keyword" + }, { "value": [ { @@ -99,3 +105,14 @@ } } """ + +KEYWORDS_METADATA_TEMPLATE = """ +{ + "keywordValue": { + "typeName": "keywordValue", + "multiple": false, + "typeClass": "primitive", + "value": "${keyword}" + } +} +""" diff --git a/renku/core/dataset/providers/doi.py b/renku/core/dataset/providers/doi.py index 4fd2b74cda..eb3ee19f96 100644 --- a/renku/core/dataset/providers/doi.py +++ b/renku/core/dataset/providers/doi.py @@ -30,6 +30,7 @@ class DOIProvider(ProviderApi, ImportProviderInterface): priority = ProviderPriority.HIGHER name = "DOI" + is_remote = True def __init__(self, uri: str, headers=None, timeout=3): super().__init__(uri=uri) diff --git a/renku/core/dataset/providers/external.py b/renku/core/dataset/providers/external.py index 0767fe0213..6fb881d796 100644 --- a/renku/core/dataset/providers/external.py +++ b/renku/core/dataset/providers/external.py @@ -47,6 +47,7 @@ class ExternalProvider(ProviderApi, StorageProviderInterface, AddProviderInterfa priority = ProviderPriority.HIGHEST name = "External" + is_remote = True def __init__(self, uri: str): super().__init__(uri=get_uri_absolute_path(uri).rstrip("/")) diff --git a/renku/core/dataset/providers/git.py b/renku/core/dataset/providers/git.py index 43a88a758e..1ed53eecd8 100644 --- a/renku/core/dataset/providers/git.py +++ b/renku/core/dataset/providers/git.py @@ -17,22 +17,26 @@ import glob import os +import shutil from collections import defaultdict from pathlib import Path -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union from renku.core import errors +from renku.core.dataset.pointer_file import create_external_file from renku.core.dataset.providers.api import AddProviderInterface, ProviderApi, ProviderPriority from renku.core.storage import pull_paths_from_storage from renku.core.util import communication from renku.core.util.git import clone_repository, get_cache_directory_for_repository -from renku.core.util.os import get_files, is_subpath +from renku.core.util.metadata import is_linked_file +from renku.core.util.os import delete_dataset_file, get_files, is_subpath from renku.core.util.urls import check_url, remove_credentials from renku.domain_model.dataset import RemoteEntity from renku.domain_model.project_context import project_context +from 
renku.infrastructure.immutable import DynamicProxy if TYPE_CHECKING: - from renku.core.dataset.providers.models import DatasetAddMetadata, ProviderParameter + from renku.core.dataset.providers.models import DatasetAddMetadata, DatasetUpdateMetadata, ProviderParameter class GitProvider(ProviderApi, AddProviderInterface): @@ -40,6 +44,7 @@ class GitProvider(ProviderApi, AddProviderInterface): priority = ProviderPriority.NORMAL name = "Git" + is_remote = True @staticmethod def supports(uri: str) -> bool: @@ -178,3 +183,69 @@ def get_file_metadata(src: Path, dst: Path) -> Optional["DatasetAddMetadata"]: communication.warn(f"The following files overwrite each other in the destination project:/n/t{files_str}") return results + + def update_files( + self, + files: List[DynamicProxy], + dry_run: bool, + delete: bool, + context: Dict[str, Any], + ref: Optional[str] = None, + **kwargs, + ) -> List["DatasetUpdateMetadata"]: + """Update dataset files from the remote provider.""" + from renku.core.dataset.providers.models import DatasetUpdateAction, DatasetUpdateMetadata + + if "visited_repos" not in context: + context["visited_repos"] = {} + + progress_text = "Checking git files for updates" + + results: List[DatasetUpdateMetadata] = [] + + try: + communication.start_progress(progress_text, len(files)) + for file in files: + communication.update_progress(progress_text, 1) + if not file.based_on: + continue + + based_on = file.based_on + url = based_on.url + if url in context["visited_repos"]: + remote_repository = context["visited_repos"][url] + else: + communication.echo(msg="Cloning remote repository...") + path = get_cache_directory_for_repository(url=url) + remote_repository = clone_repository(url=url, path=path, checkout_revision=ref) + context["visited_repos"][url] = remote_repository + + checksum = remote_repository.get_object_hash(path=based_on.path, revision="HEAD") + found = checksum is not None + changed = found and based_on.checksum != checksum + + src = remote_repository.path / based_on.path + dst = project_context.metadata_path.parent / file.entity.path + + if not found: + if not dry_run and delete: + delete_dataset_file(dst, follow_symlinks=True) + project_context.repository.add(dst, force=True) + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + elif changed: + if not dry_run: + # Fetch file if it is tracked by Git LFS + pull_paths_from_storage(remote_repository, remote_repository.path / based_on.path) + if is_linked_file(path=src, project_path=remote_repository.path): + delete_dataset_file(dst, follow_symlinks=True) + create_external_file(target=src.resolve(), path=dst) + else: + shutil.copy(src, dst) + file.based_on = RemoteEntity( + checksum=checksum, path=based_on.path, url=based_on.url # type: ignore + ) + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.UPDATE)) + finally: + communication.finalize_progress(progress_text) + + return results diff --git a/renku/core/dataset/providers/local.py b/renku/core/dataset/providers/local.py index 807dc658b5..e537e77958 100644 --- a/renku/core/dataset/providers/local.py +++ b/renku/core/dataset/providers/local.py @@ -19,7 +19,7 @@ import urllib import uuid from pathlib import Path -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional from renku.core import errors from renku.core.config import get_value @@ -33,12 +33,13 @@ from renku.core.storage import check_external_storage, track_paths_in_storage from renku.core.util 
import communication from renku.core.util.metadata import is_protected_path -from renku.core.util.os import get_absolute_path, is_path_empty, is_subpath +from renku.core.util.os import get_absolute_path, get_safe_relative_path, is_path_empty, is_subpath from renku.core.util.urls import check_url from renku.domain_model.project_context import project_context +from renku.infrastructure.immutable import DynamicProxy if TYPE_CHECKING: - from renku.core.dataset.providers.models import DatasetAddMetadata, ProviderParameter + from renku.core.dataset.providers.models import DatasetAddMetadata, DatasetUpdateMetadata, ProviderParameter from renku.domain_model.dataset import Dataset, DatasetTag @@ -47,6 +48,7 @@ class LocalProvider(ProviderApi, AddProviderInterface, ExportProviderInterface): priority = ProviderPriority.LOW name = "Local" + is_remote = False def __init__(self, uri: str): super().__init__(uri=uri) @@ -233,6 +235,58 @@ def get_file_metadata(src: Path) -> DatasetAddMetadata: return results + def update_files( + self, + files: List[DynamicProxy], + dry_run: bool, + delete: bool, + context: Dict[str, Any], + check_data_directory: bool = False, + **kwargs, + ) -> List["DatasetUpdateMetadata"]: + """Update dataset files from the remote provider.""" + from renku.core.dataset.providers.models import DatasetUpdateAction, DatasetUpdateMetadata + + progress_text = "Checking for local updates" + results: List[DatasetUpdateMetadata] = [] + + try: + communication.start_progress(progress_text, len(files)) + check_paths = [] + records_to_check = [] + for file in files: + communication.update_progress(progress_text, 1) + + if file.based_on or file.linked: + continue + + if not (project_context.path / file.entity.path).exists(): + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + continue + + check_paths.append(file.entity.path) + records_to_check.append(file) + + checksums = project_context.repository.get_object_hashes(check_paths) + + for file in records_to_check: + current_checksum = checksums.get(file.entity.path) + if not current_checksum: + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + elif current_checksum != file.entity.checksum: + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.UPDATE)) + elif check_data_directory and not any(file.entity.path == f.entity.path for f in file.dataset.files): + datadir = file.dataset.get_datadir() + try: + get_safe_relative_path(file.entity.path, datadir) + except ValueError: + continue + + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.UPDATE)) + finally: + communication.finalize_progress(progress_text) + return results + def get_exporter( self, dataset: "Dataset", *, tag: Optional["DatasetTag"], path: Optional[str] = None, **kwargs ) -> "LocalExporter": diff --git a/renku/core/dataset/providers/models.py b/renku/core/dataset/providers/models.py index eda44fdaa6..4a9f57bf44 100644 --- a/renku/core/dataset/providers/models.py +++ b/renku/core/dataset/providers/models.py @@ -26,6 +26,7 @@ from renku.command.schema.dataset import DatasetSchema from renku.domain_model.dataset import Dataset +from renku.infrastructure.immutable import DynamicProxy if TYPE_CHECKING: from renku.core.dataset.providers.api import StorageProviderInterface @@ -44,6 +45,13 @@ class DatasetAddAction(Enum): REMOTE_STORAGE = auto() # For URIs that are from a remote storage provider +class DatasetUpdateAction(Enum): + """Types of action when updating a file in 
a dataset.""" + + UPDATE = auto() + DELETE = auto() + + @dataclasses.dataclass class DatasetAddMetadata: """Metadata for a new file that will be added to a dataset.""" @@ -77,6 +85,14 @@ def get_absolute_commit_path(self, project_path: Path) -> str: return os.path.join(project_path, self.entity_path) +@dataclasses.dataclass +class DatasetUpdateMetadata: + """Metadata for updating dataset files.""" + + entity: DynamicProxy + action: DatasetUpdateAction + + class ProviderParameter(NamedTuple): """Provider-specific parameters.""" @@ -87,6 +103,7 @@ class ProviderParameter(NamedTuple): is_flag: bool = False multiple: bool = False type: Optional[Type] = None + metavar: Optional[str] = None class ProviderDataset(Dataset): @@ -131,6 +148,7 @@ def from_dataset(cls, dataset: "Dataset") -> "ProviderDataset": same_as=dataset.same_as, title=dataset.title, version=dataset.version, + storage=dataset.storage, ) @property diff --git a/renku/core/dataset/providers/olos.py b/renku/core/dataset/providers/olos.py index 5faa5b1188..72bd742db0 100644 --- a/renku/core/dataset/providers/olos.py +++ b/renku/core/dataset/providers/olos.py @@ -38,6 +38,7 @@ class OLOSProvider(ProviderApi, ExportProviderInterface): priority = ProviderPriority.HIGH name = "OLOS" + is_remote = True def __init__(self, uri: str, is_doi: bool = False): super().__init__(uri=uri) diff --git a/renku/core/dataset/providers/renku.py b/renku/core/dataset/providers/renku.py index a4f7e5b881..d6015ebf20 100644 --- a/renku/core/dataset/providers/renku.py +++ b/renku/core/dataset/providers/renku.py @@ -43,6 +43,7 @@ class RenkuProvider(ProviderApi, ImportProviderInterface): priority = ProviderPriority.HIGH name = "Renku" + is_remote = True def __init__(self, uri: str, **_): super().__init__(uri=uri) diff --git a/renku/core/dataset/providers/s3.py b/renku/core/dataset/providers/s3.py index d3b387e591..ad7adc31c0 100644 --- a/renku/core/dataset/providers/s3.py +++ b/renku/core/dataset/providers/s3.py @@ -45,6 +45,7 @@ class S3Provider(ProviderApi, StorageProviderInterface, AddProviderInterface): priority = ProviderPriority.HIGHEST name = "S3" + is_remote = True def __init__(self, uri: str): super().__init__(uri=uri) diff --git a/renku/core/dataset/providers/web.py b/renku/core/dataset/providers/web.py index 90d34cd67e..68e5b67c44 100644 --- a/renku/core/dataset/providers/web.py +++ b/renku/core/dataset/providers/web.py @@ -17,18 +17,22 @@ import urllib from pathlib import Path -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union from urllib.parse import urlparse from renku.core import errors from renku.core.constant import CACHE +from renku.core.dataset.dataset_add import copy_file from renku.core.dataset.providers.api import AddProviderInterface, ProviderApi, ProviderPriority +from renku.core.util import communication +from renku.core.util.os import delete_dataset_file from renku.core.util.urls import check_url, remove_credentials from renku.core.util.util import parallel_execute from renku.domain_model.project_context import project_context +from renku.infrastructure.immutable import DynamicProxy if TYPE_CHECKING: - from renku.core.dataset.providers.models import DatasetAddMetadata + from renku.core.dataset.providers.models import DatasetAddMetadata, DatasetUpdateMetadata class WebProvider(ProviderApi, AddProviderInterface): @@ -36,6 +40,7 @@ class WebProvider(ProviderApi, AddProviderInterface): priority = ProviderPriority.LOWEST name = "Web" + is_remote = True 
@staticmethod def supports(uri: str) -> bool: @@ -69,6 +74,80 @@ def get_metadata( multiple=multiple, ) + def update_files( + self, + files: List[DynamicProxy], + dry_run: bool, + delete: bool, + context: Dict[str, Any], + **kwargs, + ) -> List["DatasetUpdateMetadata"]: + """Update dataset files from the remote provider.""" + from renku.core.dataset.providers.models import DatasetAddMetadata, DatasetUpdateAction, DatasetUpdateMetadata + + progress_text = "Checking for local updates" + results: List[DatasetUpdateMetadata] = [] + + download_cache: Dict[str, DatasetAddMetadata] = {} + potential_updates: List[Tuple[DatasetAddMetadata, DynamicProxy]] = [] + + try: + communication.start_progress(progress_text, len(files)) + for file in files: + if not file.source: + continue + destination = project_context.path / file.dataset.get_datadir() + try: + if file.entity.path not in download_cache: + downloaded_files = download_file( + project_path=project_context.path, uri=file.source, destination=destination + ) + + if not any(f.entity_path == file.entity.path for f in downloaded_files): + # File probably comes from an extracted download + downloaded_files = download_file( + project_path=project_context.path, + uri=file.source, + destination=destination, + extract=True, + ) + + download_cache.update({str(f.entity_path): f for f in downloaded_files}) + except errors.OperationError: + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + else: + metadata = download_cache.get(file.entity.path) + + if not metadata: + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.DELETE)) + + if not dry_run and delete: + delete_dataset_file(file.entity.path, follow_symlinks=True) + project_context.repository.add(file.entity.path, force=True) + else: + potential_updates.append((metadata, file)) + + finally: + communication.finalize_progress(progress_text) + + if not potential_updates: + return results + + check_paths: List[Union[Path, str]] = [ + str(u[0].source.relative_to(project_context.path)) for u in potential_updates + ] + # Stage files temporarily so we can get hashes + project_context.repository.add(*check_paths, force=True) + hashes = project_context.repository.get_object_hashes(check_paths) + project_context.repository.remove(*check_paths, index=True) + + for metadata, file in potential_updates: + if file.entity.checksum != hashes.get(metadata.source): + results.append(DatasetUpdateMetadata(entity=file, action=DatasetUpdateAction.UPDATE)) + if not dry_run: + copy_file(metadata, file.dataset, storage=None) + return results + def _ensure_dropbox(url): """Ensure dropbox url is set for file download.""" diff --git a/renku/core/dataset/providers/zenodo.py b/renku/core/dataset/providers/zenodo.py index 2dc31b3a48..26daf7452d 100644 --- a/renku/core/dataset/providers/zenodo.py +++ b/renku/core/dataset/providers/zenodo.py @@ -61,6 +61,7 @@ class ZenodoProvider(ProviderApi, ExportProviderInterface, ImportProviderInterfa priority = ProviderPriority.HIGH name = "Zenodo" + is_remote = True def __init__(self, uri: str, is_doi: bool = False): super().__init__(uri=uri) @@ -70,7 +71,7 @@ def __init__(self, uri: str, is_doi: bool = False): @staticmethod def supports(uri): - """Whether or not this provider supports a given URI.""" + """Whether this provider supports a given URI.""" if "zenodo" in uri.lower(): return True @@ -335,10 +336,7 @@ def __init__(self, dataset, publish, tag): @property def zenodo_url(self): """Returns correct Zenodo URL based on 
environment.""" - if "ZENODO_USE_SANDBOX" in os.environ: - return ZENODO_SANDBOX_URL - - return ZENODO_BASE_URL + return ZENODO_SANDBOX_URL if "ZENODO_USE_SANDBOX" in os.environ else ZENODO_BASE_URL def set_access_token(self, access_token): """Set access token.""" @@ -482,6 +480,7 @@ def attach_metadata(self, dataset, tag): {"name": creator.name, "affiliation": creator.affiliation if creator.affiliation else None} for creator in dataset.creators ], + "keywords": dataset.keywords, } } @@ -532,12 +531,12 @@ def _check_response(response): def _make_request(uri, accept: str = "application/json"): """Execute network request.""" record_id = ZenodoProvider.get_record_id(uri) - url = make_records_url(record_id) + url = make_records_url(record_id, uri=uri) return make_request(url=url, accept=accept) -def make_records_url(record_id): +def make_records_url(record_id, uri: str): """Create URL to access record by ID. Args: @@ -546,4 +545,6 @@ def make_records_url(record_id): Returns: str: Full URL for the record. """ - return urllib.parse.urljoin(ZENODO_BASE_URL, posixpath.join(ZENODO_API_PATH, "records", record_id)) + url = ZENODO_SANDBOX_URL if "sandbox.zenodo.org" in uri.lower() else ZENODO_BASE_URL + + return urllib.parse.urljoin(url, posixpath.join(ZENODO_API_PATH, "records", record_id)) diff --git a/renku/core/migration/m_0005__2_cwl.py b/renku/core/migration/m_0005__2_cwl.py index 1a74bfa830..77fba95f57 100644 --- a/renku/core/migration/m_0005__2_cwl.py +++ b/renku/core/migration/m_0005__2_cwl.py @@ -26,7 +26,7 @@ from urllib.parse import urlparse from cwl_utils.parser import load_document_by_uri -from cwl_utils.parser.cwl_v1_0 import CommandLineTool, InitialWorkDirRequirement +from cwl_utils.parser.cwl_v1_0 import CommandLineTool, Directory, File, InitialWorkDirRequirement from werkzeug.utils import secure_filename from renku.core.constant import RENKU_HOME @@ -167,7 +167,7 @@ def _migrate_single_step(migration_context, cmd_line_tool, path, commit=None, pa matched_input = next(i for i in inputs if i.id.endswith(name)) inputs.remove(matched_input) - path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(matched_input.default["path"]) + path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(matched_input.default.path) stdin = path.resolve().relative_to(project_context.path) id_ = CommandInput.generate_id(base_id, "stdin") @@ -237,7 +237,7 @@ def _migrate_single_step(migration_context, cmd_line_tool, path, commit=None, pa pass if isinstance(matched_input.default, dict): - path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(matched_input.default["path"]) + path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(matched_input.default.path) else: path = Path(matched_input.default) @@ -282,8 +282,8 @@ def _migrate_single_step(migration_context, cmd_line_tool, path, commit=None, pa if prefix and i.inputBinding.separate: prefix += " " - if isinstance(i.default, dict) and "class" in i.default and i.default["class"] in ["File", "Directory"]: - path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(i.default["path"]) + if isinstance(i.default, (File, Directory)): + path = project_context.metadata_path / OLD_WORKFLOW_PATH / Path(str(i.default.path)) path = Path(os.path.realpath(path)).relative_to(project_context.path) run.inputs.append( diff --git a/renku/core/migration/models/v9.py b/renku/core/migration/models/v9.py index a53af67b61..69d67e5a75 100644 --- a/renku/core/migration/models/v9.py +++ b/renku/core/migration/models/v9.py @@ -1544,7 +1544,7 @@ def 
creators_csv(self): @property def keywords_csv(self): """Comma-separated list of keywords associated with dataset.""" - return ", ".join(self.keywords) + return ", ".join(self.keywords or []) @property def tags_csv(self): diff --git a/renku/core/migration/utils/__init__.py b/renku/core/migration/utils/__init__.py index 7ee44c6302..e81ef7fa90 100644 --- a/renku/core/migration/utils/__init__.py +++ b/renku/core/migration/utils/__init__.py @@ -20,6 +20,7 @@ import posixpath import threading import uuid +from typing import Optional, cast from urllib.parse import ParseResult, quote, urljoin, urlparse from renku.core.util.yaml import read_yaml @@ -163,7 +164,7 @@ def read_project_version() -> str: return read_project_version_from_yaml(yaml_data) -def read_latest_agent(): +def read_latest_agent() -> Optional[str]: """Read project version from metadata file.""" import pyld @@ -177,16 +178,16 @@ def read_latest_agent(): yaml_data = read_yaml(metadata_path) jsonld = pyld.jsonld.expand(yaml_data)[0] jsonld = normalize(jsonld) - return _get_jsonld_property(jsonld, "http://schema.org/agent", "pre-0.11.0") + return cast(str, _get_jsonld_property(jsonld, "http://schema.org/agent", "pre-0.11.0")) -def read_project_version_from_yaml(yaml_data): +def read_project_version_from_yaml(yaml_data) -> str: """Read project version from YAML data.""" import pyld jsonld = pyld.jsonld.expand(yaml_data)[0] jsonld = normalize(jsonld) - return _get_jsonld_property(jsonld, "http://schema.org/schemaVersion", "1") + return cast(str, _get_jsonld_property(jsonld, "http://schema.org/schemaVersion", "1")) def _get_jsonld_property(jsonld, property_name, default=None): diff --git a/renku/core/plugin/session.py b/renku/core/plugin/session.py index cb341da83d..00feb558b7 100644 --- a/renku/core/plugin/session.py +++ b/renku/core/plugin/session.py @@ -39,4 +39,6 @@ def get_supported_session_providers() -> List[ISessionProvider]: from renku.core.plugin.pluginmanager import get_plugin_manager pm = get_plugin_manager() - return pm.hook.session_provider() + providers = pm.hook.session_provider() + + return sorted(providers, key=lambda p: p.priority) diff --git a/renku/core/session/__init__.py b/renku/core/session/__init__.py index cdf58ca669..8a6b673e57 100644 --- a/renku/core/session/__init__.py +++ b/renku/core/session/__init__.py @@ -1,7 +1,6 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/renku/core/session/docker.py b/renku/core/session/docker.py index 404dbd8621..15d009dc32 100644 --- a/renku/core/session/docker.py +++ b/renku/core/session/docker.py @@ -1,7 +1,6 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -21,7 +20,7 @@ import webbrowser from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Union, cast from uuid import uuid4 import docker @@ -33,7 +32,7 @@ from renku.core.plugin import hookimpl from renku.core.util import communication from renku.domain_model.project_context import project_context -from renku.domain_model.session import ISessionProvider, Session +from renku.domain_model.session import ISessionProvider, Session, SessionStopStatus if TYPE_CHECKING: from renku.core.dataset.providers.models import ProviderParameter @@ -43,7 +42,7 @@ class DockerSessionProvider(ISessionProvider): """A docker based interactive session provider.""" JUPYTER_PORT = 8888 - # NOTE: Give the docker provider a higher priority so that it's checked first + # NOTE: Give the docker provider the highest priority so that it's checked first priority: ProviderPriority = ProviderPriority.HIGHEST def __init__(self): @@ -54,7 +53,7 @@ def docker_client(self) -> docker.client.DockerClient: Note: This is not a @property, even though it should be, because ``pluggy`` - will call it in that case in unrelated parts of the code that will + will call it in that case in unrelated parts of the code. Raises: errors.DockerError: Exception when docker is not available. Returns: @@ -127,13 +126,104 @@ def get_start_parameters(self) -> List["ProviderParameter"]: return [ ProviderParameter("port", help="Local port to use (random if not specified).", type=int), ProviderParameter("force-build", help="Always build image and don't check if it exists.", is_flag=True), + ProviderParameter( + "blkio-weight", help="Block IO (relative weight), between 10 and 1000, or 0 to disable.", type=int + ), + ProviderParameter("cap-add", help="Add Linux capabilities.", multiple=True), + ProviderParameter("cap-drop", help="Drop Linux capabilities.", multiple=True), + ProviderParameter("cgroup-parent", help="Override the default parent cgroup.", type=str), + ProviderParameter("cpu-count", help="Number of usable CPUs.", type=int), + ProviderParameter("cpu-percent", help="Usable percentage of the available CPUs.", type=int), + ProviderParameter("cpu-period", help="The length of a CPU period in microseconds.", type=int), + ProviderParameter( + "cpu-quota", help="Microseconds of CPU time that the container can get in a CPU period.", type=int + ), + ProviderParameter("cpu-rt-period", help="Limit CPU real-time period in microseconds.", type=int), + ProviderParameter("cpu-rt-runtime", help="Limit CPU real-time runtime in microseconds.", type=int), + ProviderParameter("cpu-shares", help="CPU shares (relative weight).", type=int), + ProviderParameter("cpuset-cpus", help="CPUs in which to allow execution ('0-3', '0,1').", type=str), + ProviderParameter( + "cpuset-mems", help="Memory nodes (MEMs) in which to allow execution ('0-3', '0,1').", type=str + ), + ProviderParameter( + "device-cgroup-rules", + help="A list of cgroup rules to apply to the container.", + multiple=True, + flags=["device-cgroup-rule"], + ), + ProviderParameter("devices", help="Expose host devices to the container.", multiple=True, flags=["device"]), + ProviderParameter("dns", help="Set custom DNS servers.", multiple=True), + ProviderParameter( + "dns-opt", + help="Additional options to be added to the container's ``resolv.conf`` file.", + type=str, + flags=["dns-opt", "dns-option"], + ), + ProviderParameter("dns-search", 
help="DNS search domains.", multiple=True), + ProviderParameter("domainname", help="Container NIS domain name.", type=str), + ProviderParameter("entrypoint", help="The entrypoint for the container.", type=str), + ProviderParameter( + "environment", + help="Environment variables to set inside the container, in the format 'VAR=VAL'", + multiple=True, + flags=["env"], + ), + ProviderParameter( + "group-add", + help="List of additional group names and/or IDs that the container process will run as.", + multiple=True, + ), + ProviderParameter("hostname", help="Optional hostname for the container.", type=str), + ProviderParameter( + "init", help="Run an init inside the container that forwards signals and reaps processes", is_flag=True + ), + ProviderParameter("isolation", help="Isolation technology to use.", type=str), + ProviderParameter("kernel-memory", help="Kernel memory limit (bytes).", type=int, metavar=""), + ProviderParameter("mac-address", help="MAC address to assign to the container.", type=str), + ProviderParameter("mem-reservation", help="Memory soft limit.", type=int, flags=["memory-reservation"]), + ProviderParameter( + "mem-swappiness", + help="Tune container memory swappiness (0 to 100).", + type=int, + flags=["memory-swappiness"], + ), + ProviderParameter("memswap-limit", help="Swap limit equal to memory plus swap.", flags=["memory-swap"]), + ProviderParameter("name", help="The name for this container.", type=str), + ProviderParameter("network", help="Connect a container to a network.", type=str), + ProviderParameter("oom-kill-disable", help="Disable OOM Killer.", is_flag=True), + ProviderParameter("oom-score-adj", help="Tune host's OOM preferences (-1000 to 1000).", type=int), + ProviderParameter("pids-limit", help="Tune a container's PIDs limit.", type=int), + ProviderParameter("platform", help="Set platform if server is multi-platform capable.", type=str), + ProviderParameter("privileged", help="Give extended privileges to this container.", is_flag=True), + ProviderParameter( + "publish-all-ports", help="Publish all ports to the host.", is_flag=True, flags=["publish-all"] + ), + ProviderParameter("read-only", help="Mount the container's root filesystem as read-only", is_flag=True), + ProviderParameter("remove", help="Automatically remove the container when it exits.", flags=["rm"]), + ProviderParameter("runtime", help="Runtime to use with this container.", type=str), + ProviderParameter("security-opt", help="Security Options.", multiple=True), + ProviderParameter("shm-size", help="Size of /dev/shm (bytes).", type=int, metavar=""), + ProviderParameter( + "stdin-open", help="Keep STDIN open even if not attached.", is_flag=True, flags=["interactive"] + ), + ProviderParameter("stop-signal", help="Signal to stop the container.", type=str), + ProviderParameter("tty", help="Allocate a pseudo-TTY.", is_flag=True), + ProviderParameter("user", help="Username or UID", type=str), + ProviderParameter("volume-driver", help="The name of a volume driver/plugin.", type=str), + ProviderParameter( + "volumes", + help="A list of volume mounts (e.g. 
'/host/path/:/mount/path/in/container')", + multiple=True, + flags=["volume"], + ), + ProviderParameter("volumes-from", help="Mount volumes from the specified container(s)", multiple=True), ] def get_open_parameters(self) -> List["ProviderParameter"]: """Returns parameters that can be set for session open.""" return [] - def session_list(self, project_name: str, config: Optional[Dict[str, Any]]) -> List[Session]: + def session_list(self, project_name: str) -> List[Session]: """Lists all the sessions currently running by the given session provider. Returns: @@ -163,7 +253,7 @@ def session_start( cpu_request: Optional[float] = None, mem_request: Optional[str] = None, disk_request: Optional[str] = None, - gpu_request: Optional[str] = None, + gpu_request: Optional[Union[str, int]] = None, **kwargs, ) -> Tuple[str, str]: """Creates an interactive session. @@ -174,6 +264,8 @@ def session_start( show_non_standard_user_warning = True def session_start_helper(consider_disk_request: bool): + nonlocal gpu_request + try: docker_is_running = self.docker_client().ping() if not docker_is_running: @@ -202,8 +294,15 @@ def session_start_helper(consider_disk_request: bool): docker.types.DeviceRequest(count=-1, capabilities=[["compute", "utility"]]) ] else: + if not isinstance(gpu_request, int): + try: + gpu_request = int(gpu_request) + except ValueError: + raise errors.ParameterError( + f"Invalid value for 'gpu': '{gpu_request}'. Valid values are integers or 'all'" + ) resource_requests["device_requests"] = [ - docker.types.DeviceRequest(count=[gpu_request], capabilities=[["compute", "utility"]]) + docker.types.DeviceRequest(count=gpu_request, capabilities=[["compute", "utility"]]) ] # NOTE: set git user @@ -215,15 +314,27 @@ def session_start_helper(consider_disk_request: bool): work_dir = Path(working_dir) / "work" / project_name.split("/")[-1] - volumes = [f"{str(project_context.path.resolve())}:{work_dir}"] + volumes = kwargs.pop("volumes", []) + volumes = list(volumes) + volumes.append(f"{str(project_context.path.resolve())}:{work_dir}") + + environment = {} + passed_env_vars = kwargs.pop("environment", []) + for env_var in passed_env_vars: + var, _, value = env_var.partition("=") + if not var: + raise errors.ParameterError(f"Invalid environment variable: '{env_var}'") + environment[var] = value user = project_context.repository.get_user() - environment = { - "GIT_AUTHOR_NAME": user.name, - "GIT_AUTHOR_EMAIL": user.email, - "GIT_COMMITTER_EMAIL": user.email, - "EMAIL": user.email, - } + environment.update( + { + "GIT_AUTHOR_NAME": user.name, + "GIT_AUTHOR_EMAIL": user.email, + "GIT_COMMITTER_EMAIL": user.email, + "EMAIL": user.email, + } + ) additional_options: Dict[str, Any] = {} @@ -240,7 +351,7 @@ def session_start_helper(consider_disk_request: bool): ) show_non_standard_user_warning = False - additional_options["user"] = "root" + additional_options["user"] = kwargs.pop("user", "root") environment["NB_UID"] = str(os.getuid()) environment["CHOWN_HOME"] = "yes" environment["CHOWN_HOME_OPTS"] = "-R" @@ -268,6 +379,7 @@ def session_start_helper(consider_disk_request: bool): working_dir=str(work_dir), **resource_requests, **additional_options, + **kwargs, ) if not container.ports: @@ -297,29 +409,36 @@ def session_start_helper(consider_disk_request: bool): else: return result, "" - def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> bool: + def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> SessionStopStatus: """Stops all or a 
given interactive session.""" try: docker_containers = ( self._get_docker_containers(project_name) if stop_all else self.docker_client().containers.list(filters={"id": session_name}) + if session_name + else self.docker_client().containers.list() ) - if len(docker_containers) == 0: - return False + n_docker_containers = len(docker_containers) + + if n_docker_containers == 0: + return SessionStopStatus.FAILED if session_name else SessionStopStatus.NO_ACTIVE_SESSION + elif not session_name and len(docker_containers) > 1: + return SessionStopStatus.NAME_NEEDED [c.stop() for c in docker_containers] - return True except docker.errors.APIError as error: raise errors.DockerError(error.msg) + else: + return SessionStopStatus.SUCCESSFUL - def session_open(self, project_name: str, session_name: str, **kwargs) -> bool: + def session_open(self, project_name: str, session_name: Optional[str], **kwargs) -> bool: """Open a given interactive session. Args: project_name(str): Renku project name. - session_name(str): The unique id of the interactive session. + session_name(Optional[str]): The unique id of the interactive session. """ url = self.session_url(session_name) @@ -329,10 +448,14 @@ def session_open(self, project_name: str, session_name: str, **kwargs) -> bool: webbrowser.open(url) return True - def session_url(self, session_name: str) -> Optional[str]: + def session_url(self, session_name: Optional[str]) -> Optional[str]: """Get the URL of the interactive session.""" - for c in self.docker_client().containers.list(): - if c.short_id == session_name and f"{DockerSessionProvider.JUPYTER_PORT}/tcp" in c.ports: + sessions = self.docker_client().containers.list() + + for c in sessions: + if ( + c.short_id == session_name or (not session_name and len(sessions) == 1) + ) and f"{DockerSessionProvider.JUPYTER_PORT}/tcp" in c.ports: host = c.ports[f"{DockerSessionProvider.JUPYTER_PORT}/tcp"][0] return f'http://{host["HostIp"]}:{host["HostPort"]}/?token={c.labels["jupyter_token"]}' return None diff --git a/renku/core/session/renkulab.py b/renku/core/session/renkulab.py index 2ee9cc5462..4693c0d9ac 100644 --- a/renku/core/session/renkulab.py +++ b/renku/core/session/renkulab.py @@ -1,7 +1,6 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
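The docker provider hunks above make the session name optional for ``session_open``/``session_url`` and fall back to the single running session when no name is given. A rough sketch of that selection rule; ``resolve_session`` is a hypothetical helper used for illustration, not a function in the renku codebase::

    from typing import List, Optional


    def resolve_session(session_name: Optional[str], running: List[str]) -> Optional[str]:
        """Pick the session to act on, or return None when a name is required."""
        if session_name:
            # An explicit name must match one of the running sessions.
            return session_name if session_name in running else None
        if len(running) == 1:
            # No name given, but there is exactly one candidate: use it.
            return running[0]
        # Zero or several sessions and no name: the caller has to ask for a name.
        return None


    assert resolve_session(None, ["abc123"]) == "abc123"
    assert resolve_session(None, ["abc123", "def456"]) is None
    assert resolve_session("def456", ["abc123", "def456"]) == "def456"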
@@ -26,6 +25,7 @@ from renku.core import errors from renku.core.config import get_value +from renku.core.constant import ProviderPriority from renku.core.login import read_renku_token from renku.core.plugin import hookimpl from renku.core.session.utils import get_renku_project_name, get_renku_url @@ -34,7 +34,7 @@ from renku.core.util.jwt import is_token_expired from renku.core.util.ssh import SystemSSHConfig from renku.domain_model.project_context import project_context -from renku.domain_model.session import ISessionProvider, Session +from renku.domain_model.session import ISessionProvider, Session, SessionStopStatus if TYPE_CHECKING: from renku.core.dataset.providers.models import ProviderParameter @@ -44,6 +44,8 @@ class RenkulabSessionProvider(ISessionProvider): """A session provider that uses the notebook service API to launch sessions.""" DEFAULT_TIMEOUT_SECONDS = 300 + # NOTE: Give the renkulab provider the lowest priority so that it's checked last + priority: ProviderPriority = ProviderPriority.LOWEST def __init__(self): self.__renku_url: Optional[str] = None @@ -187,7 +189,7 @@ def _cleanup_ssh_connection_configs( gotten from the server. """ if not running_sessions: - running_sessions = self.session_list("", None, ssh_garbage_collection=False) + running_sessions = self.session_list(project_name="", ssh_garbage_collection=False) system_config = SystemSSHConfig() @@ -199,7 +201,8 @@ def _cleanup_ssh_connection_configs( if path not in session_config_paths: path.unlink() - def _remote_head_hexsha(self): + @staticmethod + def _remote_head_hexsha(): remote = get_remote(repository=project_context.repository) if remote is None: @@ -221,7 +224,8 @@ def _send_renku_request(self, req_type: str, *args, **kwargs): ) return res - def _project_name_from_full_project_name(self, project_name: str) -> str: + @staticmethod + def _project_name_from_full_project_name(project_name: str) -> str: """Get just project name of project name if in owner/name form.""" if "/" not in project_name: return project_name @@ -282,9 +286,7 @@ def get_open_parameters(self) -> List["ProviderParameter"]: ProviderParameter("ssh", help="Open a remote terminal through SSH.", is_flag=True), ] - def session_list( - self, project_name: str, config: Optional[Dict[str, Any]], ssh_garbage_collection: bool = True - ) -> List[Session]: + def session_list(self, project_name: str, ssh_garbage_collection: bool = True) -> List[Session]: """Lists all the sessions currently running by the given session provider. 
Returns: @@ -398,11 +400,16 @@ def session_start( ) raise errors.RenkulabSessionError("Cannot start session via the notebook service because " + res.text) - def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> bool: + def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> SessionStopStatus: """Stops all sessions (for the given project) or a specific interactive session.""" responses = [] + sessions = self.session_list(project_name=project_name) + n_sessions = len(sessions) + + if n_sessions == 0: + return SessionStopStatus.NO_ACTIVE_SESSION + if stop_all: - sessions = self.session_list(project_name=project_name, config=None) for session in sessions: responses.append( self._send_renku_request( @@ -410,33 +417,50 @@ def session_stop(self, project_name: str, session_name: Optional[str], stop_all: ) ) self._wait_for_session_status(session.id, "stopping") - else: + elif session_name: responses.append( self._send_renku_request( "delete", f"{self._notebooks_url()}/servers/{session_name}", headers=self._auth_header() ) ) self._wait_for_session_status(session_name, "stopping") + elif n_sessions == 1: + responses.append( + self._send_renku_request( + "delete", f"{self._notebooks_url()}/servers/{sessions[0].id}", headers=self._auth_header() + ) + ) + self._wait_for_session_status(sessions[0].id, "stopping") + else: + return SessionStopStatus.NAME_NEEDED self._cleanup_ssh_connection_configs(project_name) - return all([response.status_code == 204 for response in responses]) if responses else False + n_successfully_stopped = len([r for r in responses if r.status_code == 204]) - def session_open(self, project_name: str, session_name: str, ssh: bool = False, **kwargs) -> bool: + return SessionStopStatus.SUCCESSFUL if n_successfully_stopped == n_sessions else SessionStopStatus.FAILED + + def session_open(self, project_name: str, session_name: Optional[str], ssh: bool = False, **kwargs) -> bool: """Open a given interactive session. Args: project_name(str): Renku project name. - session_name(str): The unique id of the interactive session. + session_name(Optional[str]): The unique id of the interactive session. ssh(bool): Whether to open an SSH connection or a normal browser interface. """ - sessions = self.session_list("", None) + sessions = self.session_list(project_name="") system_config = SystemSSHConfig() name = self._project_name_from_full_project_name(project_name) ssh_prefix = f"{system_config.renku_host}-{name}-" + if not session_name: + if len(sessions) == 1: + session_name = sessions[0].id + else: + return False + if session_name.startswith(ssh_prefix): - # NOTE: use passed in ssh connection name instead of session id by accident + # NOTE: User passed in ssh connection name instead of session id by accident session_name = session_name.replace(ssh_prefix, "", 1) if not any(s.id == session_name for s in sessions): diff --git a/renku/core/session/session.py b/renku/core/session/session.py index 6ba04307c9..d6d9e8f608 100644 --- a/renku/core/session/session.py +++ b/renku/core/session/session.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +30,7 @@ from renku.core.util import communication from renku.core.util.os import safe_read_yaml from renku.core.util.ssh import SystemSSHConfig, generate_ssh_keys -from renku.domain_model.session import ISessionProvider, Session +from renku.domain_model.session import ISessionProvider, Session, SessionStopStatus def _safe_get_provider(provider: str) -> ISessionProvider: @@ -50,27 +49,57 @@ class SessionList(NamedTuple): @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def session_list(config_path: Optional[str], provider: Optional[str] = None) -> SessionList: +def search_sessions(name: str, provider: Optional[str] = None) -> List[str]: + """Get all sessions whose names start with the given name. + + Args: + name(str): The name to search for. + provider(Optional[str]): Name of the session provider to use (Default value = None). + + Returns: + All sessions whose name starts with ``name``. + """ + sessions = session_list(provider=provider).sessions + name = name.lower() + return [s.id for s in sessions if s.id.lower().startswith(name)] + + +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def search_session_providers(name: str) -> List[str]: + """Get all session providers whose names start with the given name. + + Args: + name(str): The name to search for. + + Returns: + All session providers whose name starts with ``name``. + """ + from renku.core.plugin.session import get_supported_session_providers + + name = name.lower() + return [p.name for p in get_supported_session_providers() if p.name.lower().startswith(name)] + + +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def session_list(*, provider: Optional[str] = None) -> SessionList: """List interactive sessions. Args: - config_path(str, optional): Path to config YAML. - provider(str, optional): Name of the session provider to use. + provider(Optional[str]): Name of the session provider to use (Default value = None). + Returns: The list of sessions, whether they're all local sessions and potential warnings raised. """ def list_sessions(session_provider: ISessionProvider) -> List[Session]: try: - return session_provider.session_list(config=config, project_name=project_name) + return session_provider.session_list(project_name=project_name) except errors.RenkulabSessionGetUrlError: if provider: raise return [] project_name = get_renku_project_name() - config = safe_read_yaml(config_path) if config_path else dict() - providers = [_safe_get_provider(provider)] if provider else get_supported_session_providers() all_sessions = [] @@ -179,12 +208,12 @@ def session_stop(session_name: Optional[str], stop_all: bool = False, provider: """Stop interactive session. Args: - session_name(str): Name of the session to open. + session_name(Optional[str]): Name of the session to stop. stop_all(bool): Whether to stop all sessions or just the specified one. - provider(str, optional): Name of the session provider to use. + provider(Optional[str]): Name of the session provider to use (Default value = None). 
""" - def stop_sessions(session_provider: ISessionProvider) -> bool: + def stop_sessions(session_provider: ISessionProvider) -> SessionStopStatus: try: return session_provider.session_stop( project_name=project_name, session_name=session_name, stop_all=stop_all @@ -192,55 +221,63 @@ def stop_sessions(session_provider: ISessionProvider) -> bool: except errors.RenkulabSessionGetUrlError: if provider: raise - return False + return SessionStopStatus.FAILED - session_detail = "all sessions" if stop_all else f"session {session_name}" + session_detail = "all sessions" if stop_all else f"session {session_name}" if session_name else "session" project_name = get_renku_project_name() providers = [_safe_get_provider(provider)] if provider else get_supported_session_providers() - is_stopped = False + statues = [] warning_messages = [] with communication.busy(msg=f"Waiting for {session_detail} to stop..."): for session_provider in sorted(providers, key=lambda p: p.priority): try: - is_stopped = stop_sessions(session_provider) + status = stop_sessions(session_provider) except errors.RenkuException as e: warning_messages.append(f"Cannot stop sessions in provider '{session_provider.name}': {e}") + else: + statues.append(status) - if is_stopped and session_name: + # NOTE: The given session name was stopped; don't continue + if session_name and not stop_all and status == SessionStopStatus.SUCCESSFUL: break if warning_messages: for message in warning_messages: communication.warn(message) - if not is_stopped: - if not session_name: - raise errors.ParameterError("There are no running sessions.") + if not statues: + return + elif all(s == SessionStopStatus.NO_ACTIVE_SESSION for s in statues): + raise errors.ParameterError("There are no running sessions.") + elif session_name and not any(s == SessionStopStatus.SUCCESSFUL for s in statues): raise errors.ParameterError(f"Could not find '{session_name}' among the running sessions.") + elif any(s == SessionStopStatus.FAILED for s in statues): + raise errors.ParameterError("Cannot stop some sessions") + elif not session_name and not any(s == SessionStopStatus.SUCCESSFUL for s in statues): + raise errors.ParameterError("Session name is missing") @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def session_open(session_name: str, provider: Optional[str] = None, **kwargs): +def session_open(session_name: Optional[str], provider: Optional[str] = None, **kwargs): """Open interactive session in the browser. Args: - session_name(str): Name of the session to open. - provider(str, optional): Name of the session provider to use. + session_name(Optional[str]): Name of the session to open. + provider(Optional[str]): Name of the session provider to use. 
""" - providers = [_safe_get_provider(provider)] if provider else get_supported_session_providers() project_name = get_renku_project_name() - found = False for session_provider in providers: if session_provider.session_open(project_name, session_name, **kwargs): - found = True - break + return - if not found: + if session_name: raise errors.ParameterError(f"Could not find '{session_name}' among the running sessions.") + else: + raise errors.ParameterError("Session name is missing") @validate_arguments(config=dict(arbitrary_types_allowed=True)) diff --git a/renku/core/session/utils.py b/renku/core/session/utils.py index 9604e55894..c6f16de97c 100644 --- a/renku/core/session/utils.py +++ b/renku/core/session/utils.py @@ -1,7 +1,6 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/renku/core/storage.py b/renku/core/storage.py index d2429c589d..c42df05e2f 100644 --- a/renku/core/storage.py +++ b/renku/core/storage.py @@ -15,7 +15,6 @@ # limitations under the License. """Logic for handling a data storage.""" -import csv import functools import itertools import os @@ -26,7 +25,7 @@ from pathlib import Path from shutil import move, which from subprocess import PIPE, STDOUT, check_output, run -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union import pathspec @@ -39,7 +38,6 @@ from renku.domain_model.project_context import project_context if TYPE_CHECKING: - from renku.domain_model.entity import Entity # type: ignore from renku.infrastructure.repository import Repository @@ -99,12 +97,12 @@ def wrapper(*args, **kwargs): @functools.lru_cache -def storage_installed(): +def storage_installed() -> bool: """Verify that git-lfs is installed and on system PATH.""" return bool(which("git-lfs")) -def storage_installed_locally(): +def storage_installed_locally() -> bool: """Verify that git-lfs is installed for the project.""" repo_config = project_context.repository.get_configuration(scope="local") return repo_config.has_section('filter "lfs"') @@ -129,7 +127,7 @@ def check_external_storage(): return is_storage_installed -def renku_lfs_ignore(): +def renku_lfs_ignore() -> pathspec.PathSpec: """Gets pathspec for files to not add to LFS.""" ignore_path = project_context.path / RENKU_LFS_IGNORE_PATH @@ -141,14 +139,14 @@ def renku_lfs_ignore(): return pathspec.PathSpec.from_lines("renku_gitwildmatch", lines) -def get_minimum_lfs_file_size(): +def get_minimum_lfs_file_size() -> int: """The minimum size of a file in bytes to be added to lfs.""" size = get_value("renku", "lfs_threshold") return parse_file_size(size) -def init_external_storage(force=False): +def init_external_storage(force: bool = False) -> None: """Initialize the external storage for data.""" try: result = run( @@ -166,13 +164,13 @@ def init_external_storage(force=False): @check_external_storage_wrapper -def track_paths_in_storage(*paths): +def track_paths_in_storage(*paths: Union[Path, str]) -> Optional[List[str]]: """Track paths in the external storage.""" if not project_context.external_storage_requested or 
not check_external_storage(): - return + return None # Calculate which paths can be tracked in lfs - track_paths = [] + track_paths: List[str] = [] attrs = project_context.repository.get_attributes(*paths) for path in paths: @@ -210,7 +208,7 @@ def track_paths_in_storage(*paths): universal_newlines=True, ) - if result.returncode != 0: + if result and result.returncode != 0: raise errors.GitLFSError(f"Error executing 'git lfs track: \n {result.stdout}") except (KeyboardInterrupt, OSError) as e: raise errors.ParameterError(f"Couldn't run 'git lfs':\n{e}") @@ -227,7 +225,7 @@ def track_paths_in_storage(*paths): @check_external_storage_wrapper -def untrack_paths_from_storage(*paths): +def untrack_paths_from_storage(*paths: Union[Path, str]) -> None: """Untrack paths from the external storage.""" try: result = run_command( @@ -239,25 +237,25 @@ def untrack_paths_from_storage(*paths): universal_newlines=True, ) - if result.returncode != 0: + if result and result.returncode != 0: raise errors.GitLFSError(f"Error executing 'git lfs untrack: \n {result.stdout}") except (KeyboardInterrupt, OSError) as e: raise errors.ParameterError(f"Couldn't run 'git lfs':\n{e}") @check_external_storage_wrapper -def list_tracked_paths(): +def list_tracked_paths() -> List[Path]: """List paths tracked in lfs.""" try: files = check_output(_CMD_STORAGE_LIST, cwd=project_context.path, encoding="UTF-8") except (KeyboardInterrupt, OSError) as e: raise errors.ParameterError(f"Couldn't run 'git lfs ls-files':\n{e}") - files_split = [project_context.path / f for f in files.splitlines()] + files_split: List[Path] = [project_context.path / f for f in files.splitlines()] return files_split @check_external_storage_wrapper -def list_unpushed_lfs_paths(repository: "Repository"): +def list_unpushed_lfs_paths(repository: "Repository") -> List[Path]: """List paths tracked in lfs for a repository.""" if len(repository.remotes) < 1 or (repository.active_branch and not repository.active_branch.remote_branch): @@ -279,7 +277,7 @@ def list_unpushed_lfs_paths(repository: "Repository"): @check_external_storage_wrapper -def pull_paths_from_storage(repository: "Repository", *paths): +def pull_paths_from_storage(repository: "Repository", *paths: Union[Path, str]): """Pull paths from LFS.""" project_dict = defaultdict(list) @@ -304,19 +302,19 @@ def pull_paths_from_storage(repository: "Repository", *paths): universal_newlines=True, ) - if result.returncode != 0: + if result and result.returncode != 0: raise errors.GitLFSError(f"Cannot pull LFS objects from server:\n {result.stdout}") @check_external_storage_wrapper -def clean_storage_cache(*check_paths): +def clean_storage_cache(*check_paths: Union[Path, str]) -> Tuple[List[str], List[str]]: """Remove paths from lfs cache.""" project_dict = defaultdict(list) - repositories = {} - tracked_paths = {} - unpushed_paths = {} - untracked_paths = [] - local_only_paths = [] + repositories: Dict[Path, "Repository"] = {} + tracked_paths: Dict[Path, List[Path]] = {} + unpushed_paths: Dict[Path, List[Path]] = {} + untracked_paths: List[str] = [] + local_only_paths: List[str] = [] repository = project_context.repository @@ -386,7 +384,7 @@ def clean_storage_cache(*check_paths): @check_external_storage_wrapper -def checkout_paths_from_storage(*paths): +def checkout_paths_from_storage(*paths: Union[Path, str]): """Checkout a paths from LFS.""" result = run_command( _CMD_STORAGE_CHECKOUT, @@ -397,18 +395,18 @@ def checkout_paths_from_storage(*paths): universal_newlines=True, ) - if result.returncode != 0: + if 
result and result.returncode != 0: raise errors.GitLFSError(f"Error executing 'git lfs checkout: \n {result.stdout}") -def check_requires_tracking(*paths): +def check_requires_tracking(*paths: Union[Path, str]) -> Optional[List[str]]: """Check paths and return a list of those that must be tracked.""" if not project_context.external_storage_requested: - return + return None attrs = project_context.repository.get_attributes(*paths) - track_paths = [] + track_paths: List[str] = [] for path in paths: absolute_path = Path(os.path.abspath(project_context.path / path)) @@ -470,7 +468,7 @@ def add_migrate_pattern(pattern, collection): return includes, excludes -def check_lfs_migrate_info(everything=False, use_size_filter=True): +def check_lfs_migrate_info(everything: bool = False, use_size_filter: bool = True) -> List[str]: """Return list of file groups in history should be in LFS.""" ref = ( ["--everything"] @@ -510,7 +508,7 @@ def check_lfs_migrate_info(everything=False, use_size_filter=True): if lfs_output.returncode != 0: raise errors.GitLFSError(f"Error executing 'git lfs migrate info: \n {lfs_output.stdout}") - groups = [] + groups: List[str] = [] files_re = re.compile(r"(.*\s+[\d.]+\s+\S+).*") for line in lfs_output.stdout.split("\n"): @@ -526,7 +524,7 @@ def check_lfs_migrate_info(everything=False, use_size_filter=True): return groups -def migrate_files_to_lfs(paths): +def migrate_files_to_lfs(paths: List[str]): """Migrate files to Git LFS.""" if paths: includes: List[str] = ["--include", ",".join(paths)] @@ -534,11 +532,7 @@ def migrate_files_to_lfs(paths): else: includes, excludes = get_lfs_migrate_filters() - tempdir = Path(tempfile.mkdtemp()) - map_path = tempdir / "objectmap.csv" - object_map = [f"--object-map={map_path}"] - - command = _CMD_STORAGE_MIGRATE_IMPORT + includes + excludes + object_map + command = _CMD_STORAGE_MIGRATE_IMPORT + includes + excludes try: lfs_output = run(command, stdout=PIPE, stderr=STDOUT, cwd=project_context.path, text=True) @@ -547,78 +541,3 @@ def migrate_files_to_lfs(paths): if lfs_output.returncode != 0: raise errors.GitLFSError(f"Error executing 'git lfs migrate import: \n {lfs_output.stdout}") - - with open(map_path, newline="") as csvfile: - reader = csv.reader(csvfile, delimiter=",") - - commit_sha_mapping = [(r[0], r[1]) for r in reader] - - os.remove(map_path) - - sha_mapping = dict() - - repo_root = Path(".") - repository = project_context.repository - - for old_commit_sha, new_commit_sha in commit_sha_mapping: - old_commit = repository.get_commit(old_commit_sha) - new_commit = repository.get_commit(new_commit_sha) - processed = set() - - for diff in old_commit.get_changes(): - path_obj = Path(diff.b_path) - - # NOTE: Get git object hash mapping for files and parent folders - while path_obj != repo_root: - if path_obj in processed: - break - - path_str = str(path_obj) - old_sha = old_commit.tree[path_str].hexsha - new_sha = new_commit.tree[path_str].hexsha - - sha_mapping[old_sha] = new_sha - - processed.add(path_obj) - path_obj = path_obj.parent - - def _map_checksum(entity, checksum_mapping) -> Optional["Entity"]: - """Update the checksum and id of an entity based on a mapping.""" - from renku.domain_model.entity import Entity - from renku.domain_model.provenance.activity import Collection - - if entity.checksum not in checksum_mapping: - return None - - new_checksum = checksum_mapping[entity.checksum] - - if isinstance(entity, Collection) and entity.members: - members = [] - for member in entity.members: - new_member = _map_checksum(member, 
checksum_mapping) - if new_member: - members.append(new_member) - else: - members.append(member) - new_entity: Entity = Collection(checksum=new_checksum, path=entity.path, members=members) - else: - new_entity = Entity(checksum=new_checksum, path=entity.path) - - return new_entity - - def _map_checksum_old(entity, checksum_mapping): - """Update the checksum and id of an entity based on a mapping.""" - # TODO: Remove this method once moved to Entity with 'id' field - from renku.domain_model.provenance.activity import Collection - - if entity.checksum not in checksum_mapping: - return - - new_checksum = checksum_mapping[entity.checksum] - - entity._id = entity._id.replace(entity.checksum, new_checksum) - entity.checksum = new_checksum - - if isinstance(entity, Collection) and entity.members: - for member in entity.members: - _map_checksum_old(member, checksum_mapping) diff --git a/renku/core/workflow/converters/cwl.py b/renku/core/workflow/converters/cwl.py index 9fe71a514d..04da4b036a 100644 --- a/renku/core/workflow/converters/cwl.py +++ b/renku/core/workflow/converters/cwl.py @@ -148,7 +148,7 @@ def workflow_convert( step_filename = Path(f"{uuid4()}.cwl") step_path = (tmpdir / step_filename).resolve() write_yaml(step_path, step.run.save()) - step.run = str(step_path) + step.run = f"file://{step_path}" if filename is None: filename = Path(f"parent_{uuid4()}.cwl") else: diff --git a/renku/data/pre-commit.sh b/renku/data/pre-commit.sh index c5ffb2f772..e413376cda 100755 --- a/renku/data/pre-commit.sh +++ b/renku/data/pre-commit.sh @@ -21,10 +21,17 @@ # Find all modified or added files, and do nothing if there aren't any. export RENKU_DISABLE_VERSION_CHECK=true -IFS=$'\n' read -r -d '' -a MODIFIED_FILES \ - <<< "$(git diff --name-only --cached --diff-filter=M)" -IFS=$'\n' read -r -d '' -a ADDED_FILES \ - <<< "$(git diff --name-only --cached --diff-filter=A)" + +declare -a MODIFIED_FILES=() +while IFS= read -r -d '' file; do + MODIFIED_FILES+=( "$file" ) +done < <(git diff -z --name-only --cached --diff-filter=M) + +declare -a ADDED_FILES=() +while IFS= read -r -d '' file; do + ADDED_FILES+=( "$file" ) +done < <(git diff -z --name-only --cached --diff-filter=A) + if [ ${#MODIFIED_FILES[@]} -ne 0 ] || [ ${#ADDED_FILES[@]} -ne 0 ]; then # Verify that renku is installed; if not, warn and exit. 
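The ``pre-commit.sh`` hunk above reads the staged paths from NUL-delimited ``git diff -z`` output so that file names containing spaces or newlines survive intact (the same reason ``ls_tree`` gains ``z=True`` further down). A minimal sketch of the same idea in Python, assuming it is run inside a git repository; it is illustrative only, not renku code::

    import subprocess

    # ``-z`` terminates every path with a NUL byte instead of a newline.
    output = subprocess.run(
        ["git", "diff", "-z", "--name-only", "--cached", "--diff-filter=M"],
        capture_output=True,
        text=True,
        check=True,
    ).stdout

    # The output ends with a trailing NUL, so drop the resulting empty element.
    modified_files = [path for path in output.split("\x00") if path]
    print(modified_files)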
diff --git a/renku/domain_model/dataset.py b/renku/domain_model/dataset.py index 3f5738d8a7..d6bfcf1ae6 100644 --- a/renku/domain_model/dataset.py +++ b/renku/domain_model/dataset.py @@ -500,7 +500,7 @@ def creators_full_csv(self): @property def keywords_csv(self): """Comma-separated list of keywords associated with dataset.""" - return ", ".join(self.keywords) + return ", ".join(self.keywords or []) def get_datadir(self) -> Path: """Return dataset's data directory relative to project's root.""" diff --git a/renku/domain_model/session.py b/renku/domain_model/session.py index e441b55bab..1c2cf5d899 100644 --- a/renku/domain_model/session.py +++ b/renku/domain_model/session.py @@ -19,6 +19,7 @@ from abc import ABCMeta, abstractmethod from datetime import datetime +from enum import Enum, auto from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple @@ -28,6 +29,15 @@ from renku.core.dataset.providers.models import ProviderParameter +class SessionStopStatus(Enum): + """Status code returned when stopping sessions.""" + + NO_ACTIVE_SESSION = auto() + SUCCESSFUL = auto() + FAILED = auto() # When all or some of (requested) sessions can't be stopped + NAME_NEEDED = auto() + + class Session: """Interactive session.""" @@ -53,7 +63,7 @@ def __init__( class ISessionProvider(metaclass=ABCMeta): - """Abstract class for a interactive session provider.""" + """Abstract class for an interactive session provider.""" priority: ProviderPriority = ProviderPriority.NORMAL @@ -112,12 +122,11 @@ def get_open_parameters(self) -> List[ProviderParameter]: pass @abstractmethod - def session_list(self, project_name: str, config: Optional[Dict[str, Any]]) -> List[Session]: + def session_list(self, project_name: str) -> List[Session]: """Lists all the sessions currently running by the given session provider. Args: project_name(str): Renku project name. - config(Dict[str, Any], optional): Path to the session provider specific configuration YAML. Returns: a list of sessions. @@ -153,27 +162,27 @@ def session_start( pass @abstractmethod - def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> bool: + def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> SessionStopStatus: """Stops all or a given interactive session. Args: project_name(str): Project's name. session_name(str, optional): The unique id of the interactive session. - stop_all(bool): Specifies whether or not to stop all the running interactive sessions. + stop_all(bool): Specifies whether to stop all the running interactive sessions. Returns: - bool: True in case session(s) has been successfully stopped + SessionStopStatus: The status of running and stopped sessions """ pass @abstractmethod - def session_open(self, project_name: str, session_name: str, **kwargs) -> bool: + def session_open(self, project_name: str, session_name: Optional[str], **kwargs) -> bool: """Open a given interactive session. Args: project_name(str): Renku project name. - session_name(str): The unique id of the interactive session. + session_name(Optional[str]): The unique id of the interactive session. """ pass @@ -194,7 +203,7 @@ def pre_start_checks(self, **kwargs): The expectation is that this method will abort the session start if the checks are not successful or will take corrective actions to - make sure that the session launches successfully. By default this method does not do any checks. + make sure that the session launches successfully. 
By default, this method does not do any checks. """ return None diff --git a/renku/infrastructure/repository.py b/renku/infrastructure/repository.py index 22578d6ae5..3d0dbd5870 100644 --- a/renku/infrastructure/repository.py +++ b/renku/infrastructure/repository.py @@ -917,8 +917,10 @@ def get_existing_paths_in_revision( if files: # NOTE: check existing files for batch in split_paths(*files): - existing_paths = git.Git(working_dir=self.path).ls_tree(*batch, r=revision, name_only=True) - result.extend(existing_paths.splitlines()) + existing_paths = git.Git(working_dir=self.path).ls_tree( + *batch, r=revision, name_only=True, z=True + ) + result.extend(existing_paths.strip("\x00").split("\x00")) if dirs: # NOTE: check existing dirs @@ -930,7 +932,7 @@ def get_existing_paths_in_revision( return result else: - existing_files = git.Git().ls_tree(r=revision, name_only=True).splitlines() + existing_files = git.Git().ls_tree(r=revision, name_only=True, z=True).strip("\x00").split("\x00") existing_dirs = git.Git().ls_tree(r=revision, name_only=True, d=True).splitlines() return existing_dirs + existing_files except git.GitCommandError as e: diff --git a/renku/ui/cli/dataset.py b/renku/ui/cli/dataset.py index 7359b28ad1..956592eb1b 100644 --- a/renku/ui/cli/dataset.py +++ b/renku/ui/cli/dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -186,7 +185,7 @@ You can use the ``--destination`` or ``-d`` flag to set the location where the new data is copied to. This location will be under the dataset's data directory and -will be created if does not exists. +will be created if it does not exist. .. code-block:: console @@ -374,7 +373,7 @@ This will export the dataset ``my-dataset`` to ``zenodo.org`` as a draft, allowing for publication later on. If the dataset has any tags set, you -can chose if the repository `HEAD` version or one of the tags should be +can choose if the repository `HEAD` version or one of the tags should be exported. The remote version will be set to the local tag that is being exported. 
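With ``ISessionProvider.session_stop`` now returning a ``SessionStopStatus`` (see the ``renku/domain_model/session.py`` hunks above), each provider maps its outcome onto the new enum along these lines. The ``stop_status`` helper below is an illustrative condensation of that decision table, not the actual provider code::

    from enum import Enum, auto
    from typing import Optional


    class SessionStopStatus(Enum):
        NO_ACTIVE_SESSION = auto()
        SUCCESSFUL = auto()
        FAILED = auto()
        NAME_NEEDED = auto()


    def stop_status(n_running: int, session_name: Optional[str], stopped_ok: bool) -> SessionStopStatus:
        """Condensed decision table used when stopping sessions (illustrative)."""
        if n_running == 0:
            # Nothing is running: a missing named session counts as a failure,
            # otherwise there simply is no active session.
            return SessionStopStatus.FAILED if session_name else SessionStopStatus.NO_ACTIVE_SESSION
        if not session_name and n_running > 1:
            # Several sessions but no name given: the caller must disambiguate.
            return SessionStopStatus.NAME_NEEDED
        return SessionStopStatus.SUCCESSFUL if stopped_ok else SessionStopStatus.FAILED


    assert stop_status(0, None, False) is SessionStopStatus.NO_ACTIVE_SESSION
    assert stop_status(2, None, True) is SessionStopStatus.NAME_NEEDED
    assert stop_status(1, None, True) is SessionStopStatus.SUCCESSFUL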
@@ -519,16 +518,7 @@ from renku.command.format.dataset_tags import DATASET_TAGS_FORMATS from renku.command.format.datasets import DATASETS_COLUMNS, DATASETS_FORMATS from renku.domain_model.constant import NO_VALUE, NoValueType - - -def _complete_datasets(ctx, param, incomplete): - from renku.command.dataset import search_datasets_command - - try: - result = search_datasets_command().build().execute(name=incomplete) - return result.output - except Exception: - return [] +from renku.ui.cli.utils.click import shell_complete_datasets @click.group() @@ -628,7 +618,7 @@ def create(name, title, description, creators, metadata, keyword, storage, datad @dataset.command() -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.option("-t", "--title", default=NO_VALUE, type=click.UNPROCESSED, help="Title of the dataset.") @click.option("-d", "--description", default=NO_VALUE, type=click.UNPROCESSED, help="Dataset's description.") @click.option( @@ -747,7 +737,7 @@ def edit(name, title, description, creators, metadata, metadata_source, keywords @dataset.command("show") @click.option("-t", "--tag", default=None, type=click.STRING, help="Tag for which to show dataset metadata.") -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) def show(tag, name): """Show metadata of a dataset.""" from renku.command.dataset import show_dataset_command @@ -794,7 +784,7 @@ def add_provider_options(*param_decls, **attrs): @dataset.command() -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.argument("urls", type=click.Path(), nargs=-1) @click.option("-f", "--force", is_flag=True, help="Allow adding otherwise ignored files.") @click.option("-o", "--overwrite", is_flag=True, help="Overwrite existing files.") @@ -838,7 +828,7 @@ def add(name, urls, force, overwrite, create, destination, datadir, **kwargs): @dataset.command("ls-files") -@click.argument("names", nargs=-1, shell_complete=_complete_datasets) +@click.argument("names", nargs=-1, shell_complete=shell_complete_datasets) @click.option("-t", "--tag", default=None, type=click.STRING, help="Tag for which to show dataset files.") @click.option( "--creators", @@ -879,7 +869,7 @@ def ls_files(names, tag, creators, include, exclude, format, columns): @dataset.command() -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.option("-I", "--include", multiple=True, help="Include files matching given pattern.") @click.option("-X", "--exclude", multiple=True, help="Exclude files matching given pattern.") @click.option("-y", "--yes", is_flag=True, help="Confirm unlinking of all files.") @@ -914,7 +904,7 @@ def remove(name): @dataset.command("tag") -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.argument("tag") @click.option("-d", "--description", default="", help="A description for this tag") @click.option("-f", "--force", is_flag=True, help="Allow overwriting existing tags.") @@ -927,7 +917,7 @@ def tag(name, tag, description, force): @dataset.command("rm-tags") -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.argument("tags", nargs=-1) def remove_tags(name, tags): """Remove tags from a dataset.""" @@ -938,7 
+928,7 @@ def remove_tags(name, tags): @dataset.command("ls-tags") -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @click.option( "--format", type=click.Choice(list(DATASET_TAGS_FORMATS.keys())), default="tabular", help="Choose an output format." ) @@ -976,7 +966,7 @@ def export_provider_options(*param_decls, **attrs): @dataset.command() -@click.argument("name", shell_complete=_complete_datasets) +@click.argument("name", shell_complete=shell_complete_datasets) @export_provider_argument() @click.option("-t", "--tag", help="Dataset tag to export") @export_provider_options() @@ -1037,7 +1027,7 @@ def import_(uri, name, extract, yes, datadir, **kwargs): @dataset.command() @click.pass_context -@click.argument("names", nargs=-1, shell_complete=_complete_datasets) +@click.argument("names", nargs=-1, shell_complete=shell_complete_datasets) @click.option( "--creators", help="Filter files which where authored by specific creators. Multiple creators are specified by comma.", @@ -1165,8 +1155,8 @@ def get_dataset_files(records): ctx.exit(1) -@dataset.command(hidden=True) -@click.argument("name", shell_complete=_complete_datasets) +@dataset.command() +@click.argument("name", shell_complete=shell_complete_datasets) @click.option( "-l", "--location", @@ -1183,8 +1173,8 @@ def pull(name, location): pull_cloud_storage_command().with_communicator(communicator).build().execute(name=name, location=location) -@dataset.command(hidden=True) -@click.argument("name", shell_complete=_complete_datasets) +@dataset.command() +@click.argument("name", shell_complete=shell_complete_datasets) @click.option( "-e", "--existing", @@ -1207,8 +1197,8 @@ def mount(name, existing, unmount, yes): command.execute(name=name, existing=existing, yes=yes) -@dataset.command(hidden=True) -@click.argument("name", shell_complete=_complete_datasets) +@dataset.command() +@click.argument("name", shell_complete=shell_complete_datasets) def unmount(name): """Unmount a backend storage in the dataset's data directory.""" from renku.command.dataset import unmount_cloud_storage_command diff --git a/renku/ui/cli/doctor.py b/renku/ui/cli/doctor.py index d5f4b90a90..e522ddbb56 100644 --- a/renku/ui/cli/doctor.py +++ b/renku/ui/cli/doctor.py @@ -58,11 +58,15 @@ def doctor(ctx, fix, force): command = doctor_check_command(with_fix=fix) if fix: command = command.with_communicator(communicator) - is_ok, problems = command.build().execute(fix=fix, force=force).output + is_ok, fixes_available, problems = command.build().execute(fix=fix, force=force).output if is_ok: click.secho("Everything seems to be ok.", fg=color.GREEN) ctx.exit(0) click.echo(problems) + + if fixes_available: + click.echo("Run with '--fix' flag to try and fix these issues.") + ctx.exit(1) diff --git a/renku/ui/cli/env.py b/renku/ui/cli/env.py index 09f60262bb..3dafd9a711 100644 --- a/renku/ui/cli/env.py +++ b/renku/ui/cli/env.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/migrate.py b/renku/ui/cli/migrate.py index b135eadbbc..a24bc1f9d4 100644 --- a/renku/ui/cli/migrate.py +++ b/renku/ui/cli/migrate.py @@ -59,6 +59,7 @@ """ import json import os +from dataclasses import asdict import click @@ -152,7 +153,12 @@ def migrationscheck(): from renku.command.migrate import migrations_check result = migrations_check().lock_project().build().execute().output - click.echo(json.dumps(result)) + result_dict = asdict(result) + + if result_dict.get("errors"): + for key, value in result_dict["errors"].items(): + result_dict["errors"][key] = str(value) + click.echo(json.dumps(result_dict)) @click.command(hidden=True) diff --git a/renku/ui/cli/service.py b/renku/ui/cli/service.py index b2d08d2340..0c19c9973c 100644 --- a/renku/ui/cli/service.py +++ b/renku/ui/cli/service.py @@ -179,6 +179,9 @@ def service(ctx, env): import rq # noqa: F401 from dotenv import load_dotenv + if ctx.invoked_subcommand in ["apispec", "logs", "api"]: + return # Redis not needed + try: from renku.ui.service.cache.base import BaseCache diff --git a/renku/ui/cli/session.py b/renku/ui/cli/session.py index 010c0705e4..3db7642479 100644 --- a/renku/ui/cli/session.py +++ b/renku/ui/cli/session.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -59,6 +58,11 @@ Finally, it prompts the user to build the image locally if no image is found. You can force the image to always be built by using the ``--force-build`` flag. +This command accepts a subset of arguments of the ``docker run`` command. See +its help for the list of supported arguments: ``renku session start --help``. +Accepted values are the same as for the ``docker run`` command unless stated +otherwise. + Renkulab provider ~~~~~~~~~~~~~~~~~ @@ -109,7 +113,8 @@ $ renku session start -p renkulab --ssh Your system is not set up for SSH connections to Renkulab. Would you like to set it up? [y/N]: y [...] - Session sessionid successfully started, use 'renku session open --ssh sessionid' or 'ssh sessionid' to connect to it + Session successfully started, use 'renku session open --ssh ' or 'ssh ' to + connect to it This will create SSH keys for you and set up SSH configuration for connecting to the renku deployment. 
You can then use the SSH connection name (``ssh renkulab.io-myproject-sessionid`` in the example) @@ -190,6 +195,7 @@ from renku.command.util import WARNING from renku.core import errors from renku.ui.cli.utils.callback import ClickCallback +from renku.ui.cli.utils.click import shell_complete_session_providers, shell_complete_sessions from renku.ui.cli.utils.plugins import get_supported_session_providers_names @@ -205,6 +211,7 @@ def session(): "-p", "--provider", type=click.Choice(Proxy(get_supported_session_providers_names)), + shell_complete=shell_complete_session_providers, default=None, help="Backend to use for listing interactive sessions.", ) @@ -212,6 +219,7 @@ def session(): "config", "-c", "--config", + hidden=True, type=click.Path(exists=True, dir_okay=False), metavar="", help="YAML file containing configuration for the provider.", @@ -231,7 +239,7 @@ def list_sessions(provider, config, columns, format): """List interactive sessions.""" from renku.command.session import session_list_command - result = session_list_command().build().execute(provider=provider, config_path=config) + result = session_list_command().build().execute(provider=provider) click.echo(SESSION_FORMATS[format](result.output.sessions, columns=columns)) @@ -258,6 +266,7 @@ def session_start_provider_options(*param_decls, **attrs): "-p", "--provider", type=click.Choice(Proxy(get_supported_session_providers_names)), + shell_complete=shell_complete_session_providers, default="docker", show_default=True, help="Backend to use for creating an interactive session.", @@ -273,13 +282,20 @@ def session_start_provider_options(*param_decls, **attrs): @click.option("--image", type=click.STRING, metavar="", help="Docker image to use for the session.") @click.option("--cpu", type=click.FLOAT, metavar="", help="CPUs quota for the session.") @click.option("--disk", type=click.STRING, metavar="", help="Amount of disk space required for the session.") -@click.option("--gpu", type=click.STRING, metavar="", help="GPU quota for the session.") +@click.option( + "--gpu", + type=click.STRING, + metavar="", + help="Number of GPU devices to add to the container ('all' to pass all GPUs).", +) @click.option("--memory", type=click.STRING, metavar="", help="Amount of memory required for the session.") @session_start_provider_options() def start(provider, config, image, cpu, disk, gpu, memory, **kwargs): """Start an interactive session.""" from renku.command.session import session_start_command + kwargs = {k: v for k, v in kwargs.items() if v is not None} + communicator = ClickCallback() session_start_command().with_communicator(communicator).build().execute( provider=provider, @@ -294,13 +310,14 @@ def start(provider, config, image, cpu, disk, gpu, memory, **kwargs): @session.command("stop") -@click.argument("session_name", metavar="", required=False) +@click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) @click.option( "provider", "-p", "--provider", type=click.Choice(Proxy(get_supported_session_providers_names)), default=None, + shell_complete=shell_complete_session_providers, help="Session provider to use.", ) @click.option("stop_all", "-a", "--all", is_flag=True, help="Stops all the running containers.") @@ -308,9 +325,7 @@ def stop(session_name, stop_all, provider): """Stop an interactive session.""" from renku.command.session import session_stop_command - if not stop_all and session_name is None: - raise errors.ParameterError("Please specify either a session ID or the 
'-a/--all' flag.") - elif stop_all and session_name: + if stop_all and session_name: raise errors.ParameterError("Cannot specify a session ID with the '-a/--all' flag.") communicator = ClickCallback() @@ -319,8 +334,10 @@ def stop(session_name, stop_all, provider): ) if stop_all: click.echo("All running interactive sessions for this project have been stopped.") - else: + elif session_name: click.echo(f"Interactive session '{session_name}' has been successfully stopped.") + else: + click.echo("Interactive session has been successfully stopped.") def session_open_provider_options(*param_decls, **attrs): @@ -333,12 +350,13 @@ def session_open_provider_options(*param_decls, **attrs): @session.command("open") -@click.argument("session_name", metavar="", required=True) +@click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) @click.option( "provider", "-p", "--provider", type=click.Choice(Proxy(get_supported_session_providers_names)), + shell_complete=shell_complete_session_providers, default=None, help="Session provider to use.", ) diff --git a/renku/ui/cli/utils/__init__.py b/renku/ui/cli/utils/__init__.py index 27c62f07ad..77cea52e35 100644 --- a/renku/ui/cli/utils/__init__.py +++ b/renku/ui/cli/utils/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/utils/callback.py b/renku/ui/cli/utils/callback.py index 6e1e77d938..489524fe7d 100644 --- a/renku/ui/cli/utils/callback.py +++ b/renku/ui/cli/utils/callback.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/utils/click.py b/renku/ui/cli/utils/click.py index 716104d6cd..39d2516dd8 100644 --- a/renku/ui/cli/utils/click.py +++ b/renku/ui/cli/utils/click.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,6 +23,54 @@ from renku.core.dataset.providers.models import ProviderParameter +def shell_complete_datasets(ctx, param, incomplete) -> List[str]: + """Shell completion for dataset names.""" + from renku.command.dataset import search_datasets_command + + try: + result = search_datasets_command().build().execute(name=incomplete) + except Exception: + return [] + else: + return result.output + + +def shell_complete_workflows(ctx, param, incomplete) -> List[str]: + """Shell completion for plan names.""" + from renku.command.workflow import search_workflows_command + + try: + result = search_workflows_command().build().execute(name=incomplete) + except Exception: + return [] + else: + return [n for n in result.output if n.startswith(incomplete)] + + +def shell_complete_sessions(ctx, param, incomplete) -> List[str]: + """Shell completion for session names.""" + from renku.command.session import search_sessions_command + + try: + result = search_sessions_command().build().execute(name=incomplete) + except Exception: + return [] + else: + return result.output + + +def shell_complete_session_providers(ctx, param, incomplete) -> List[str]: + """Shell completion for session providers names.""" + from renku.command.session import search_session_providers_command + + try: + result = search_session_providers_command().build().execute(name=incomplete) + except Exception: + return [] + else: + return result.output + + class CaseInsensitiveChoice(click.Choice): """Case-insensitive click choice. @@ -82,7 +129,7 @@ def wrapper(f): param_help = f"\b\n{param.help}\n " if j == 0 else param.help # NOTE: add newline after a group args = ( - [f"-{a}" if len(a) == 1 else f"--{a}" for a in param.flags if a] + [param.name] + [f"-{a}" if len(a) == 1 else f"--{a}" for a in param.flags if a] + [param.name.replace("-", "_")] if param.flags else [f"--{param.name}"] ) @@ -94,6 +141,7 @@ def wrapper(f): is_flag=param.is_flag, default=param.default, multiple=param.multiple, + metavar=param.metavar, )(f) name = f"{provider.name} configuration" diff --git a/renku/ui/cli/utils/color.py b/renku/ui/cli/utils/color.py index b6b35aa828..e111a0c83f 100644 --- a/renku/ui/cli/utils/color.py +++ b/renku/ui/cli/utils/color.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/utils/curses.py b/renku/ui/cli/utils/curses.py index 0784d5cb8b..cbba1c4d72 100644 --- a/renku/ui/cli/utils/curses.py +++ b/renku/ui/cli/utils/curses.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -40,7 +39,7 @@ class CursesActivityGraphViewer: } ACTIVITY_OVERLAY_WIDTH = 60 - ACTIVITY_OVERLAY_HEIGHT = 40 + ACTIVITY_OVERLAY_HEIGHT = 35 HELP_OVERLAY_WIDTH = 60 HELP_OVERLAY_HEIGHT = 6 DATE_FORMAT = "%Y-%m-%d %H:%M:S" diff --git a/renku/ui/cli/utils/plugins.py b/renku/ui/cli/utils/plugins.py index 8e2610a771..1997d66075 100644 --- a/renku/ui/cli/utils/plugins.py +++ b/renku/ui/cli/utils/plugins.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/utils/terminal.py b/renku/ui/cli/utils/terminal.py index b70af87993..61d1958c4d 100644 --- a/renku/ui/cli/utils/terminal.py +++ b/renku/ui/cli/utils/terminal.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/workflow.py b/renku/ui/cli/workflow.py index f47d3358d9..cefac3c70d 100644 --- a/renku/ui/cli/workflow.py +++ b/renku/ui/cli/workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -758,20 +757,11 @@ from renku.core import errors from renku.domain_model.constant import NO_VALUE from renku.ui.cli.utils.callback import ClickCallback +from renku.ui.cli.utils.click import shell_complete_workflows from renku.ui.cli.utils.plugins import available_workflow_providers, get_supported_formats from renku.ui.cli.utils.terminal import print_workflow_file, show_text_with_pager -def _complete_workflows(ctx, param, incomplete): - from renku.command.workflow import search_workflows_command - - try: - result = search_workflows_command().build().execute(name=incomplete) - return list(filter(lambda x: x.startswith(incomplete), result.output)) - except Exception: - return [] - - @click.group() def workflow(): """Workflow commands.""" @@ -800,7 +790,7 @@ def list_workflows(format, columns): @workflow.command() -@click.argument("name_or_id_or_path", metavar="", shell_complete=_complete_workflows) +@click.argument("name_or_id_or_path", metavar="", shell_complete=shell_complete_workflows) def show(name_or_id_or_path): """Show details for workflow .""" from renku.command.view_model.plan import PlanViewModel @@ -830,7 +820,7 @@ def show(name_or_id_or_path): @workflow.command() -@click.argument("name", metavar="", shell_complete=_complete_workflows) +@click.argument("name", metavar="", shell_complete=shell_complete_workflows) @click.option("--force", is_flag=True, help="Override the existence check.") def remove(name, force): """Remove a workflow named .""" @@ -876,7 +866,7 @@ def remove(name, force): help="Creator's name, email, and affiliation. Accepted format is 'Forename Surname [affiliation]'.", ) @click.argument("name", required=True) -@click.argument("steps", nargs=-1, type=click.UNPROCESSED, shell_complete=_complete_workflows) +@click.argument("steps", nargs=-1, type=click.UNPROCESSED, shell_complete=shell_complete_workflows) def compose( description, mappings, @@ -940,7 +930,7 @@ def compose( @workflow.command() -@click.argument("workflow_name", metavar="", shell_complete=_complete_workflows) +@click.argument("workflow_name", metavar="", shell_complete=shell_complete_workflows) @click.option("-n", "--name", metavar="", help="New name of the workflow") @click.option("-d", "--description", metavar="", help="New description of the workflow") @click.option( @@ -1055,7 +1045,7 @@ def edit( @workflow.command() -@click.argument("workflow_name", metavar="", shell_complete=_complete_workflows) +@click.argument("workflow_name", metavar="", shell_complete=shell_complete_workflows) @click.option( "-f", "--format", @@ -1183,7 +1173,7 @@ def outputs(ctx, paths): help="YAML file containing parameter mappings to be used.", ) @click.option("--skip-metadata-update", is_flag=True, help="Do not update the metadata store for the execution.") -@click.argument("name_or_id", required=True, shell_complete=_complete_workflows) +@click.argument("name_or_id", required=True, shell_complete=shell_complete_workflows) def execute( provider, config, @@ -1341,7 +1331,7 @@ def visualize(sources, columns, exclude_files, ascii, revision, format, interact ) @click.option("mappings", "-m", "--map", multiple=True, help="Mapping for a workflow parameter.") @click.option("config", "-c", "--config", metavar="", help="YAML file containing config for the provider.") -@click.argument("name_or_id", required=True, shell_complete=_complete_workflows) +@click.argument("name_or_id", required=True, shell_complete=shell_complete_workflows) def iterate(name_or_id, 
mappings, mapping_path, dry_run, provider, config, skip_metadata_update): """Execute a workflow by iterating through a range of provided parameters.""" from renku.command.view_model.plan import PlanViewModel diff --git a/renku/ui/service/cache/models/job.py b/renku/ui/service/cache/models/job.py index fc41e639cd..38135980a4 100644 --- a/renku/ui/service/cache/models/job.py +++ b/renku/ui/service/cache/models/job.py @@ -43,6 +43,7 @@ class Job(Model): state = TextField() extras = JSONField() client_extras = TextField() + branch = TextField() ctrl_context = JSONField() ctrl_result = JSONField() diff --git a/renku/ui/service/controllers/api/mixins.py b/renku/ui/service/controllers/api/mixins.py index 0f72c21951..934f98d9ba 100644 --- a/renku/ui/service/controllers/api/mixins.py +++ b/renku/ui/service/controllers/api/mixins.py @@ -169,8 +169,8 @@ def execute_op(self): "git_url": self.request_data["git_url"], } - if "ref" in self.request_data: - clone_context["ref"] = self.request_data["ref"] + if "branch" in self.request_data: + clone_context["branch"] = self.request_data["branch"] # NOTE: If we want to migrate project, then we need to do full clone. # This operation can take very long time, and as such is expected @@ -185,27 +185,27 @@ def execute_op(self): if not project.initialized: raise UninitializedProject(project.abs_path) else: - ref = self.request_data.get("ref", None) + branch = self.request_data.get("branch", None) - if ref: + if branch: with Repository(project.abs_path) as repository: - if ref != repository.active_branch.name: + if branch != repository.active_branch.name: # NOTE: Command called for different branch than the one used in cache, change branch if len(repository.remotes) != 1: raise RenkuException("Couldn't find remote for project in cache.") origin = repository.remotes[0] - remote_branch = f"{origin}/{ref}" + remote_branch = f"{origin}/{branch}" with project.write_lock(): - # NOTE: Add new ref to remote branches - repository.run_git_command("remote", "set-branches", "--add", origin, ref) + # NOTE: Add new branch to remote branches + repository.run_git_command("remote", "set-branches", "--add", origin, branch) if self.migrate_project or self.clone_depth == PROJECT_CLONE_NO_DEPTH: - repository.fetch(origin, ref) + repository.fetch(origin, branch) else: - repository.fetch(origin, ref, depth=self.clone_depth) + repository.fetch(origin, branch, depth=self.clone_depth) # NOTE: Switch to new ref - repository.run_git_command("checkout", "--track", "-f", "-b", ref, remote_branch) + repository.run_git_command("checkout", "--track", "-f", "-b", branch, remote_branch) # NOTE: cleanup remote branches in case a remote was deleted (fetch fails otherwise) repository.run_git_command("remote", "prune", origin) diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index 24a5b61c4e..c5584ebc70 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -17,18 +17,18 @@ """Renku service migrations check controller.""" import tempfile +from dataclasses import asdict from pathlib import Path -from renku.command.migrate import migrations_check +from renku.command.doctor import doctor_check_command +from renku.command.migrate import MigrationCheckResult, migrations_check from renku.core.errors import AuthenticationError, MinimumVersionError, ProjectNotFound, RenkuException -from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION from 
renku.core.util.contexts import renku_project_context from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOperationMixin from renku.ui.service.interfaces.git_api_provider import IGitAPIProvider from renku.ui.service.serializers.cache import ProjectMigrationCheckRequest, ProjectMigrationCheckResponseRPC from renku.ui.service.views import result_response -from renku.version import __version__ class MigrationsCheckCtrl(ServiceCtrl, RenkuOperationMixin): @@ -55,18 +55,14 @@ def _fast_op_without_cache(self): with tempfile.TemporaryDirectory() as tempdir: tempdir_path = Path(tempdir) - self.git_api_provider.download_files_from_api( - [ - ".renku/metadata/root", - ".renku/metadata/project", - ".renku/metadata.yml", - ".renku/renku.ini", + files=[ "Dockerfile", ], - tempdir_path, + folders=[".renku"], + target_folder=tempdir_path, remote=self.ctx["git_url"], - ref=self.request_data.get("ref", None), + branch=self.request_data.get("branch", None), token=self.user_data.get("token", None), ) with renku_project_context(tempdir_path): @@ -75,36 +71,18 @@ def _fast_op_without_cache(self): def renku_op(self): """Renku operation for the controller.""" try: - return migrations_check().build().execute().output + migrations_check_result = migrations_check().build().execute().output + doctor_result = doctor_check_command(with_fix=False).build().execute(fix=False, force=False).output + migrations_check_result.core_compatibility_status.fixes_available = doctor_result[1] + migrations_check_result.core_compatibility_status.issues_found = doctor_result[2] + return migrations_check_result except MinimumVersionError as e: - return { - "project_supported": False, - "core_renku_version": e.current_version, - "project_renku_version": f">={e.minimum_version}", - "core_compatibility_status": { - "migration_required": False, - "project_metadata_version": f">={SUPPORTED_PROJECT_VERSION}", - "current_metadata_version": SUPPORTED_PROJECT_VERSION, - }, - "dockerfile_renku_status": { - "dockerfile_renku_version": "unknown", - "latest_renku_version": __version__, - "newer_renku_available": False, - "automated_dockerfile_update": False, - }, - "template_status": { - "automated_template_update": False, - "newer_template_available": False, - "template_source": "unknown", - "template_ref": "unknown", - "template_id": "unknown", - "project_template_version": "unknown", - "latest_template_version": "unknown", - }, - } + return MigrationCheckResult.from_minimum_version_error(e) def to_response(self): """Execute controller flow and serialize to service response.""" + from renku.ui.service.views.error_handlers import pretty_print_error + if "project_id" in self.context: result = self.execute_op() else: @@ -116,4 +94,16 @@ def to_response(self): except BaseException: result = self.execute_op() - return result_response(self.RESPONSE_SERIALIZER, result) + result_dict = asdict(result) + + # NOTE: Pretty-print errors for the UI + if isinstance(result.template_status, Exception): + result_dict["template_status"] = pretty_print_error(result.template_status) + + if isinstance(result.dockerfile_renku_status, Exception): + result_dict["dockerfile_renku_status"] = pretty_print_error(result.dockerfile_renku_status) + + if isinstance(result.core_compatibility_status, Exception): + result_dict["core_compatibility_status"] = pretty_print_error(result.core_compatibility_status) + + return result_response(self.RESPONSE_SERIALIZER, result_dict) diff --git 
a/renku/ui/service/controllers/graph_export.py b/renku/ui/service/controllers/graph_export.py index b51df14d0f..da1b3297a9 100644 --- a/renku/ui/service/controllers/graph_export.py +++ b/renku/ui/service/controllers/graph_export.py @@ -55,7 +55,7 @@ def renku_op(self): """Renku operation for the controller.""" result = migrations_check().build().execute().output - if not result["project_supported"]: + if not result.project_supported: raise RenkuException("project not supported") callback_payload = { diff --git a/renku/ui/service/controllers/templates_create_project.py b/renku/ui/service/controllers/templates_create_project.py index 051f6e460d..2d6842a188 100644 --- a/renku/ui/service/controllers/templates_create_project.py +++ b/renku/ui/service/controllers/templates_create_project.py @@ -65,7 +65,7 @@ def default_metadata(self): metadata = { "__template_source__": self.ctx["git_url"], - "__template_ref__": self.ctx["ref"], + "__template_ref__": self.ctx["branch"], "__template_id__": self.ctx["identifier"], "__namespace__": self.ctx["project_namespace"], "__repository__": self.ctx["project_repository"], @@ -115,7 +115,7 @@ def setup_new_project(self): def setup_template(self): """Reads template manifest.""" - templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["ref"]) + templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["branch"]) identifier = self.ctx["identifier"] try: self.template = templates_source.get_template(id=identifier, reference=None) diff --git a/renku/ui/service/controllers/templates_read_manifest.py b/renku/ui/service/controllers/templates_read_manifest.py index 463b110977..d46d4e2e32 100644 --- a/renku/ui/service/controllers/templates_read_manifest.py +++ b/renku/ui/service/controllers/templates_read_manifest.py @@ -50,7 +50,7 @@ def template_manifest(self): """Reads template manifest.""" from PIL import Image - templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["ref"]) + templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["branch"]) manifest = templates_source.manifest.get_raw_content() # NOTE: convert icons to base64 diff --git a/renku/ui/service/controllers/utils/project_clone.py b/renku/ui/service/controllers/utils/project_clone.py index 3ab5652d88..cf7b320198 100644 --- a/renku/ui/service/controllers/utils/project_clone.py +++ b/renku/ui/service/controllers/utils/project_clone.py @@ -73,7 +73,7 @@ def user_project_clone(cache, user_data, project_data): "user.email": project_data["email"], "pull.rebase": False, }, - checkout_revision=project_data["ref"], + checkout_revision=project_data["branch"], ) ).output project.save() diff --git a/renku/ui/service/controllers/utils/remote_project.py b/renku/ui/service/controllers/utils/remote_project.py index f5775f14a5..5154b672c4 100644 --- a/renku/ui/service/controllers/utils/remote_project.py +++ b/renku/ui/service/controllers/utils/remote_project.py @@ -45,7 +45,7 @@ def __init__(self, user_data, request_data): self.ctx = ProjectCloneContext().load({**user_data, **request_data}, unknown=EXCLUDE) self.git_url = self.ctx["url_with_auth"] - self.branch = self.ctx["ref"] + self.branch = self.ctx["branch"] @property def remote_url(self): diff --git a/renku/ui/service/errors.py b/renku/ui/service/errors.py index a8f3e58e20..434a0f9b7a 100644 --- a/renku/ui/service/errors.py +++ b/renku/ui/service/errors.py @@ -527,6 +527,7 @@ class ProgramUpdateProjectError(ServiceError): code = 
SVC_ERROR_USER + 140 userMessage = "Our servers could not update the project succesfully. You could try doing it manually in a session." devMessage = "Updating the target project failed. Check the Sentry exception for further details." + userReference = "https://renku.readthedocs.io/en/stable/how-to-guides/general/upgrading-renku.html" def __init__(self, exception=None): super().__init__(exception=exception) diff --git a/renku/ui/service/gateways/gitlab_api_provider.py b/renku/ui/service/gateways/gitlab_api_provider.py index 8d14f1b00c..5cbb7f311d 100644 --- a/renku/ui/service/gateways/gitlab_api_provider.py +++ b/renku/ui/service/gateways/gitlab_api_provider.py @@ -16,6 +16,8 @@ # limitations under the License. """Git APi provider interface.""" +import tarfile +import tempfile from pathlib import Path from typing import List, Optional, Union @@ -43,15 +45,25 @@ class GitlabAPIProvider(IGitAPIProvider): def download_files_from_api( self, - paths: List[Union[Path, str]], + files: List[Union[Path, str]], + folders: List[Union[Path, str]], target_folder: Union[Path, str], remote: str, token: str, - ref: Optional[str] = None, + branch: Optional[str] = None, ): - """Download files through a remote Git API.""" - if not ref: - ref = "HEAD" + """Download files through a remote Git API. + + Args: + files(List[Union[Path, str]]): Files to download. + folders(List[Union[Path, str]]): Folders to download. + target_folder(Union[Path, str]): Destination to save downloads to. + remote(str): Git remote URL. + token(str): Gitlab API token. + branch(Optional[str]): Git reference (Default value = None). + """ + if not branch: + branch = "HEAD" target_folder = Path(target_folder) @@ -73,18 +85,21 @@ def download_files_from_api( else: raise - result_paths = [] - - for path in paths: - full_path = target_folder / path + for file in files: + full_path = target_folder / file full_path.parent.mkdir(parents=True, exist_ok=True) try: with open(full_path, "wb") as f: - project.files.raw(file_path=str(path), ref=ref, streamed=True, action=f.write) - - result_paths.append(full_path) + project.files.raw(file_path=str(file), ref=branch, streamed=True, action=f.write) except gitlab.GitlabGetError: delete_dataset_file(full_path) continue + + for folder in folders: + with tempfile.NamedTemporaryFile() as f: + project.repository_archive(path=str(folder), sha=branch, streamed=True, action=f.write, format="tar.gz") + f.seek(0) + with tarfile.open(fileobj=f) as archive: + archive.extractall(path=target_folder) diff --git a/renku/ui/service/interfaces/git_api_provider.py b/renku/ui/service/interfaces/git_api_provider.py index e0e4ce9d46..dfe7d022ba 100644 --- a/renku/ui/service/interfaces/git_api_provider.py +++ b/renku/ui/service/interfaces/git_api_provider.py @@ -26,11 +26,12 @@ class IGitAPIProvider(ABC): def download_files_from_api( self, - paths: List[Union[Path, str]], + files: List[Union[Path, str]], + folders: List[Union[Path, str]], target_folder: Union[Path, str], remote: str, token: str, - ref: Optional[str] = None, + branch: Optional[str] = None, ): """Download files through a remote Git API.""" raise NotImplementedError() diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index 619a8ef658..f4b93c516f 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -20,6 +20,7 @@ from urllib.parse import urlparse from marshmallow import Schema, ValidationError, fields, post_load, pre_load, validates_schema +from marshmallow_oneofschema import 
OneOfSchema from werkzeug.utils import secure_filename from renku.core import errors @@ -29,6 +30,7 @@ from renku.ui.service.serializers.common import ( ArchiveSchema, AsyncSchema, + ErrorResponse, FileDetailsSchema, LocalRepositorySchema, RemoteRepositorySchema, @@ -131,7 +133,6 @@ class RepositoryCloneRequest(RemoteRepositorySchema): """Request schema for repository clone.""" depth = fields.Integer(metadata={"description": "Git fetch depth"}, load_default=PROJECT_CLONE_DEPTH_DEFAULT) - ref = fields.String(metadata={"description": "Repository reference (branch, commit or tag)"}, load_default=None) class ProjectCloneContext(RepositoryCloneRequest): @@ -240,18 +241,6 @@ class ProjectMigrateRequest(AsyncSchema, LocalRepositorySchema, RemoteRepository skip_docker_update = fields.Boolean(dump_default=False) skip_migrations = fields.Boolean(dump_default=False) - @pre_load() - def handle_ref(self, data, **kwargs): - """Handle ref and branch.""" - - # Backward compatibility: branch and ref were both used. Let's keep branch as the exposed field - # even if interally it gets converted to "ref" later. - if data.get("ref"): - data["branch"] = data["ref"] - del data["ref"] - - return data - class ProjectMigrateResponse(RenkuSyncSchema): """Response schema for project migrate.""" @@ -274,7 +263,7 @@ class ProjectMigrationCheckRequest(LocalRepositorySchema, RemoteRepositorySchema """Request schema for project migration check.""" -class ProjectCompatibilityResponse(Schema): +class ProjectCompatibilityResponseDetail(Schema): """Response schema outlining service compatibility for migrations check.""" project_metadata_version = fields.String( @@ -286,9 +275,30 @@ class ProjectCompatibilityResponse(Schema): migration_required = fields.Boolean( metadata={"description": "Whether or not a metadata migration is required to be compatible with this service."} ) + fixes_available = fields.Boolean( + metadata={ + "description": "Whether automated fixes of metadata (beyond those done during migration) are available." 
+ } + ) + issues_found = fields.List(fields.Str, metadata={"description": "Metadata issues found on project."}) + + +class ProjectCompatibilityResponse(OneOfSchema): + """Combined schema of DockerfileStatusResponseDetail or Exception.""" + + type_schemas = {"detail": ProjectCompatibilityResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import CoreStatusResult + + if isinstance(obj, CoreStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + return "error" -class DockerfileStatusResponse(Schema): + +class DockerfileStatusResponseDetail(Schema): """Response schema outlining dockerfile status for migrations check.""" newer_renku_available = fields.Boolean( @@ -305,7 +315,22 @@ class DockerfileStatusResponse(Schema): dockerfile_renku_version = fields.String(metadata={"description": "Version of Renku specified in the Dockerfile."}) -class TemplateStatusResponse(Schema): +class DockerfileStatusResponse(OneOfSchema): + """Combined schema of DockerfileStatusResponseDetail or Exception.""" + + type_schemas = {"detail": DockerfileStatusResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import DockerfileStatusResult + + if isinstance(obj, DockerfileStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + + return "error" + + +class TemplateStatusResponseDetail(Schema): """Response schema outlining template status for migrations check.""" automated_template_update = fields.Boolean( @@ -343,6 +368,21 @@ class TemplateStatusResponse(Schema): ) +class TemplateStatusResponse(OneOfSchema): + """Combined schema of TemplateStatusResponseDetail or Exception.""" + + type_schemas = {"detail": TemplateStatusResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import TemplateStatusResult + + if isinstance(obj, TemplateStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + + return "error" + + class ProjectMigrationCheckResponse(Schema): """Response schema for project migration check.""" diff --git a/renku/ui/service/serializers/common.py b/renku/ui/service/serializers/common.py index c246b9dad1..bc2a666434 100644 --- a/renku/ui/service/serializers/common.py +++ b/renku/ui/service/serializers/common.py @@ -19,7 +19,7 @@ from datetime import datetime import yagup -from marshmallow import Schema, fields, validates +from marshmallow import Schema, fields, pre_load, validates from renku.ui.service.errors import UserRepoUrlInvalidError from renku.ui.service.serializers.rpc import JsonRPCResponse @@ -52,7 +52,18 @@ def validate_git_url(self, value): class RemoteRepositorySchema(RemoteRepositoryBaseSchema): """Schema for tracking a remote repository and branch.""" - branch = fields.String(metadata={"description": "Remote git branch."}) + branch = fields.String(load_default=None, metadata={"description": "Remote git branch (or tag or commit SHA)."}) + + @pre_load + def set_branch_from_ref(self, data, **kwargs): + """Set `branch` field from `ref` if present.""" + if "ref" in data and not data.get("branch"): + # Backward compatibility: branch and ref were both used. Let's keep branch as the exposed field + # even if internally it gets converted to "ref" later. 
+ data["branch"] = data["ref"] + del data["ref"] + + return data class AsyncSchema(Schema): @@ -130,3 +141,14 @@ class DelayedResponseRPC(JsonRPCResponse): """RPC response schema for project migrate.""" result = fields.Nested(JobDetailsResponse) + + +class ErrorResponse(Schema): + """Renku Service Error Response.""" + + code = fields.Integer(required=True) + userMessage = fields.String(required=True) + devMessage = fields.String(required=True) + userReference = fields.String() + devReference = fields.String() + sentry = fields.String() diff --git a/renku/ui/service/serializers/datasets.py b/renku/ui/service/serializers/datasets.py index 97c4504840..56fc98fd30 100644 --- a/renku/ui/service/serializers/datasets.py +++ b/renku/ui/service/serializers/datasets.py @@ -32,12 +32,6 @@ from renku.ui.service.serializers.rpc import JsonRPCResponse -class DatasetRefSchema(Schema): - """Schema for specifying a reference.""" - - ref = fields.String(metadata={"description": "Target reference."}) - - class DatasetNameSchema(Schema): """Schema for dataset name.""" @@ -53,7 +47,7 @@ class DatasetDetailsRequest(DatasetDetails): class DatasetCreateRequest( - AsyncSchema, DatasetDetailsRequest, DatasetRefSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema + AsyncSchema, DatasetDetailsRequest, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema ): """Request schema for a dataset create view.""" @@ -75,7 +69,7 @@ class DatasetCreateResponseRPC(JsonRPCResponse): class DatasetRemoveRequest( - AsyncSchema, DatasetNameSchema, DatasetRefSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema + AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema ): """Request schema for a dataset remove.""" @@ -99,9 +93,7 @@ class DatasetAddFile(Schema): job_id = fields.String() -class DatasetAddRequest( - AsyncSchema, DatasetNameSchema, DatasetRefSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema -): +class DatasetAddRequest(AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset add file view.""" files = fields.List(fields.Nested(DatasetAddFile), required=True) @@ -203,7 +195,6 @@ class DatasetEditRequest( AsyncSchema, DatasetDetailsRequest, DatasetNameSchema, - DatasetRefSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema, @@ -240,7 +231,7 @@ class DatasetEditResponseRPC(JsonRPCResponse): class DatasetUnlinkRequest( - AsyncSchema, DatasetNameSchema, DatasetRefSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema + AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema ): """Dataset unlink file request.""" diff --git a/renku/ui/service/serializers/templates.py b/renku/ui/service/serializers/templates.py index ef3cb5a3b8..adae0d02b6 100644 --- a/renku/ui/service/serializers/templates.py +++ b/renku/ui/service/serializers/templates.py @@ -32,7 +32,6 @@ class ManifestTemplatesRequest(RepositoryCloneRequest): """Request schema for listing manifest templates.""" url = fields.String(required=True) - ref = fields.String(load_default=None) depth = fields.Integer(load_default=TEMPLATE_CLONE_DEPTH_DEFAULT) @pre_load() diff --git a/renku/ui/service/views/apispec.py b/renku/ui/service/views/apispec.py index 25e7d930ee..36a08f84e8 100644 --- a/renku/ui/service/views/apispec.py +++ b/renku/ui/service/views/apispec.py @@ -16,7 +16,7 @@ # limitations under the License. 
"""Renku service apispec views.""" from apispec import APISpec, yaml_utils -from apispec.ext.marshmallow import MarshmallowPlugin +from apispec_oneofschema import MarshmallowPlugin from apispec_webframeworks.flask import FlaskPlugin from flask import Blueprint, current_app, jsonify from flask.views import MethodView diff --git a/renku/ui/service/views/error_handlers.py b/renku/ui/service/views/error_handlers.py index 7751fc60ca..7cd52f13f6 100644 --- a/renku/ui/service/views/error_handlers.py +++ b/renku/ui/service/views/error_handlers.py @@ -454,3 +454,15 @@ def decorated_function(*args, **kwargs): raise ProgramGraphCorruptError(e) return decorated_function + + +def pretty_print_error(error: Exception): + """Use error handlers to pretty print an exception.""" + + @handle_common_except + @handle_migration_read_errors + def _fake_error_source(): + raise error + + response = _fake_error_source() + return response.json["error"] diff --git a/renku/ui/service/views/v1/cache.py b/renku/ui/service/views/v1/cache.py index cb84538e8d..78101db73a 100644 --- a/renku/ui/service/views/v1/cache.py +++ b/renku/ui/service/views/v1/cache.py @@ -14,13 +14,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Renku service cache views.""" +"""Renku service cache views for v1.""" +from dataclasses import asdict + from flask import request +from renku.core.errors import AuthenticationError, ProjectNotFound from renku.ui.service.controllers.cache_migrate_project import MigrateProjectCtrl from renku.ui.service.controllers.cache_migrations_check import MigrationsCheckCtrl from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.serializers.v1.cache import ProjectMigrateResponseRPC_1_0, ProjectMigrationCheckResponseRPC_1_5 +from renku.ui.service.views import result_response from renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( @@ -83,9 +87,35 @@ def migration_check_project_view_1_5(user_data, cache): tags: - cache """ + + from flask import jsonify + + from renku.ui.service.serializers.rpc import JsonRPCResponse + from renku.ui.service.views.error_handlers import pretty_print_error + ctrl = MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()) - ctrl.RESPONSE_SERIALIZER = ProjectMigrationCheckResponseRPC_1_5() # type: ignore - return ctrl.to_response() + + if "project_id" in ctrl.context: # type: ignore + result = ctrl.execute_op() + else: + # NOTE: use quick flow but fallback to regular flow in case of unexpected exceptions + try: + result = ctrl._fast_op_without_cache() + except (AuthenticationError, ProjectNotFound): + raise + except BaseException: + result = ctrl.execute_op() + + if isinstance(result.core_compatibility_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.core_compatibility_status)})) + + if isinstance(result.template_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.template_status)})) + + if isinstance(result.dockerfile_renku_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.dockerfile_renku_status)})) + + return result_response(ProjectMigrationCheckResponseRPC_1_5(), 
asdict(result)) def add_v1_specific_endpoints(cache_blueprint): diff --git a/renku/version.py b/renku/version.py index 1678b09903..13f88cc515 100644 --- a/renku/version.py +++ b/renku/version.py @@ -16,14 +16,14 @@ """Version information for Renku.""" import re -from typing import Optional +from typing import Optional, cast try: from importlib.metadata import distribution, version except ImportError: from importlib_metadata import distribution, version # type: ignore -__version__ = version("renku") +__version__ = cast(str, version("renku")) __template_version__ = "0.5.0" __minimum_project_version__ = "2.4.0" diff --git a/run-tests.sh b/run-tests.sh index bd38256d10..b691be4de8 100755 --- a/run-tests.sh +++ b/run-tests.sh @@ -47,7 +47,7 @@ check_styles(){ build_docs(){ sphinx-build -qnNW docs docs/_build/html sphinx-build -nNW -b spelling -d docs/_build/doctrees docs docs/_build/spelling - pytest -v -m "not integration and not publish" -o testpaths="docs conftest.py" --ignore=docs/conf.py + pytest --black --flake8 -v -m "not integration and not publish" -o testpaths="docs conftest.py" --ignore=docs/conf.py } run_tests(){ diff --git a/tests/cli/test_datasets.py b/tests/cli/test_datasets.py index 73f014d7be..a0d935c98b 100644 --- a/tests/cli/test_datasets.py +++ b/tests/cli/test_datasets.py @@ -34,6 +34,7 @@ from renku.core.dataset.providers.dataverse import DataverseProvider from renku.core.dataset.providers.factory import ProviderFactory from renku.core.dataset.providers.zenodo import ZenodoProvider +from renku.core.interface.storage import FileHash from renku.core.storage import track_paths_in_storage from renku.core.util.git import get_dirty_paths from renku.core.util.urls import get_slug @@ -2558,7 +2559,6 @@ def test_add_local_data_to_cloud_datasets(runner, project, mocker, directory_tre cloud_storage.upload.return_value = [] - uri = "s3://s3.endpoint/bucket/path" result = runner.invoke(cli, ["dataset", "create", "cloud-data", "--storage", uri]) assert 0 == result.exit_code, format_result_exception(result) @@ -2587,6 +2587,153 @@ def test_add_local_data_to_cloud_datasets(runner, project, mocker, directory_tre cloud_storage.upload.assert_has_calls(calls=calls, any_order=True) +@pytest.mark.parametrize("uri", ["s3://s3.endpoint/bucket/", "azure://renkupythontest1/test-private-1"]) +def test_dataset_update_remote_file(runner, project, mocker, uri): + """Test updating a file added from remote/cloud storage.""" + storage_factory = mocker.patch("renku.infrastructure.storage.factory.StorageFactory.get_storage", autospec=True) + cloud_storage = storage_factory.return_value + + uri = f"{uri}/path/myfile" + + def _fake_download(uri, destination): + with open(destination, "w") as f: + f.write("a") + + cloud_storage.get_hashes.return_value = [FileHash(uri=uri, path="path/myfile", size=5, hash="deadbeef")] + cloud_storage.download.side_effect = _fake_download + + result = runner.invoke(cli, ["dataset", "create", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + result = runner.invoke(cli, ["dataset", "add", "local-data", uri]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].based_on.url == uri + assert dataset.files[0].based_on.checksum == "deadbeef" + + # Updating without changes does nothing + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = 
get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].based_on.url == uri + assert dataset.files[0].based_on.checksum == "deadbeef" + + # Updating with changes works + def _fake_download2(uri, destination): + with open(destination, "w") as f: + f.write("b") + + cloud_storage.get_hashes.return_value = [FileHash(uri=uri, path="path/myfile", size=7, hash="8badf00d")] + cloud_storage.download.side_effect = _fake_download2 + + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].based_on.url == uri + assert dataset.files[0].based_on.checksum == "8badf00d" + + cloud_storage.get_hashes.return_value = [] + + # check deletion doesn't happen without --delete + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + + # check deletion + result = runner.invoke(cli, ["dataset", "update", "local-data", "--delete"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 0 == len(dataset.files) + + +def test_dataset_update_web_file(runner, project, mocker): + """Test updating a file added from remote/cloud storage.""" + + uri = "http://www.example.com/myfile.txt" + + cache = project.path / ".renku" / "cache" + cache.mkdir(parents=True, exist_ok=True) + new_file = cache / "myfile.txt" + new_file.write_text("output") + + mocker.patch("renku.core.util.requests.get_redirect_url", lambda _: uri) + mocker.patch( + "renku.core.util.requests.download_file", + lambda base_directory, url, filename, extract: (cache, [Path(new_file)]), + ) + + result = runner.invoke(cli, ["dataset", "create", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + result = runner.invoke(cli, ["dataset", "add", "local-data", uri]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].source == uri + assert dataset.files[0].entity.checksum == "6caf68aff423350af0ef7b148fec2ed4243658e5" + + # Updating without changes does nothing + new_file.write_text("output") + + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].source == uri + assert dataset.files[0].entity.checksum == "6caf68aff423350af0ef7b148fec2ed4243658e5" + + # Updating with changes works + new_file.write_text("output2") + + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + assert dataset.files[0].source == uri + assert dataset.files[0].entity.checksum == "1bc6411450b62581e5cea1174c15269c249dd4ea" + + # check deletion doesn't happen without --delete + def _fake_raise(base_directory, url, filename, extract): + raise errors.RequestError + + mocker.patch("renku.core.util.requests.download_file", _fake_raise) + + result = runner.invoke(cli, ["dataset", "update", "local-data"]) + assert 0 == 
result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 1 == len(dataset.files) + + # check deletion + result = runner.invoke(cli, ["dataset", "update", "local-data", "--delete"]) + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("local-data") + + assert 0 == len(dataset.files) + + @pytest.mark.parametrize( "storage", ["s3://s3.endpoint/bucket/path", "azure://renkupythontest1/test-private-1", "/local/file/storage"] ) diff --git a/tests/cli/test_integration_datasets.py b/tests/cli/test_integration_datasets.py index fa3c6d2fe7..c6974d81c9 100644 --- a/tests/cli/test_integration_datasets.py +++ b/tests/cli/test_integration_datasets.py @@ -61,12 +61,20 @@ "name": "pyndl_naive_discr_v0.6.4", "creator": "Konstantin Sering, Marc Weitz, David-Elias Künstle, Lennart Schneider", "version": "v0.6.4", + "keywords": { + "naive discriminative learning", + "linguistics", + "python", + "cognitive science", + "machine learning", + }, }, { "doi": "10.7910/DVN/F4NUMR", "name": "replication_data_for_2.2", "creator": "James Druckman, Martin Kifer, Michael Parkin", "version": "2", + "keywords": {"Social Sciences"}, }, ], ) @@ -104,6 +112,7 @@ def test_dataset_import_real_doi(runner, project, doi, prefix, sleep_after): assert doi["doi"] in dataset.same_as.url assert dataset.date_created is None assert dataset.date_published is not None + assert doi["keywords"] == set(dataset.keywords) result = runner.invoke(cli, ["graph", "export", "--format", "json-ld", "--strict"]) assert 0 == result.exit_code, format_result_exception(result) @@ -825,10 +834,10 @@ def test_dataset_export_upload_failure(runner, tmpdir, project, zenodo_sandbox): [("zenodo", [], "zenodo.org/record"), ("dataverse", ["--dataverse-name", "sdsc-published-test-dataverse"], "doi:")], ) def test_dataset_export_published_url( - runner, tmpdir, project, zenodo_sandbox, dataverse_demo, provider, params, output + runner, tmpdir, project, zenodo_sandbox, dataverse_demo, with_injection, provider, params, output ): """Test publishing of dataset.""" - result = runner.invoke(cli, ["dataset", "create", "my-dataset"]) + result = runner.invoke(cli, ["dataset", "create", "my-dataset", "-k", "keyword", "-k", "data"]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) assert "OK" in result.output @@ -841,7 +850,7 @@ def test_dataset_export_published_url( result = runner.invoke(cli, ["dataset", "add", "--copy", "my-dataset", str(new_file)]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) - with with_dataset(name="my-dataset", commit_database=True) as dataset: + with with_injection(), with_dataset(name="my-dataset", commit_database=True) as dataset: dataset.description = "awesome dataset" dataset.creators[0].affiliation = "eth" @@ -854,6 +863,14 @@ def test_dataset_export_published_url( assert "Exported to:" in result.output assert output in result.output + m = re.search(r"Exported to:\s*(\S*)$", result.output, flags=re.MULTILINE) + doi = m.group(1) + result = runner.invoke(cli, ["dataset", "import", doi, "--name", "imported"], input="y") + assert 0 == result.exit_code, format_result_exception(result) + + dataset = get_dataset_with_injection("imported") + assert {"data", "keyword"} == set(dataset.keywords) + @pytest.mark.integration @retry_failed diff --git a/tests/cli/test_session.py b/tests/cli/test_session.py index 84d430213e..6f2e15ed83 100644 --- a/tests/cli/test_session.py 
+++ b/tests/cli/test_session.py @@ -1,7 +1,6 @@ -# -# Copyright 2021 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,7 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Test ``service`` command.""" +"""Test ``session`` command.""" import re from unittest.mock import MagicMock, patch @@ -25,7 +24,7 @@ from tests.utils import format_result_exception -def test_session_up_down(runner, project, dummy_session_provider, monkeypatch): +def test_session_up_down(runner, project, dummy_session_provider): """Test starting a session.""" browser = dummy_session_provider @@ -66,7 +65,7 @@ def test_session_up_down(runner, project, dummy_session_provider, monkeypatch): def test_session_start_config_requests(runner, project, dummy_session_provider, monkeypatch): """Test session with configuration in the renku config.""" - import docker + import renku.core.session.docker result = runner.invoke(cli, ["config", "set", "interactive.cpu_request", "0.5"]) assert 0 == result.exit_code, format_result_exception(result) @@ -76,14 +75,64 @@ def test_session_start_config_requests(runner, project, dummy_session_provider, assert 0 == result.exit_code, format_result_exception(result) with monkeypatch.context() as monkey: - docker_mock = MagicMock() - docker_mock.api.inspect_image.return_value = {} - monkey.setattr(docker, "from_env", lambda: docker_mock) + docker_client = MagicMock() + docker_client.api.inspect_image.return_value = {} + monkey.setattr(renku.core.session.docker.DockerSessionProvider, "docker_client", lambda _: docker_client) + result = runner.invoke(cli, ["session", "start", "-p", "docker"], input="y\n") + assert 0 == result.exit_code, format_result_exception(result) assert "successfully started" in result.output +def test_session_start_with_docker_args(runner, project, dummy_session_provider, monkeypatch): + """Test passing docker run arguments to session start.""" + import renku.core.session.docker + + with monkeypatch.context() as monkey: + docker_client = MagicMock() + docker_client.api.inspect_image.return_value = {} + monkey.setattr(renku.core.session.docker.DockerSessionProvider, "docker_client", lambda _: docker_client) + + result = runner.invoke( + cli, + [ + "session", + "start", + "-p", + "docker", + "--cpu-rt-period", + "100", + "--cap-add", + "SYS_ADMIN", + "--cap-add", + "SYS_NICE", + "--env", + "ENV1=value1", + "--env", + "ENV2=value2", + "--read-only", + "--volume", + "/host/path:/container/path", + ], + input="y\n", + ) + + assert 0 == result.exit_code, format_result_exception(result) + + kwargs = docker_client.containers.run.call_args.kwargs + + assert ("SYS_ADMIN", "SYS_NICE") == kwargs["cap_add"] + assert 100 == kwargs["cpu_rt_period"] + assert kwargs["read_only"] is True + assert "ENV1" in kwargs["environment"] + assert "value1" == kwargs["environment"]["ENV1"] + assert "ENV2" in kwargs["environment"] + assert "value2" == kwargs["environment"]["ENV2"] + assert 2 == len(kwargs["volumes"]) + assert 
"/host/path:/container/path" in kwargs["volumes"] + + def test_session_ssh_setup(runner, project, dummy_session_provider, fake_home): """Test starting a session.""" from renku.core.util.ssh import generate_ssh_keys diff --git a/tests/cli/test_workflow.py b/tests/cli/test_workflow.py index d3c312ad3b..95cbdbc81d 100644 --- a/tests/cli/test_workflow.py +++ b/tests/cli/test_workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -711,7 +710,7 @@ def _flatten_dict(obj, key_string=""): @pytest.mark.parametrize("provider", available_workflow_providers()) -def test_workflow_execute_command_with_api_parameter_set(runner, run_shell, project, capsys, transaction_id, provider): +def test_workflow_execute_command_with_api_parameter_set(runner, run_shell, project, transaction_id, provider): """Test executing a workflow with --set for a renku.ui.api.Parameter.""" script = project.path / "script.py" output = project.path / "output" @@ -740,7 +739,7 @@ def test_workflow_execute_command_with_api_parameter_set(runner, run_shell, proj @pytest.mark.parametrize("provider", available_workflow_providers()) -def test_workflow_execute_command_with_api_input_set(runner, run_shell, project, capsys, transaction_id, provider): +def test_workflow_execute_command_with_api_input_set(runner, run_shell, project, transaction_id, provider): """Test executing a workflow with --set for a renku.ui.api.Input.""" script = project.path / "script.py" output = project.path / "output" @@ -775,7 +774,7 @@ def test_workflow_execute_command_with_api_input_set(runner, run_shell, project, @pytest.mark.parametrize("provider", available_workflow_providers()) -def test_workflow_execute_command_with_api_output_set(runner, run_shell, project, capsys, transaction_id, provider): +def test_workflow_execute_command_with_api_output_set(runner, run_shell, project, transaction_id, provider): """Test executing a workflow with --set for a renku.ui.api.Output.""" script = project.path / "script.py" output = project.path / "output" @@ -806,7 +805,7 @@ def test_workflow_execute_command_with_api_output_set(runner, run_shell, project assert 0 == result.exit_code, format_result_exception(result) -def test_workflow_execute_command_with_api_duplicate_output(runner, run_shell, project, capsys, transaction_id): +def test_workflow_execute_command_with_api_duplicate_output(run_shell, project, transaction_id): """Test executing a workflow with duplicate output with differing path.""" script = project.path / "script.py" output = project.path / "output" @@ -824,7 +823,7 @@ def test_workflow_execute_command_with_api_duplicate_output(runner, run_shell, p assert b"Error: Invalid parameter value - Duplicate input/output name found: my-output\n" in result[0] -def test_workflow_execute_command_with_api_valid_duplicate_output(runner, run_shell, project, capsys, transaction_id): +def test_workflow_execute_command_with_api_valid_duplicate_output(run_shell, project, transaction_id): """Test executing a workflow with duplicate output with same path.""" script = project.path / "script.py" output = project.path / "output" @@ -844,7 +843,7 @@ def test_workflow_execute_command_with_api_valid_duplicate_output(runner, run_sh assert 
result[1] is None -def test_workflow_execute_command_with_api_duplicate_input(runner, run_shell, project, capsys, transaction_id): +def test_workflow_execute_command_with_api_duplicate_input(run_shell, project, transaction_id): """Test executing a workflow with duplicate input with differing path.""" script = project.path / "script.py" input = project.path / "input" @@ -862,7 +861,7 @@ def test_workflow_execute_command_with_api_duplicate_input(runner, run_shell, pr assert b"Error: Invalid parameter value - Duplicate input/output name found: my-input\n" in result[0] -def test_workflow_execute_command_with_api_valid_duplicate_input(runner, run_shell, project, capsys, transaction_id): +def test_workflow_execute_command_with_api_valid_duplicate_input(run_shell, project, transaction_id): """Test executing a workflow with duplicate input with same path.""" script = project.path / "script.py" input = project.path / "input" diff --git a/tests/core/commands/test_text_canvas.py b/tests/core/commands/test_text_canvas.py new file mode 100644 index 0000000000..491e19a45d --- /dev/null +++ b/tests/core/commands/test_text_canvas.py @@ -0,0 +1,115 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
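The new `test_session_start_with_docker_args` test above asserts on the keyword arguments that reach the mocked Docker client through `call_args.kwargs`. For readers unfamiliar with that idiom, a minimal standard-library sketch with hypothetical names:

from unittest.mock import MagicMock

client = MagicMock()

# The code under test would make this call somewhere along the way.
client.containers.run(image="example:latest", read_only=True, environment={"ENV1": "value1"})

# call_args.kwargs exposes the keyword arguments of the most recent call (Python 3.8+).
kwargs = client.containers.run.call_args.kwargs
assert kwargs["read_only"] is True
assert kwargs["environment"]["ENV1"] == "value1"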
+"""Renku TextCanvas tests.""" + +import pytest + +from renku.command.view_model.text_canvas import NodeShape, Point, RectangleShape, Size, TextMatrix + + +def test_text_matrix(): + """Test that TextMatrix works.""" + + matrix = TextMatrix(size=Size(5, 5)) + + matrix[0, 0] = "a" + matrix[2, 2] = "b" + matrix[4, 4] = "c" + matrix[0, 4] = "d" + matrix[4, 0] = "e" + + assert matrix[0, 0] == "a" + assert matrix[2, 2] == "b" + assert matrix[4, 4] == "c" + assert matrix[0, 4] == "d" + assert matrix[4, 0] == "e" + + with pytest.raises(AssertionError): + matrix[-1, 0] = "x" + + with pytest.raises(AssertionError): + matrix[0, -1] = "x" + + with pytest.raises(AssertionError): + matrix[-1, -1] = "x" + + with pytest.raises(AssertionError): + matrix[5, 4] = "x" + + with pytest.raises(AssertionError): + matrix[4, 5] = "x" + + with pytest.raises(AssertionError): + matrix[5, 5] = "x" + + assert str(matrix) == "a e\n \n b \n \nd c" + + matrix2 = TextMatrix(size=Size(5, 5), x_offset=1, y_offset=2) + matrix2[0, 0] = "a" + matrix2[2, 2] = "b" + matrix2[-1, -1] = "c" + matrix2[-1, -2] = "d" + + assert matrix2[0, 0] == "a" + assert matrix2[2, 2] == "b" + assert matrix2[-1, -1] == "c" + assert matrix2[-1, -2] == "d" + + with pytest.raises(AssertionError): + matrix2[5, 4] = "x" + + with pytest.raises(AssertionError): + matrix2[-2, -3] = "x" + + assert str(matrix2) == "d \nc \n a \n \n b " + + +@pytest.mark.parametrize( + "start,end,double,result", + [ + (Point(1, 1), Point(7, 4), False, " \n ┌─────┐\n │ │\n │ │\n └─────┘"), + (Point(0, 0), Point(3, 3), False, "┌──┐ \n│ │ \n│ │ \n└──┘ \n "), + (Point(1, 1), Point(4, 3), True, " \n ╔══╗ \n ║ ║ \n ╚══╝ \n "), + ], +) +def test_rectangle_shape(start, end, double, result): + """Test rendering a rectangle shape.""" + matrix = TextMatrix(size=Size(8, 5)) + + shape = RectangleShape(start, end, double_border=double) + + shape.draw(matrix) + + assert str(matrix) == result + + +@pytest.mark.parametrize( + "text,point,result", + [ + ("A", Point(4, 4), " \n \n \n \n ┌─┐ \n │A│ \n └─┘ \n "), + ("AB DE", Point(1, 0), " ┌─────┐\n │AB DE│\n └─────┘\n \n \n \n \n "), + ("A B\nC D", Point(2, 2), " \n \n ┌───┐ \n │A B│ \n │C D│ \n └───┘ \n \n "), + ], +) +def test_node_shape(text, point, result): + """Test rendering a node shape.""" + matrix = TextMatrix(size=Size(8, 8)) + + shape = NodeShape(text=text, point=point) + + shape.draw(matrix) + + assert str(matrix) == result diff --git a/tests/core/commands/test_workflow.py b/tests/core/commands/test_workflow.py index 4775b7a5ee..3f2ab8cec8 100644 --- a/tests/core/commands/test_workflow.py +++ b/tests/core/commands/test_workflow.py @@ -1,5 +1,4 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) +# Copyright Swiss Data Science Center (SDSC) # A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# diff --git a/tests/core/plugins/test_session.py b/tests/core/plugins/test_session.py index ed84a97b43..22c4cbe27e 100644 --- a/tests/core/plugins/test_session.py +++ b/tests/core/plugins/test_session.py @@ -28,6 +28,7 @@ from renku.core.session.renkulab import RenkulabSessionProvider from renku.core.session.session import session_list, session_start, session_stop, ssh_setup from renku.core.util.ssh import SystemSSHConfig +from renku.domain_model.session import SessionStopStatus def fake_start( @@ -46,8 +47,8 @@ def fake_start( def fake_stop(self, project_name, session_name, stop_all): if session_name == "missing_session": - return False - return True + return SessionStopStatus.FAILED + return SessionStopStatus.SUCCESSFUL def fake_find_image(self, image_name, config): @@ -60,7 +61,7 @@ def fake_build_image(self, image_descriptor, image_name, config): return -def fake_session_list(self, project_name, config): +def fake_session_list(self, project_name): return ["0xdeadbeef"] @@ -139,14 +140,7 @@ def test_session_start( ], ) def test_session_stop( - run_shell, - project, - session_provider, - provider_name, - parameters, - provider_patches, - result, - with_injection, + run_shell, project, with_injection, session_provider, provider_name, parameters, provider_patches, result ): """Test stopping sessions.""" with patch.multiple(session_provider, session_stop=fake_stop, **provider_patches): @@ -187,9 +181,9 @@ def test_session_list( if not isinstance(result, list) and issubclass(result, Exception): with pytest.raises(result): - session_list(provider=provider, config_path=None) + session_list(provider=provider) else: - output = session_list(provider=provider, config_path=None) + output = session_list(provider=provider) assert output.sessions == result diff --git a/tests/fixtures/session.py b/tests/fixtures/session.py index 043f7e4041..9180a348e3 100644 --- a/tests/fixtures/session.py +++ b/tests/fixtures/session.py @@ -22,6 +22,8 @@ import pytest +from renku.domain_model.session import SessionStopStatus + @pytest.fixture() def dummy_session_provider(): @@ -53,7 +55,7 @@ def find_image(self, image_name: str, config: Optional[Dict[str, Any]]) -> bool: def session_provider(self) -> ISessionProvider: return self - def session_list(self, project_name: str, config: Optional[Dict[str, Any]]) -> List[Session]: + def session_list(self, project_name: str) -> List[Session]: return [ Session( id=n, @@ -82,13 +84,13 @@ def session_start( self.sessions.append(name) return name, "" - def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> bool: + def session_stop(self, project_name: str, session_name: Optional[str], stop_all: bool) -> SessionStopStatus: if stop_all: self.sessions.clear() - return True + return SessionStopStatus.SUCCESSFUL self.sessions.remove(session_name) - return True + return SessionStopStatus.SUCCESSFUL def session_url(self, session_name: str) -> Optional[str]: return "http://localhost/" diff --git a/tests/fixtures/templates.py b/tests/fixtures/templates.py index c74736d500..d65e88a5a8 100644 --- a/tests/fixtures/templates.py +++ b/tests/fixtures/templates.py @@ -59,7 +59,7 @@ def template(template_metadata): "url": "https://github.com/SwissDataScienceCenter/renku-project-template", "id": "python-minimal", "index": 1, - "ref": "master", + "branch": "master", # TODO: Add template parameters here once parameters are added to the template. 
"metadata": {}, "default_metadata": template_metadata, @@ -84,7 +84,7 @@ def project_init(template): "init_custom": [ "init", "--template-ref", - template["ref"], + template["branch"], "--template-id", "python-minimal", data["test_project"], @@ -92,7 +92,7 @@ def project_init(template): "init_custom_template": ( "https://gitlab.dev.renku.ch/renku-python-integration-tests/core-it-template-variable-test-project" ), - "remote": ["--template-source", template["url"], "--template-ref", template["ref"]], + "remote": ["--template-source", template["url"], "--template-ref", template["branch"]], "id": ["--template-id", template["id"]], "force": ["--force"], "parameters": ["--parameter", "p1=v1", "--parameter", "p2=v2"], diff --git a/tests/service/controllers/test_templates_create_project.py b/tests/service/controllers/test_templates_create_project.py index f70c234d01..d2ab69c80c 100644 --- a/tests/service/controllers/test_templates_create_project.py +++ b/tests/service/controllers/test_templates_create_project.py @@ -62,7 +62,7 @@ def test_template_create_project_ctrl(ctrl_init, svc_client_templates_creation, "git_url", "project_name_stripped", "depth", - "ref", + "branch", "new_project_url_with_auth", "url_with_auth", } @@ -83,7 +83,7 @@ def test_template_create_project_ctrl(ctrl_init, svc_client_templates_creation, expected_metadata.add("__renku_version__") assert expected_metadata == set(received_metadata.keys()) assert payload["url"] == received_metadata["__template_source__"] - assert payload["ref"] == received_metadata["__template_ref__"] + assert payload["branch"] == received_metadata["__template_ref__"] assert payload["identifier"] == received_metadata["__template_id__"] assert payload["project_namespace"] == received_metadata["__namespace__"] assert payload["project_repository"] == received_metadata["__repository__"] diff --git a/tests/service/controllers/utils/test_remote_project.py b/tests/service/controllers/utils/test_remote_project.py index f8f91b4ba9..490eee1bbd 100644 --- a/tests/service/controllers/utils/test_remote_project.py +++ b/tests/service/controllers/utils/test_remote_project.py @@ -55,13 +55,13 @@ def test_project_metadata_custom_remote(): request_data = { "git_url": "https://gitlab.dev.renku.ch/renku-python-integration-tests/import-me", - "ref": "my-branch", + "branch": "my-branch", } ctrl = RemoteProject(user_data, request_data) - ref = ctrl.ctx["ref"] + branch = ctrl.ctx["branch"] - assert request_data["ref"] == ref + assert request_data["branch"] == branch def test_project_metadata_remote_err(): @@ -98,15 +98,9 @@ def test_remote_project_context(): with ctrl.remote() as project_path: assert project_path result = migrations_check().build().execute().output - assert result["core_renku_version"] == renku.__version__ - assert result["project_renku_version"] == "pre-0.11.0" - assert result["core_compatibility_status"]["migration_required"] is True - assert result["template_status"]["newer_template_available"] is False - assert result["template_status"]["project_template_version"] is None - assert result["template_status"]["latest_template_version"] is None - assert result["template_status"]["template_source"] is None - assert result["template_status"]["template_ref"] is None - assert result["template_status"]["template_id"] is None - assert result["template_status"]["automated_template_update"] is True - assert result["dockerfile_renku_status"]["automated_dockerfile_update"] is False - assert result["project_supported"] is True + assert result.core_renku_version == 
renku.__version__ + assert result.project_renku_version == "pre-0.11.0" + assert result.core_compatibility_status.migration_required is True + assert isinstance(result.template_status, ValueError) + assert result.dockerfile_renku_status.automated_dockerfile_update is False + assert result.project_supported is True diff --git a/tests/service/jobs/test_datasets.py b/tests/service/jobs/test_datasets.py index c26f6ce003..041013023f 100644 --- a/tests/service/jobs/test_datasets.py +++ b/tests/service/jobs/test_datasets.py @@ -302,7 +302,7 @@ def test_delay_add_file_job(svc_client_cache, it_remote_repo_url_temp_branch, vi context = DatasetAddRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, @@ -346,7 +346,7 @@ def test_delay_add_file_job_failure(svc_client_cache, it_remote_repo_url_temp_br context = DatasetAddRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, @@ -414,7 +414,7 @@ def test_delay_create_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran context = DatasetCreateRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, @@ -451,7 +451,7 @@ def test_delay_create_dataset_failure(svc_client_cache, it_remote_repo_url_temp_ context = DatasetCreateRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, @@ -487,7 +487,7 @@ def test_delay_remove_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran request_payload = { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": "mydata", "migrate_project": True, } @@ -521,7 +521,7 @@ def test_delay_remove_dataset_job_failure(svc_client_cache, it_remote_repo_url_t request_payload = { "git_url": it_remote_repo_url, - "ref": ref, + "branch": ref, "name": dataset_name, } @@ -549,7 +549,7 @@ def test_delay_edit_dataset_job(svc_client_cache, it_remote_repo_url_temp_branch context = DatasetEditRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": "mydata", "title": f"new title => {uuid.uuid4().hex}", # NOTE: We test with this only to check that recursive invocation is being prevented. @@ -588,7 +588,7 @@ def test_delay_edit_dataset_job_failure(svc_client_cache, it_remote_repo_url_tem context = DatasetEditRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": "mydata", "title": f"new title => {uuid.uuid4().hex}", "migrate_project": False, @@ -621,7 +621,7 @@ def test_delay_unlink_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran context = DatasetUnlinkRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": "ds1", "include_filters": ["data1"], # NOTE: We test with this only to check that recursive invocation is being prevented. 
@@ -658,7 +658,7 @@ def test_delay_unlink_dataset_job_failure(svc_client_cache, it_remote_repo_url_t it_remote_repo_url, branch = it_remote_repo_url_temp_branch context = DatasetUnlinkRequest().load( - {"git_url": it_remote_repo_url, "ref": branch, "name": "ds1", "include_filters": ["data1"]} + {"git_url": it_remote_repo_url, "branch": branch, "name": "ds1", "include_filters": ["data1"]} ) _, _, cache = svc_client_cache @@ -687,7 +687,7 @@ def test_unlink_dataset_sync(svc_client_cache, it_remote_repo_url_temp_branch, v context = DatasetUnlinkRequest().load( { "git_url": it_remote_repo_url, - "ref": branch, + "branch": branch, "name": "ds1", "include_filters": ["data1"], "migrate_project": True, diff --git a/tests/service/jobs/test_project.py b/tests/service/jobs/test_project.py index 4e6c919838..dad1d59fae 100644 --- a/tests/service/jobs/test_project.py +++ b/tests/service/jobs/test_project.py @@ -30,7 +30,9 @@ def test_delay_migration_job(svc_client_cache, it_remote_old_repo_url_temp_branc it_remote_repo_url, branch = it_remote_old_repo_url_temp_branch - context = ProjectMigrateRequest().load({"git_url": it_remote_repo_url, "ref": branch, "skip_docker_update": True}) + context = ProjectMigrateRequest().load( + {"git_url": it_remote_repo_url, "branch": branch, "skip_docker_update": True} + ) _, _, cache = svc_client_cache renku_module = "renku.ui.service.controllers.cache_migrate_project" diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index 3d72533285..b37b9af453 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -33,13 +33,7 @@ from renku.domain_model.provenance.agent import Person from renku.infrastructure.gateway.dataset_gateway import DatasetGateway from renku.infrastructure.repository import Repository -from renku.ui.service.errors import ( - IntermittentFileExistsError, - IntermittentProjectTemplateUnavailable, - UserAnonymousError, - UserProjectTemplateReferenceError, - UserRepoUrlInvalidError, -) +from renku.ui.service.errors import IntermittentFileExistsError, UserAnonymousError, UserRepoUrlInvalidError from renku.ui.service.jobs.cleanup import cache_files_cleanup from renku.ui.service.serializers.headers import JWT_TOKEN_SECRET from tests.utils import assert_rpc_response, retry_failed @@ -947,8 +941,12 @@ def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) - assert_rpc_response(response, "error") - assert IntermittentProjectTemplateUnavailable.code == response.json["error"]["code"] + assert_rpc_response(response) + + assert response.json["result"].get("template_status", {}).get("code") == 3140 + assert "Error accessing the project template" in response.json["result"].get("template_status", {}).get( + "devMessage" + ) @pytest.mark.service @@ -965,8 +963,12 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) - assert_rpc_response(response, "error") - assert UserProjectTemplateReferenceError.code == response.json["error"]["code"] + assert_rpc_response(response) + + assert response.json["result"].get("template_status", {}).get("code") == 1141 + assert "Cannot find the reference 'FAKE_REF' in the template repository" in response.json["result"].get( + "template_status", {} + ).get("devMessage") @pytest.mark.service diff 
--git a/tests/service/views/test_templates_views.py b/tests/service/views/test_templates_views.py index 52187f0c4a..eeff9937de 100644 --- a/tests/service/views/test_templates_views.py +++ b/tests/service/views/test_templates_views.py @@ -75,7 +75,7 @@ def test_compare_manifests(svc_client_with_templates): assert {"result"} == set(response.json.keys()) assert response.json["result"]["templates"] - templates_source = fetch_templates_source(source=template_params["url"], reference=template_params["ref"]) + templates_source = fetch_templates_source(source=template_params["url"], reference=template_params["branch"]) manifest_file = templates_source.path / TEMPLATE_MANIFEST manifest = TemplatesManifest.from_path(manifest_file).get_raw_content() diff --git a/tests/service/views/v1_0/test_cache_views_1_0.py b/tests/service/views/v1_0/test_cache_views_1_0.py index c359998003..c9d84d7380 100644 --- a/tests/service/views/v1_0/test_cache_views_1_0.py +++ b/tests/service/views/v1_0/test_cache_views_1_0.py @@ -16,9 +16,13 @@ # limitations under the License. """Renku service cache view tests.""" import json +from unittest.mock import MagicMock import pytest +from renku.ui.service.errors import IntermittentProjectTemplateUnavailable +from tests.utils import assert_rpc_response + @pytest.mark.service @pytest.mark.integration @@ -38,3 +42,46 @@ def test_execute_migrations_1_0(svc_client_setup): ) assert "warnings" not in response.json["result"] assert "errors" not in response.json["result"] + + +@pytest.mark.service +@pytest.mark.integration +def test_check_migrations_local_1_0(svc_client_setup): + """Check if migrations are required for a local project.""" + svc_client, headers, project_id, _, _ = svc_client_setup + + response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + assert 200 == response.status_code + + assert not response.json["result"]["core_compatibility_status"]["migration_required"] + assert not response.json["result"]["template_status"]["newer_template_available"] + assert not response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"] + assert response.json["result"]["project_supported"] + assert response.json["result"]["project_renku_version"] + assert response.json["result"]["core_renku_version"] + assert "template_source" in response.json["result"]["template_status"] + assert "template_ref" in response.json["result"]["template_status"] + assert "template_id" in response.json["result"]["template_status"] + assert "automated_template_update" in response.json["result"]["template_status"] + + +@pytest.mark.service +@pytest.mark.integration +def test_migrate_wrong_template_source_1_0(svc_client_setup, monkeypatch): + """Check if migrations gracefully fail when the project template is not available.""" + svc_client, headers, project_id, _, _ = svc_client_setup + + # NOTE: fake source + with monkeypatch.context() as monkey: + import renku.core.template.usecase + + monkey.setattr( + renku.core.template.usecase.TemplateMetadata, "source", property(MagicMock(return_value="https://FAKE_URL")) + ) + + response = svc_client.get( + "/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers + ) + + assert_rpc_response(response, "error") + assert IntermittentProjectTemplateUnavailable.code == response.json["error"]["code"]