From a9994a8cb2541527ca9be731037cc60b03d62cea Mon Sep 17 00:00:00 2001
From: Ralf Grubenmann
Date: Fri, 7 Jul 2023 09:31:23 +0200
Subject: [PATCH] feat(service): replace/refactor internal repository cache
 (#3534)
---
 cache-cleanup-job/Dockerfile                  |  12 +
 cache-cleanup-job/README.md                   |   6 +
 cache-cleanup-job/cleanup.sh                  |  25 +
 conftest.py                                   |   1 -
 docker-compose.yml                            |  13 -
 .../templates/cache-cleanup-job.yaml          |  27 ++
 .../templates/cronjob-serviceaccount.yaml     |  36 ++
 .../renku-core/templates/deployment.yaml      |  42 --
 poetry.lock                                   | 449 ++++++++++++------
 pyproject.toml                                |   3 -
 renku/ui/cli/service.py                       |  18 +-
 renku/ui/service/__init__.py                  |   5 +-
 renku/ui/service/cache/__init__.py            |   5 +-
 renku/ui/service/cache/models/__init__.py     |   5 +-
 renku/ui/service/cache/models/project.py      |  29 +-
 renku/ui/service/cache/projects.py            |   4 +-
 renku/ui/service/cache/serializers/project.py |   7 +-
 renku/ui/service/cache/users.py               |   4 +-
 renku/ui/service/controllers/api/mixins.py    | 157 +-----
 .../controllers/cache_list_projects.py        |  60 ---
 .../controllers/cache_migrations_check.py     |  22 +-
 .../controllers/cache_project_clone.py        |  55 ---
 .../controllers/utils/project_clone.py        |  83 ----
 renku/ui/service/entrypoint.py                |   5 +-
 .../service/gateways/gitlab_api_provider.py   |  28 +-
 renku/ui/service/gateways/repository_cache.py | 234 +++++++++
 .../ui/service/interfaces/git_api_provider.py |   5 +-
 .../ui/service/interfaces/repository_cache.py |  41 ++
 renku/ui/service/jobs/cleanup.py              |  21 -
 renku/ui/service/logger.py                    |   2 -
 renku/ui/service/logging.yaml                 |   9 -
 renku/ui/service/scheduler.py                 |  74 ---
 renku/ui/service/serializers/cache.py         |   5 +-
 renku/ui/service/serializers/common.py        |  16 +-
 renku/ui/service/serializers/config.py        |  12 +-
 renku/ui/service/serializers/datasets.py      |  22 +-
 renku/ui/service/serializers/graph.py         |   9 +-
 renku/ui/service/serializers/project.py       |   7 +-
 renku/ui/service/serializers/workflows.py     |   8 +-
 renku/ui/service/views/api_versions.py        |   7 +-
 renku/ui/service/views/apispec.py             |  14 -
 renku/ui/service/views/cache.py               |  90 ++--
 renku/ui/service/views/error_handlers.py      |  21 +-
 renku/ui/service/views/templates.py           |   4 +-
 start-telepresence.sh                         |   2 +-
 .../controllers/utils/test_project_clone.py   |  60 +--
 tests/service/fixtures/service_endpoints.py   |  20 +-
 tests/service/fixtures/service_integration.py |  96 ++--
 tests/service/fixtures/service_projects.py    |   2 +-
 tests/service/fixtures/service_scheduler.py   |  33 --
 tests/service/jobs/test_datasets.py           |  53 +--
 tests/service/jobs/test_jobs.py               | 113 +---
 tests/service/scheduler/test_scheduler.py     |  28 --
 tests/service/views/test_cache_views.py       | 172 ++-----
 tests/service/views/test_config_views.py      |  22 +-
 tests/service/views/test_dataset_views.py     | 205 ++++----
 tests/service/views/test_exceptions.py        |  57 +--
 tests/service/views/test_project_views.py     |  35 +-
 .../service/views/test_workflow_plan_views.py |  16 +-
 .../views/v1_0/test_cache_views_1_0.py        |  16 +-
 tests/utils.py                                |   4 +-
 61 files changed, 1128 insertions(+), 1508 deletions(-)
 create mode 100644 cache-cleanup-job/Dockerfile
 create mode 100644 cache-cleanup-job/README.md
 create mode 100644 cache-cleanup-job/cleanup.sh
 create mode 100644 helm-chart/renku-core/templates/cache-cleanup-job.yaml
 create mode 100644 helm-chart/renku-core/templates/cronjob-serviceaccount.yaml
 delete mode 100644 renku/ui/service/controllers/cache_list_projects.py
 delete mode 100644 renku/ui/service/controllers/cache_project_clone.py
 delete mode 100644 renku/ui/service/controllers/utils/project_clone.py
 create mode 100644 renku/ui/service/gateways/repository_cache.py
 create mode 100644 renku/ui/service/interfaces/repository_cache.py
 delete mode 100644 renku/ui/service/scheduler.py
 delete mode 100644 tests/service/fixtures/service_scheduler.py
 delete mode 100644 tests/service/scheduler/test_scheduler.py

diff --git a/cache-cleanup-job/Dockerfile b/cache-cleanup-job/Dockerfile
new file mode 100644
index 0000000000..0541bd81ab
--- /dev/null
+++ b/cache-cleanup-job/Dockerfile
@@ -0,0 +1,12 @@
+# Docker image for core-svc cronjob
+FROM alpine:3.18.2
+RUN apk add --no-cache ca-certificates=20230506-r0 curl=8.1.2-r0 bash=5.2.15-r5 && rm -rf /var/cache/apk/*
+RUN curl -LO "https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl" &&\
+    chmod +x ./kubectl &&\
+    mv ./kubectl /usr/bin/kubectl
+
+RUN mkdir /code
+WORKDIR /code
+COPY cleanup.sh /code/
+
+ENTRYPOINT ["/bin/bash", "/code/cleanup.sh"]
diff --git a/cache-cleanup-job/README.md b/cache-cleanup-job/README.md
new file mode 100644
index 0000000000..2ca7d58314
--- /dev/null
+++ b/cache-cleanup-job/README.md
@@ -0,0 +1,6 @@
+# Core Service cache cleanup image
+Small image to be used for the cache cleanup CronJob for the core service.
+
+Loops through the core service pods and calls the cleanup endpoint on each core-svc instance.
+
+Push as `renku/renku-core-cleanup:<version>` to use.
diff --git a/cache-cleanup-job/cleanup.sh b/cache-cleanup-job/cleanup.sh
new file mode 100644
index 0000000000..2502a44937
--- /dev/null
+++ b/cache-cleanup-job/cleanup.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+set -e
+
+core_version=$1
+namespace=$2
+
+mapfile -t -d " " pod_ips < <(kubectl -n "$namespace" get pods --selector="app.kubernetes.io/name=core,app.kubernetes.io/deploymentVersion=$core_version" -o=jsonpath="{.items[*].status.podIP}" )
+
+success=true
+
+for pod_ip in "${pod_ips[@]}"
+do
+    echo "Calling http://$pod_ip:8080/renku/cache.cleanup"
+    if curl "http://$pod_ip:8080/renku/cache.cleanup" ; then
+        :
+    else
+        echo "Cleanup failed for pod $pod_ip with status $?">&2
+        success=false
+    fi
+done
+
+if ! $success; then
+    exit 1;
+fi
diff --git a/conftest.py b/conftest.py
index ee6ca10e91..a4c7e2bf64 100644
--- a/conftest.py
+++ b/conftest.py
@@ -58,7 +58,6 @@
     "tests.service.fixtures.service_integration",
     "tests.service.fixtures.service_jobs",
     "tests.service.fixtures.service_projects",
-    "tests.service.fixtures.service_scheduler",
 ]
 
 INCLUDE_FIXTURES = GLOBAL_FIXTURE_LOCATIONS + CORE_FIXTURE_LOCATIONS + CLI_FIXTURE_LOCATIONS + SERVICE_FIXTURE_LOCATIONS
diff --git a/docker-compose.yml b/docker-compose.yml
index f3d5764178..4ed1333486 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -87,19 +87,6 @@ services:
       - traefik.http.routers.swagger.rule=PathPrefix(`/api/docs`)
       - traefik.http.services.my-service.loadbalancer.server.port=8080
 
-  renku-scheduler:
-    build:
-      context: .
-      dockerfile: Dockerfile
-      args:
-        BUILD_CORE_SERVICE: 1
-    command: ["service", "scheduler"]
-    depends_on:
-      - redis
-    networks:
-      - net
-    env_file: .env
-
   renku-worker:
     build:
       context: .
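# A minimal sketch of how the cleanup image above might be built, pushed, and
# exercised by hand; the "v1" tag matches what the CronJob template below
# pins, while the "v2" deployment version and "renku" namespace are
# illustrative assumptions, not values taken from this patch.

# Build and push using the cache-cleanup-job directory as build context.
docker build -t renku/renku-core-cleanup:v1 cache-cleanup-job/
docker push renku/renku-core-cleanup:v1

# Run one cleanup pass the same way the CronJob invokes the entrypoint:
# first argument is the core deployment version, second the namespace.
# Requires a configured kubectl context allowed to list pods.
bash cache-cleanup-job/cleanup.sh v2 renku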
diff --git a/helm-chart/renku-core/templates/cache-cleanup-job.yaml b/helm-chart/renku-core/templates/cache-cleanup-job.yaml
new file mode 100644
index 0000000000..162b8f4b92
--- /dev/null
+++ b/helm-chart/renku-core/templates/cache-cleanup-job.yaml
@@ -0,0 +1,27 @@
+{{- range $version := .Values.versions }}
+{{ if ne $version.name "v9"}}
+---
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: {{ include "renku-core.fullname" $ }}-cleanup-{{ $version.name }}
+  labels:
+    app.kubernetes.io/deploymentVersion: {{ $version.name }}
+spec:
+  schedule: "*/5 * * * *"
+  concurrencyPolicy: Forbid
+  jobTemplate:
+    spec:
+      template:
+        spec:
+          containers:
+            - name: {{ include "renku-core.fullname" $ }}-cache-cleanup-{{ $version.name }}
+              image: renku/renku-core-cleanup:v1
+              imagePullPolicy: IfNotPresent
+              args:
+                - {{ $version.name | quote}}
+                - {{ $.Release.Namespace }}
+          restartPolicy: OnFailure
+          serviceAccountName: {{ include "renku-core.fullname" $ }}-cleanup
+{{ end }}
+{{ end }}
diff --git a/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml b/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml
new file mode 100644
index 0000000000..3b94c995d6
--- /dev/null
+++ b/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml
@@ -0,0 +1,36 @@
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: {{ include "renku-core.fullname" $ }}-cleanup
+  labels:
+{{ include "renku-core.labels" $ | indent 4 }}
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+  name: {{ include "renku-core.fullname" $ }}-cleanup
+  labels:
+{{ include "renku-core.labels" $ | indent 4 }}
+rules:
+- apiGroups:
+  - ""
+  resources:
+  - pods
+  verbs:
+  - get
+  - list
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+  name: {{ include "renku-core.fullname" $ }}-cleanup
+  labels:
+{{ include "renku-core.labels" $ | indent 4 }}
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: Role
+  name: {{ include "renku-core.fullname" $ }}-cleanup
+subjects:
+- kind: ServiceAccount
+  name: {{ include "renku-core.fullname" $ }}-cleanup
+  namespace: {{ $.Release.Namespace }}
diff --git a/helm-chart/renku-core/templates/deployment.yaml b/helm-chart/renku-core/templates/deployment.yaml
index ef90a93232..345ee6b5fd 100644
--- a/helm-chart/renku-core/templates/deployment.yaml
+++ b/helm-chart/renku-core/templates/deployment.yaml
@@ -279,48 +279,6 @@ spec:
             - name: shared-volume
               mountPath: {{ $.Values.cacheDirectory }}
           {{- include "certificates.volumeMounts.system" $ | nindent 12 }}
-          resources:
-            {{- toYaml $.Values.resources.managementWorkers | nindent 12 }}
-
-        - name: {{ $.Chart.Name }}-scheduler
-          image: "{{ $version.image.repository }}:{{ $version.image.tag }}"
-          imagePullPolicy: {{ $version.image.pullPolicy }}
-          securityContext:
-            {{- toYaml $.Values.securityContext | nindent 12 }}
-          args: ["service", "scheduler"]
-          env:
-            - name: REDIS_HOST
-              value: {{ $.Values.global.redis.host | quote }}
-            - name: REDIS_PORT
-              value: {{ $.Values.global.redis.port | quote }}
-            - name: REDIS_DATABASE
-              value: {{ $.Values.global.redis.dbIndex.coreService | quote }}
-            - name: REDIS_IS_SENTINEL
-              value: {{ $.Values.global.redis.sentinel.enabled | quote }}
-            - name: REDIS_MASTER_SET
-              value: {{ $.Values.global.redis.sentinel.masterSet | quote }}
-            - name: REDIS_PASSWORD
-              valueFrom:
-                secretKeyRef:
-                  name: {{ $.Values.global.redis.existingSecret }}
-                  key: {{ $.Values.global.redis.existingSecretPasswordKey }}
-            - name: REDIS_NAMESPACE
-              value: {{ $version.name }}
-            - name: CACHE_DIR
-              value: {{ $.Values.cacheDirectory | quote }}
-            - name: RENKU_SVC_CLEANUP_INTERVAL
-              value: {{ $.Values.cleanupInterval | quote }}
-            - name: SENTRY_ENABLED
-              value: {{ $.Values.sentry.enabled | quote }}
-            - name: SENTRY_DSN
-              value: {{ $.Values.sentry.dsn }}
-            - name: SENTRY_SAMPLE_RATE
-              value: {{ $.Values.sentry.sampleRate | quote }}
-            - name: SENTRY_ENV
-              value: {{ $.Values.sentry.environment }}
-          {{- include "certificates.env.python" $ | nindent 12 }}
-          volumeMounts:
-          {{- include "certificates.volumeMounts.system" $ | nindent 12 }}
           resources:
             {{- toYaml $.Values.resources.scheduler | nindent 12 }}
   {{- with $.Values.nodeSelector }}
diff --git a/poetry.lock b/poetry.lock
index c8141fdf27..e1dd812274 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,9 +1,10 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry and should not be changed by hand.
 
 [[package]]
 name = "addict"
 version = "2.4.0"
 description = "Addict is a dictionary whose items can be set using both attribute and item syntax."
+category = "main"
 optional = false
 python-versions = "*"
 files = [
@@ -15,6 +16,7 @@ files = [
 name = "alabaster"
 version = "0.7.13"
 description = "A configurable sidebar-enabled Sphinx theme"
+category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -26,6 +28,7 @@ files = [
 name = "ansicon"
 version = "1.89.0"
 description = "Python wrapper for loading Jason Hood's ANSICON"
+category = "main"
 optional = false
 python-versions = "*"
 files = [
@@ -37,6 +40,7 @@ files = [
 name = "apispec"
 version = "6.3.0"
 description = "A pluggable API specification generator. Currently supports the OpenAPI Specification (f.k.a. the Swagger specification)."
+category = "main"
 optional = true
 python-versions = ">=3.7"
 files = [
@@ -61,6 +65,7 @@ yaml = ["PyYAML (>=3.10)"]
 name = "apispec-oneofschema"
 version = "3.0.0"
 description = "Plugin for apispec providing support for Marshmallow-OneOfSchema schemas"
+category = "main"
 optional = true
 python-versions = "*"
 files = [
@@ -77,6 +82,7 @@ marshmallow-oneofschema = "*"
 name = "apispec-webframeworks"
 version = "0.5.2"
 description = "Web framework plugins for apispec."
+category = "main"
 optional = true
 python-versions = ">=3.6"
 files = [
@@ -96,6 +102,7 @@ tests = ["Flask (==1.1.1)", "bottle (==0.12.17)", "mock", "pytest", "tornado"]
 name = "appdirs"
 version = "1.4.4"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "main" optional = false python-versions = "*" files = [ @@ -105,13 +112,14 @@ files = [ [[package]] name = "argcomplete" -version = "3.0.8" +version = "3.1.1" description = "Bash tab completion for argparse" +category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "argcomplete-3.0.8-py3-none-any.whl", hash = "sha256:e36fd646839933cbec7941c662ecb65338248667358dd3d968405a4506a60d9b"}, - {file = "argcomplete-3.0.8.tar.gz", hash = "sha256:b9ca96448e14fa459d7450a4ab5a22bbf9cee4ba7adddf03e65c398b5daeea28"}, + {file = "argcomplete-3.1.1-py3-none-any.whl", hash = "sha256:35fa893a88deea85ea7b20d241100e64516d6af6d7b0ae2bed1d263d26f70948"}, + {file = "argcomplete-3.1.1.tar.gz", hash = "sha256:6c4c563f14f01440aaffa3eae13441c5db2357b5eec639abe7c0b15334627dff"}, ] [package.extras] @@ -121,6 +129,7 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -132,6 +141,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -150,6 +160,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -164,6 +175,7 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "bagit" version = "1.8.1" description = "Create and validate BagIt packages" +category = "main" optional = false python-versions = "*" files = [ @@ -175,6 +187,7 @@ files = [ name = "bashlex" version = "0.16" description = "Python parser for bash" +category = "main" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4" files = [ @@ -186,6 +199,7 @@ files = [ name = "black" version = "23.1.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -235,6 +249,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
+category = "main" optional = false python-versions = ">=2.7" files = [ @@ -251,6 +266,7 @@ wcwidth = ">=0.1.4" name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -262,6 +278,7 @@ files = [ name = "btrees" version = "5.0" description = "Scalable persistent object containers" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -308,28 +325,30 @@ zodb = ["ZODB"] [[package]] name = "cachecontrol" -version = "0.12.13" +version = "0.12.14" description = "httplib2 caching for requests" +category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "CacheControl-0.12.13-py2.py3-none-any.whl", hash = "sha256:431fc10c5ab1a1589ce08c05b948abac31c0f76962d5fc9efab9da280c9790aa"}, - {file = "CacheControl-0.12.13.tar.gz", hash = "sha256:e28ab6c7b57ff53a7f9a6a8431fff021fb7437794ec581884773610bb8ce3f82"}, + {file = "CacheControl-0.12.14-py2.py3-none-any.whl", hash = "sha256:1c2939be362a70c4e5f02c6249462b3b7a24441e4f1ced5e9ef028172edf356a"}, + {file = "CacheControl-0.12.14.tar.gz", hash = "sha256:d1087f45781c0e00616479bfd282c78504371ca71da017b49df9f5365a95feba"}, ] [package.dependencies] -filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} +lockfile = {version = ">=0.9", optional = true, markers = "extra == \"filecache\""} msgpack = ">=0.5.2" requests = "*" [package.extras] -filecache = ["filelock (>=3.8.0)"] +filecache = ["lockfile (>=0.9)"] redis = ["redis (>=2.10.5)"] [[package]] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -341,6 +360,7 @@ files = [ name = "calamus" version = "0.4.2" description = "calamus is a library built on top of marshmallow to allow (de-)Serialization of Python classes to JSON-LD." +category = "main" optional = false python-versions = ">=3.7.1,<4.0.0" files = [ @@ -361,6 +381,7 @@ docs = ["Jinja2 (>=3.0.0,<3.1.0)", "sphinx (>=3.0.3,<4.0.0)", "sphinx-rtd-theme name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -372,6 +393,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = "*" files = [ @@ -448,6 +470,7 @@ pycparser = "*" name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -459,6 +482,7 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -543,6 +567,7 @@ files = [ name = "circus" version = "0.18.0" description = "Circus is a program that will let you run and watch multiple processes and sockets." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -562,6 +587,7 @@ test = ["coverage", "flake8 (==2.1.0)", "gevent", "mock", "nose2", "pyyaml", "to name = "click" version = "8.1.3" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -574,27 +600,29 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "click-option-group" -version = "0.5.5" +version = "0.5.6" description = "Option groups missing in Click" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ - {file = "click-option-group-0.5.5.tar.gz", hash = "sha256:78ee474f07a0ca0ef6c0317bb3ebe79387aafb0c4a1e03b1d8b2b0be1e42fc78"}, - {file = "click_option_group-0.5.5-py3-none-any.whl", hash = "sha256:0f8ca79bc9b1d6fcaafdbe194b17ba1a2dde44ddf19087235c3efed2ad288143"}, + {file = "click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777"}, + {file = "click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7"}, ] [package.dependencies] Click = ">=7.0,<9" [package.extras] -docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx (>=3.0,<6)"] +docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx"] tests = ["pytest"] -tests-cov = ["coverage (<6)", "coveralls", "pytest", "pytest-cov"] +tests-cov = ["coverage", "coveralls", "pytest", "pytest-cov"] [[package]] name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +category = "main" optional = false python-versions = "*" files = [ @@ -612,6 +640,7 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -623,6 +652,7 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -640,6 +670,7 @@ cron = ["capturer (>=2.4)"] name = "coverage" version = "6.4.4" description = "Code coverage measurement for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -701,20 +732,11 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] -[[package]] -name = "crontab" -version = "1.0.1" -description = "Parse and use crontab schedules in Python" -optional = true -python-versions = "*" -files = [ - {file = "crontab-1.0.1.tar.gz", hash = "sha256:89477e3f93c81365e738d5ee2659509e6373bb2846de13922663e79aa74c6b91"}, -] - [[package]] name = "cryptography" version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -756,6 +778,7 @@ test-randomorder = ["pytest-randomly"] name = "cwl-upgrader" version = "1.2.8" description = "Common Workflow Language standalone document upgrader" +category = "main" optional = false python-versions = ">=3.6, <4" files = [ @@ -776,6 +799,7 @@ setuptools = "*" name = "cwl-utils" version = "0.27" description = "" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -800,6 +824,7 @@ pretty = ["cwlformat"] name = "cwltool" version = "3.1.20230425144158" description = "Common workflow language reference implementation" +category = "main" optional = false python-versions = ">=3.6, <4" files = [ @@ -832,6 +857,7 @@ deps = ["galaxy-tool-util (>=22.1.2,<23)"] name = "deal" version = "4.24.1" description = "**Deal** is a Python library for [design by contract][wiki] (DbC) programming." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -841,7 +867,7 @@ files = [ [package.extras] all = ["astroid (>=2.11.0)", "deal-solver", "hypothesis", "pygments", "typeguard (>=3.0.0)", "vaa (>=0.2.1)"] -docs = ["m2r2", "myst-parser", "sphinx (==3.5.*)", "sphinx-rtd-theme (==0.5.*)"] +docs = ["m2r2", "myst-parser", "sphinx (>=3.5.0,<3.6.0)", "sphinx-rtd-theme (>=0.5.0,<0.6.0)"] integration = ["astroid (>=2.11.0)", "deal-solver", "flake8", "hypothesis", "marshmallow", "pygments", "sphinx (>=4.5.0)", "typeguard", "vaa (>=0.2.1)"] lint = ["deal-solver", "flake8", "flake8-commas", "flake8-quotes", "hypothesis", "isort", "mypy (>=0.900)", "mypy_test (>=0.1.1)", "pygments", "typeguard", "unify"] test = ["coverage-conditional-plugin", "coverage[toml]", "docstring-parser", "pytest", "pytest-cov", "urllib3"] @@ -850,6 +876,7 @@ test = ["coverage-conditional-plugin", "coverage[toml]", "docstring-parser", "py name = "deepdiff" version = "6.3.0" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -868,6 +895,7 @@ optimize = ["orjson"] name = "deepmerge" version = "1.0.1" description = "a toolset to deeply merge python dictionaries." +category = "main" optional = false python-versions = "*" files = [ @@ -879,6 +907,7 @@ files = [ name = "dill" version = "0.3.6" description = "serialize all of python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -893,6 +922,7 @@ graph = ["objgraph (>=1.7.2)"] name = "distlib" version = "0.3.6" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -904,6 +934,7 @@ files = [ name = "docker" version = "5.0.3" description = "A Python library for the Docker Engine API." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -924,6 +955,7 @@ tls = ["cryptography (>=3.4.7)", "idna (>=2.0.0)", "pyOpenSSL (>=17.5.0)"] name = "docutils" version = "0.16" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -935,6 +967,7 @@ files = [ name = "dunamai" version = "1.17.0" description = "Dynamic version generation" +category = "main" optional = false python-versions = ">=3.5,<4.0" files = [ @@ -949,6 +982,7 @@ packaging = ">=20.9" name = "enlighten" version = "1.11.2" description = "Enlighten Progress Bar" +category = "main" optional = false python-versions = "*" files = [ @@ -964,6 +998,7 @@ prefixed = ">=0.3.2" name = "execnet" version = "1.9.0" description = "execnet: rapid multi-Python deployment" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -978,6 +1013,7 @@ testing = ["pre-commit"] name = "fakeredis" version = "2.11.1" description = "Fake implementation of redis API for testing purposes." +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -998,6 +1034,7 @@ lua = ["lupa (>=1.14,<2.0)"] name = "filelock" version = "3.12.0" description = "A platform independent file lock." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1013,6 +1050,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p name = "flake8" version = "6.0.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1029,6 +1067,7 @@ pyflakes = ">=3.0.0,<3.1.0" name = "flake8-pyproject" version = "1.2.2" description = "Flake8 plug-in loading the configuration from pyproject.toml" +category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1046,6 +1085,7 @@ dev = ["pyTest", "pyTest-cov"] name = "flaky" version = "3.7.0" description = "Plugin for nose or pytest that automatically reruns flaky tests." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1057,6 +1097,7 @@ files = [ name = "flask" version = "2.2.5" description = "A simple framework for building complex web applications." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1075,24 +1116,11 @@ Werkzeug = ">=2.2.2" async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] -[[package]] -name = "freezegun" -version = "1.2.2" -description = "Let your Python tests travel through time" -optional = true -python-versions = ">=3.6" -files = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - [[package]] name = "frozendict" version = "2.3.8" description = "A simple immutable dictionary" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1139,6 +1167,7 @@ files = [ name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1149,6 +1178,7 @@ files = [ name = "gitdb" version = "4.0.10" description = "Git Object Database" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1163,6 +1193,7 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.27" description = "GitPython is a python library used to interact with Git repositories" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1177,6 +1208,7 @@ gitdb = ">=4.0.1,<5" name = "grandalf" version = "0.8" description = "Graph and drawing algorithms framework" +category = "main" optional = false python-versions = "*" files = [ @@ -1194,6 +1226,7 @@ full = ["numpy", "ply"] name = "gunicorn" version = "20.1.0" description = "WSGI HTTP Server for UNIX" +category = "main" optional = true python-versions = ">=3.5" files = [ @@ -1214,6 +1247,7 @@ tornado = ["tornado (>=0.2)"] name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1228,6 +1262,7 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "humanize" version = "4.0.0" description = "Python humanize utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1242,6 +1277,7 @@ tests = ["freezegun", "pytest", "pytest-cov"] name = "identify" version = "2.5.24" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1256,6 +1292,7 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1267,6 +1304,7 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1276,13 +1314,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = 
"sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -1291,12 +1330,13 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1315,6 +1355,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1326,6 +1367,7 @@ files = [ name = "inject" version = "4.3.1" description = "Python dependency injection framework" +category = "main" optional = false python-versions = "*" files = [ @@ -1336,6 +1378,7 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = false python-versions = "*" files = [ @@ -1350,6 +1393,7 @@ six = "*" name = "isort" version = "5.10.1" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.6.1,<4.0" files = [ @@ -1367,6 +1411,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1378,6 +1423,7 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1395,6 +1441,7 @@ i18n = ["Babel (>=2.7)"] name = "jinxed" version = "1.2.0" description = "Jinxed Terminal Library" +category = "main" optional = false python-versions = "*" files = [ @@ -1409,6 +1456,7 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1450,10 +1498,23 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] +[[package]] +name = "lockfile" +version = "0.12.2" +description = "Platform-independent file locking module" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, + {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, +] + [[package]] name = "lupa" version = "1.14.1" description = "Python wrapper around Lua and LuaJIT" +category = "dev" optional = false python-versions = "*" files = [ @@ -1538,6 +1599,7 @@ files = [ name = "lxml" version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1630,6 +1692,7 @@ source = ["Cython (>=0.29.7)"] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1654,6 +1717,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1713,6 +1777,7 @@ files = [ name = "marshmallow" version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1733,6 +1798,7 @@ tests = ["pytest", "pytz", "simplejson"] name = "marshmallow-oneofschema" version = "3.0.1" description = "marshmallow multiplexing schema" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1752,6 +1818,7 @@ tests = ["mock", "pytest"] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1763,6 +1830,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1774,6 +1842,7 @@ files = [ name = "mistune" version = "2.0.5" description = "A sane Markdown parser with useful plugins and renderers" +category = "main" optional = false python-versions = "*" files = [ @@ -1785,6 +1854,7 @@ files = [ name = "msgpack" version = "1.0.5" description = "MessagePack serializer" +category = "main" optional = false python-versions = "*" files = [ @@ -1857,6 +1927,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1940,6 +2011,7 @@ files = [ name = "mypy" version = "1.3.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1986,6 +2058,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1997,6 +2070,7 @@ files = [ name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2015,6 +2089,7 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -2029,6 +2104,7 @@ setuptools = "*" name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2043,6 +2119,7 @@ dev = ["black", "mypy", "pytest"] name = "owlrl" version = "6.0.2" description = "OWL-RL and RDFS based RDF Closure inferencing for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2057,6 +2134,7 @@ rdflib = ">=6.0.2" name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2068,6 +2146,7 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2079,6 +2158,7 @@ files = [ name = "patool" version = "1.12" description = "portable archive file manager" +category = "main" optional = false python-versions = "*" files = [ @@ -2090,6 +2170,7 @@ files = [ name = "pep8" version = "1.7.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = "*" files = [ @@ -2101,6 +2182,7 @@ files = [ name = "persistent" version = "5.0" description = "Translucent persistent objects" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2148,6 +2230,7 @@ test = ["manuel", "zope.testrunner"] name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" files = [ @@ -2162,6 +2245,7 @@ ptyprocess = ">=0.5" name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2241,6 +2325,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "plantweb" version = "1.2.1" description = "Python client for the PlantUML server" +category = "dev" optional = false python-versions = "*" files = [ @@ -2255,23 +2340,25 @@ six = "*" [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.6.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.6.0-py3-none-any.whl", hash = "sha256:ffa199e3fbab8365778c4a10e1fbf1b9cd50707de826eb304b50e57ec0cc8d38"}, + {file = "platformdirs-3.6.0.tar.gz", hash = "sha256:57e28820ca8094678b807ff529196506d7a21e17156cb1cddb3e74cebce54640"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2287,6 +2374,7 @@ testing = ["pytest", "pytest-benchmark"] name = "poetry-dynamic-versioning" version = "0.21.5" description = "Plugin for Poetry to enable dynamic versioning based on VCS tags" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2306,6 +2394,7 @@ plugin = ["poetry (>=1.2.0,<2.0.0)"] name = "poetry-lock-package" version = "0.5.0" description = "Poetry lock package generator" +category = "dev" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2320,6 +2409,7 @@ toml = ">=0.10.1,<0.11.0" name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2337,13 +2427,14 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "pre-commit" -version = "3.3.2" +version = "3.3.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.2-py2.py3-none-any.whl", hash = "sha256:8056bc52181efadf4aac792b1f4f255dfd2fb5a350ded7335d251a68561e8cb6"}, - {file = "pre_commit-3.3.2.tar.gz", hash = "sha256:66e37bec2d882de1f17f88075047ef8962581f83c234ac08da21a0c58953d1f0"}, + {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, + {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, ] [package.dependencies] @@ -2357,6 +2448,7 @@ virtualenv = ">=20.10.0" name = "prefixed" version = "0.7.0" description = "Prefixed alternative numeric library" +category = "main" optional = false python-versions = "*" files = [ @@ -2368,6 +2460,7 @@ files = [ name = "prettytable" version = "2.5.0" description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2385,6 +2478,7 @@ tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"] name = "prov" version = "1.5.1" description = "A library for W3C Provenance Data Model supporting PROV-JSON, PROV-XML and PROV-O (RDF)" +category = "main" optional = false python-versions = "*" files = [ @@ -2406,6 +2500,7 @@ dot = ["pydot (>=1.2.0)"] name = "psutil" version = "5.9.1" description = "Cross-platform lib for process and system monitoring in Python." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2450,6 +2545,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -2461,6 +2557,7 @@ files = [ name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2472,6 +2569,7 @@ files = [ name = "py-tes" version = "0.4.2" description = "Library for communicating with the GA4GH Task Execution API" +category = "main" optional = false python-versions = ">=2.7, <4" files = [ @@ -2488,6 +2586,7 @@ requests = ">=2.18.1" name = "pycodestyle" version = "2.10.0" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2499,6 +2598,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2510,6 +2610,7 @@ files = [ name = "pydantic" version = "1.10.7" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2562,6 +2663,7 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.1.1" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2579,6 +2681,7 @@ toml = ["toml"] name = "pydot" version = "1.4.2" description = "Python interface to Graphviz's Dot" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2593,6 +2696,7 @@ pyparsing = ">=2.1.4" name = "pyenchant" version = "3.2.2" description = "Python bindings for the Enchant 
spellchecking system" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2606,6 +2710,7 @@ files = [ name = "pyflakes" version = "3.0.1" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2617,6 +2722,7 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2631,6 +2737,7 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.4.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2648,6 +2755,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pyld" version = "2.0.3" description = "Python implementation of the JSON-LD API" +category = "main" optional = false python-versions = "*" files = [ @@ -2669,6 +2777,7 @@ requests = ["requests"] name = "pyopenssl" version = "22.0.0" description = "Python wrapper module around the OpenSSL library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2685,13 +2794,14 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyparsing" -version = "3.0.9" +version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, ] [package.extras] @@ -2701,6 +2811,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pypubsub" version = "4.0.3" description = "Python Publish-Subscribe Package" +category = "main" optional = false python-versions = ">=3.3, <4" files = [ @@ -2711,6 +2822,7 @@ files = [ name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." +category = "main" optional = false python-versions = "*" files = [ @@ -2722,6 +2834,7 @@ files = [ name = "pyshacl" version = "0.19.1" description = "Python SHACL Validator" +category = "main" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -2745,6 +2858,7 @@ jsonld = ["rdflib-jsonld (>=0.4.0,<0.6)"] name = "pyte" version = "0.8.1" description = "Simple VTXXX-compatible terminal emulator." +category = "dev" optional = false python-versions = "*" files = [ @@ -2759,6 +2873,7 @@ wcwidth = "*" name = "pytest" version = "7.1.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2782,6 +2897,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
name = "pytest-black" version = "0.3.12" description = "A pytest plugin to enable format checking with black" +category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -2797,6 +2913,7 @@ toml = "*" name = "pytest-cache" version = "1.0" description = "pytest plugin with mechanisms for caching across test runs" +category = "dev" optional = false python-versions = "*" files = [ @@ -2811,6 +2928,7 @@ pytest = ">=2.2" name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2829,6 +2947,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-flake8" version = "1.1.1" description = "pytest plugin to check FLAKE8 requirements" +category = "dev" optional = false python-versions = "*" files = [ @@ -2844,6 +2963,7 @@ pytest = ">=7.0" name = "pytest-lazy-fixture" version = "0.6.3" description = "It helps to use fixtures in pytest.mark.parametrize" +category = "dev" optional = false python-versions = "*" files = [ @@ -2858,6 +2978,7 @@ pytest = ">=3.2.5" name = "pytest-mock" version = "3.10.0" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2875,6 +2996,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-pep8" version = "1.0.6" description = "pytest plugin to check PEP8 requirements" +category = "dev" optional = false python-versions = "*" files = [ @@ -2890,6 +3012,7 @@ pytest-cache = "*" name = "pytest-recording" version = "0.12.2" description = "A pytest plugin that allows you recording of network interactions via VCR.py" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2906,6 +3029,7 @@ vcrpy = ">=2.0.1" name = "pytest-timeout" version = "2.1.0" description = "pytest plugin to abort hanging tests" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2920,6 +3044,7 @@ pytest = ">=5.0.0" name = "pytest-xdist" version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2940,6 +3065,7 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2954,6 +3080,7 @@ six = ">=1.5" name = "python-dotenv" version = "0.20.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = true python-versions = ">=3.5" files = [ @@ -2968,6 +3095,7 @@ cli = ["click (>=5.0)"] name = "python-editor" version = "1.0.4" description = "Programmatically open an editor, capture the result." 
+category = "main" optional = false python-versions = "*" files = [ @@ -2980,6 +3108,7 @@ files = [ name = "python-gitlab" version = "3.8.1" description = "Interact with GitLab API" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2999,6 +3128,7 @@ yaml = ["PyYaml (>=5.2)"] name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -3010,6 +3140,7 @@ files = [ name = "pywin32" version = "227" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -3031,6 +3162,7 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3080,6 +3212,7 @@ files = [ name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -3169,6 +3302,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "rdflib" version = "6.3.2" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3190,6 +3324,7 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] name = "redis" version = "4.5.5" description = "Python client for Redis database and key-value store" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3208,6 +3343,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "renku-sphinx-theme" version = "0.2.3" description = "A Sphinx theme for Renku documentation." +category = "dev" optional = false python-versions = "*" files = [ @@ -3228,6 +3364,7 @@ tests = ["check-manifest (>=0.25)", "isort (>=4.2.2)", "pkginfo (<1.9)", "pydocs name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3249,6 +3386,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3263,6 +3401,7 @@ requests = ">=2.0.1,<3.0.0" name = "responses" version = "0.23.1" description = "A utility library for mocking out the `requests` Python library." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3283,6 +3422,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rich" version = "13.3.5" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3302,6 +3442,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rq" version = "1.15.0" description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." 
+category = "main" optional = true python-versions = ">=3.6" files = [ @@ -3313,27 +3454,11 @@ files = [ click = ">=5.0.0" redis = ">=4.0.0" -[[package]] -name = "rq-scheduler" -version = "0.13.1" -description = "Provides job scheduling capabilities to RQ (Redis Queue)" -optional = true -python-versions = "*" -files = [ - {file = "rq-scheduler-0.13.1.tar.gz", hash = "sha256:89d6a18f215536362b22c0548db7dbb8678bc520c18dc18a82fd0bb2b91695ce"}, - {file = "rq_scheduler-0.13.1-py2.py3-none-any.whl", hash = "sha256:c2b19c3aedfc7de4d405183c98aa327506e423bf4cdc556af55aaab9bbe5d1a1"}, -] - -[package.dependencies] -crontab = ">=0.23.0" -freezegun = "*" -python-dateutil = "*" -rq = ">=0.13" - [[package]] name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" optional = false python-versions = ">=3" files = [ @@ -3352,6 +3477,7 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3362,8 +3488,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, @@ -3396,53 +3521,54 @@ files = [ [[package]] name = "schema-salad" -version = "8.4.20230601112322" +version = "8.4.20230606143604" description = "Schema Annotations for Linked Avro Data (SALAD)" +category = "main" optional = false python-versions = ">=3.6,<3.12" files = [ - {file = "schema-salad-8.4.20230601112322.tar.gz", hash = "sha256:8d2c8ac3caf2eb404bdd94a4c2a0e31345c5cc0884801d1c5dc5ca86d18040b4"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5b52e0199c7e02835e808dae114a9aaad603f42962efb9850fe9693c980a11ce"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f9edafac3c2b27584a24ab3be98e09cdda38448b10755b87c20f3ce518c97fd"}, - {file = 
"schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:301a5686ec6142dfc36f51530f297764a422e12c7a99b981c6d92552852cbd39"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:178db41bbc46d68594399b7435969f1ebaba64d96fa9efb08400b16861c08c72"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4cadab0f20817a1a80ad89b98099657339e553c41ea07f7ac102603e8f73d648"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08da37193385254bca7fdb4748ef6c08cb283dd669f0a56a05a265688463856f"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:59d9373f7576e011fb885e4c452a3c1402cb3fa529488198a20951f611ca2d25"}, - {file = "schema_salad-8.4.20230601112322-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e30644406bb7446531e4cd52f3c6bb60086ccaf6beb091be1660f39468b0fb18"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b24fd886b15634ea2819fd89b47972867b48beb33307d919e0860f9d3fdb37fe"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4d53cfbc3d0ba983f2c977e0e1e99e6207453ccfcf4ade393a29afdce32a88e"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2d35b578a882364596af0dc0a46aa4b77af913f992bd56da1efb591b0e6fc"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b9787319549edb4d36e44504f36f4a025fbae7cbf1eba2ebe1a647bfde0d7991"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9090f654b1ce0fb98be419340d488fb539fe98bb3ac4a23fefd7dc71f173bf90"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c10de96857d24efc7b755667ba16f219e042ddb123ba6f4a8c4b429a14d9c8"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ef8a227d974e87bcdb4ec98c32a9354881586a0520985e3fa9fa509123615c2a"}, - {file = "schema_salad-8.4.20230601112322-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54ee3b017c42c3f442d39e16979d9f18b30e02db7817ecb73682fe75ea0810b6"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6ada405a5cbbecd43c73bbc067abb080e29c93eea8ba0a3f30efdb420f52006a"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fa2fa08fcded0b0bedc46f4d3582ab9366eaedadc48417e3f67fd1836f300aa7"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:249e249f07f14f47e59f1b47fd35de661089896e2055754ee9d5dbec71ab6413"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5d979dea308cc90d6a1cd974f0a4f94cd30c75edaced6b520c507047891c68ae"}, - {file = 
"schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:368e419e85ab85661680d40b3b9ab1efcdfb43ad12a44f797ac68418053c5baf"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b107e6ef58698e7953b4eb2ed0fa1da25ba07f470f209a2aaa6512f86745c8c7"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:033f6c5dca6603d7ba12a09081cb7cd7ece8ebf0caa6ba3cf3d1af8b075ac321"}, - {file = "schema_salad-8.4.20230601112322-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ee55dd6d8a0fc08881c1c312510dc9afbf5ddf4c0271958f1b29345512fbb183"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5aaf0d240d93b5bcc99817168fe523a73bb0e9fc0daf90703656209bfbfa3cf3"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07880fbda95c07128e7058da605766fb79d75e61aef3ef0c022316a302f1c625"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ec4fb9c8c307202a4c394557ccf131e00f57d9c50bc64957046d302d6ca432b"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:af210dbd0fdf68596007026ed2cabd33b54227e49b19549e1fee7963a8381390"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2381319d3908b21afff3b162a8399d61daa28aabe50b1c6ca7e9ed1ddef9e884"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a230d1a1c344712f212c74d046da78c630fd32a422caa5d1f588acff43ec1fc"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:330e04111a1d24b4ac4283e50309d35716e65682a7d6917cee259c5ddcd9271c"}, - {file = "schema_salad-8.4.20230601112322-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85e069e711364fd65883b7571ce7e9c007e455063ba5fa60e47f0e16d7b5d9f6"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:be42d6ae55c0fd95e15d7fb60bb2caa13b2461eb29a7531ed36c3ba086a6fcf5"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:91eb43c02f2f3af248b35bbe04963e9437fc5f1c8b4cf7b94021ea2dc2428fda"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282b4603d293529692c67f3d1e12c9299e97ff9f76ce58ee5462f18e8f463df"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a26c4d9afd044054f6a4deef9236b278c103bcb85313d6da38b149b93d59e902"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1177cc97bdd4131b389b9104c3d87470b9a0a3ed9bead3d4877c0650b5c870c6"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6bd461b0053307278bc3a30c6c6277e4cfdad63ba865c6cf6a3d97e43ba296b"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:edf61fbbfc1358699a986df7f7632fb25f1892b0a0e1fb805fdd163e78a037ed"}, - {file = "schema_salad-8.4.20230601112322-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f3e154304f054169d88872e749334b83476e3dc7a686d4599486b430e96775b2"}, - {file = "schema_salad-8.4.20230601112322-py3-none-any.whl", hash = "sha256:0e531245757e4ff5fbda6a0fe4749f95f2ed3818870cd2e09417f9bee93cf730"}, + {file = "schema-salad-8.4.20230606143604.tar.gz", hash = "sha256:f19a3d6614b4afecec93b9c7121d31ee01d8c1aa169b272d41844ca61d3d9af6"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5a7e84f7f6a5e7d97b55a342d81e2969014a1b135bba749479b694c0264e043d"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ffda242c3b957ab79a07877c81c4488eda3cbb7c6796943a077462f84d14f8"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:accd90fd11a203df7c20505e8187873f36ede9b12379afdd440fa893ab68b6f0"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:46e1f334fd51f09e500ff4fe80c68fb5497bc293742ac599d9c40c6ee2baf82d"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6489c690cdea3a0989136d2b7ad46e99303b5de57564f4b10c70331dec08d6d1"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12cba653cb1ecae6e15fb4705cf34c647acbce56ffd9cb051933229b004a28b8"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ea0ed53039666f2c8ff3d3a53a95f0240c70091529cb731a14c5027827e33461"}, + {file = "schema_salad-8.4.20230606143604-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72ee112e555bada3d2941905bf01ac6ca5531bf4b09fcef9ab98b4ba313a9014"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6d3df5263a7be8c74bf11d46cccf77d004970edb8342a0125a14fba57d4a1033"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08112507a4dc5102876fce6d9b2138ae5f0b90d04690a17fd9dee8d11687d91d"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c9b601a7f7430f0582dc6be0fda7b7aa6cc112e622ecf60f1236786c4597f86"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:4ff932dcbc5ab487e959cab5ab7856d367c8e339d105b9d1fc4659c44668456a"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b88d26ddd67b6682599502b1958b5d74622dcd1f0f82501dc3a210935a5ebf4c"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b37445c4f4336fcdc278388f2dbc97c9058e06a0935c4f84ad9659e336f7bc1"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:68e65194388ad29b399f94c22d4affa63bf9f2602c1dca57acf5a2eaa504dfd0"}, + {file = "schema_salad-8.4.20230606143604-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:3e463d658db5640145a4983420721efcd908bbf9342da22a2dc83514cc8ccea3"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:217aff85c2eac567dd490fd3c5fc44f0c6323e5c772ab04c599396e05e556f67"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc8fc3f9b25305a2bd660d3343ec099a4d620a659b62530a543f5f5aa30939a2"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a496464826b187b6a4a0da14ab9754940e6310a35ba5f22b37de411f4267ae9f"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:4e88d3f46895a15c3356840a84cfc965a60ffa16233d3a7237425c294fb08975"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c4550c83760138a242010bd75db589104f19c9071b4edb805dd101137097c70"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0006e17d5c2d3edb5f6dc7e915a2355d4ec22a686960f0eecb22bba9c40acfc"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d33c3c0a1820fd21bb3b94061a971e5fc5156ed69398ef3eb487779891a396ee"}, + {file = "schema_salad-8.4.20230606143604-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:eb1581920d96c8044376a072cb45189a5aac743789d9b203fe5351a5d1ab7d58"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:639fe4c9ca2fa3f577ae6661a4a538ff10d76a7a85c309d480dc278e0ddd635d"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be63416d551ac8e4eb6ecc83268a6ce1775a363f965c60957a27083acff8d96"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85cdf3850f1215a3c741d729c1f65514f2ba90dec3ee3597a3a44ac7cb2657e0"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6f13c693dbd954d36696f42c7eeb03755ffe1ffdc1e778010d55786671c332b5"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1baf114f9128fdbe89f816f7abf93cd2a7927e3e9fc30633ec741badcfe475d4"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c55f5ecb7fa1fe844fbdb50f4400a7339f12d053394c51955263541a5ead3cd7"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2c0c9f3112627b01bf1eba7633de4d191f0e7dc617604228f810379a65cfc6cd"}, + {file = "schema_salad-8.4.20230606143604-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ea0d4c3942317ad3f9a2c0c9dd9ed8d74752b67736721db07a267dab301d1d6c"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2abe3c6c0308257d5ee9a2755094c60983e5ce9ff1a0594b74d83f982b434ebf"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:aa35788cdf38becde9166d1703622176f30a1dc62b71fc7b654faea30fb2c3be"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:441bc6b2c868e9e8d158b6392fda5c87d1adaf508aa43b101fa7bd6d2dd97eba"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6789d1d40066c527b8710c69f17b62f485ef92762ea35ba50334b9fe5c1b5c6e"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6c0c26d12f9081d65621e491708f3c75d8f7ffe0877f980c90c34e779d45d6c"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6488b7194fe2542f982476795ea5db6b86e32d9f6454a7ad9d5a4049d033f296"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:74a6f5645cbc6236c1b3d90a15987c748590765a24297c25e2f35b6c0981655d"}, + {file = "schema_salad-8.4.20230606143604-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aadf5899a3fae74badfa71d59dd170cd92eadf5904b4dd6c58656651b9179c9f"}, + {file = "schema_salad-8.4.20230606143604-py3-none-any.whl", hash = "sha256:0f3a9adcafccdfe4728591ee61cce96f9ddb78a6cc664d189d0bb212389e8c54"}, ] [package.dependencies] @@ -3459,13 +3585,14 @@ pycodegen = ["black"] [[package]] name = "sentry-sdk" -version = "1.25.0" +version = "1.25.1" description = "Python client for Sentry (https://sentry.io)" +category = "main" optional = true python-versions = "*" files = [ - {file = "sentry-sdk-1.25.0.tar.gz", hash = "sha256:5be3296fc574fa8a4d9b213b4dcf8c8d0246c08f8bd78315c6286f386c37555a"}, - {file = "sentry_sdk-1.25.0-py2.py3-none-any.whl", hash = "sha256:fe85cf5d0b3d0aa3480df689f9f6dc487de783defb0a95043368375dc893645e"}, + {file = "sentry-sdk-1.25.1.tar.gz", hash = "sha256:aa796423eb6a2f4a8cd7a5b02ba6558cb10aab4ccdc0537f63a47b038c520c38"}, + {file = "sentry_sdk-1.25.1-py2.py3-none-any.whl", hash = "sha256:79afb7c896014038e358401ad1d36889f97a129dfa8031c49b3f238cd1aa3935"}, ] [package.dependencies] @@ -3504,13 +3631,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.8.0" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] @@ -3522,6 +3650,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "shellescape" version = "3.8.1" description = "Shell escape a string to safely use it as a token in a shell command (backport of cPython shlex.quote for Python versions 2.x & < 3.3)" +category = "main" optional = false python-versions = "*" files = [ @@ -3533,6 +3662,7 @@ files = [ name = "shellingham" version = "1.5.0.post1" description = "Tool to Detect Surrounding Shell" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3544,6 +3674,7 @@ files = [ name = "six" 
version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3555,6 +3686,7 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3566,6 +3698,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -3577,6 +3710,7 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "dev" optional = false python-versions = "*" files = [ @@ -3588,6 +3722,7 @@ files = [ name = "sphinx" version = "4.5.0" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3623,6 +3758,7 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] name = "sphinx-click" version = "4.4.0" description = "Sphinx extension that automatically documents click applications" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3639,6 +3775,7 @@ sphinx = ">=2.0" name = "sphinx-rtd-theme" version = "1.0.0" description = "Read the Docs theme for Sphinx" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" files = [ @@ -3657,6 +3794,7 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] name = "sphinx-tabs" version = "3.2.0" description = "Tabbed views for Sphinx" +category = "dev" optional = false python-versions = "~=3.6" files = [ @@ -3677,6 +3815,7 @@ testing = ["bs4", "coverage", "pygments", "pytest (>=3.6,<4)", "pytest-cov", "py name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3692,6 +3831,7 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3707,6 +3847,7 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3722,6 +3863,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3736,6 +3878,7 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3751,6 +3894,7 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -3766,6 +3910,7 @@ test = ["pytest"] name = "sphinxcontrib-spelling" version = "8.0.0" description = "Sphinx spelling extension" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3784,6 +3929,7 @@ test = ["coverage (>=4.0,!=4.4)", "pytest", "pytest-cov"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3798,6 +3944,7 @@ widechars = ["wcwidth"] name = "termcolor" version = "1.1.0" description = "ANSII Color formatting for output in terminal." +category = "main" optional = false python-versions = "*" files = [ @@ -3808,6 +3955,7 @@ files = [ name = "toil" version = "5.10.0" description = "Pipeline management software for clusters." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3844,6 +3992,7 @@ wdl = ["miniwdl (==1.9.1)", "wdlparse (==0.1.0)"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3855,6 +4004,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3866,6 +4016,7 @@ files = [ name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3877,6 +4028,7 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "main" optional = true python-versions = ">= 3.8" files = [ @@ -3897,6 +4049,7 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3917,6 +4070,7 @@ telegram = ["requests"] name = "transaction" version = "3.1.0" description = "Transaction management for Python" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -3936,6 +4090,7 @@ testing = ["coverage", "mock", "nose"] name = "types-python-dateutil" version = "2.8.19.13" description = "Typing stubs for python-dateutil" +category = "dev" optional = false python-versions = "*" files = [ @@ -3947,6 +4102,7 @@ files = [ name = "types-pyyaml" version = "6.0.12.10" description = "Typing stubs for PyYAML" +category = "dev" optional = false python-versions = "*" files = [ @@ -3958,6 +4114,7 @@ files = [ name = "types-redis" version = "4.0.6" description = "Typing stubs for redis" +category = "dev" optional = false python-versions = "*" files = [ @@ -3969,6 +4126,7 @@ files = [ name = "types-requests" version = "2.28.11.17" description = "Typing stubs for requests" +category = "dev" optional = false python-versions = "*" files = [ @@ -3983,6 +4141,7 @@ types-urllib3 = "<1.27" name = "types-tabulate" version = "0.9.0.2" description = "Typing stubs for tabulate" +category = "dev" optional = false python-versions = "*" files = [ @@ -3994,6 +4153,7 @@ files = [ name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" +category = "dev" optional = false python-versions = "*" files = [ @@ -4005,6 +4165,7 @@ files = [ name = "typing-extensions" version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4016,6 +4177,7 @@ files = [ name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4032,6 +4194,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "vcrpy" version = "4.3.1" description = "Automatically mock your HTTP interactions to simplify and speed up testing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4048,28 +4211,30 @@ yarl = "*" [[package]] name = "virtualenv" -version = "20.23.0" +version = "20.23.1" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, - {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, + {file = "virtualenv-20.23.1-py3-none-any.whl", hash = "sha256:34da10f14fea9be20e0fd7f04aba9732f84e593dac291b757ce42e3368a39419"}, + {file = "virtualenv-20.23.1.tar.gz", hash = "sha256:8ff19a38c1021c742148edc4f81cb43d7f8c6816d2ede2ab72af5b84c749ade1"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.11,<4" -platformdirs = ">=3.2,<4" +filelock = ">=3.12,<4" +platformdirs = ">=3.5.1,<4" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezer (>=0.4.6)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.8)", "time-machine (>=2.9)"] [[package]] name = "walrus" version = "0.9.2" description = "walrus" +category = "main" optional = true python-versions = "*" files = [ @@ -4083,6 +4248,7 @@ redis = ">=3.0.0" name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -4092,13 +4258,14 @@ files = [ [[package]] name = "websocket-client" -version = "1.5.2" +version = "1.6.0" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "websocket-client-1.5.2.tar.gz", hash = "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b"}, - {file = "websocket_client-1.5.2-py3-none-any.whl", hash = "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1"}, + {file = "websocket-client-1.6.0.tar.gz", hash = "sha256:e84c7eafc66aade6d1967a51dfd219aabdf81d15b9705196e11fd81f48666b78"}, + {file = "websocket_client-1.6.0-py3-none-any.whl", 
hash = "sha256:72d7802608745b0a212f79b478642473bd825777d8637b6c8c421bf167790d4f"}, ] [package.extras] @@ -4110,6 +4277,7 @@ test = ["websockets"] name = "werkzeug" version = "2.2.3" description = "The comprehensive WSGI web application library." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4127,6 +4295,7 @@ watchdog = ["watchdog"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -4211,6 +4380,7 @@ files = [ name = "yagup" version = "0.1.1" description = "Parsing and validating git urls." +category = "main" optional = false python-versions = "*" files = [ @@ -4225,6 +4395,7 @@ pyparsing = "*" name = "yarl" version = "1.9.2" description = "Yet another URL library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4312,6 +4483,7 @@ multidict = ">=4.0" name = "yaspin" version = "2.1.0" description = "Yet Another Terminal Spinner" +category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -4326,6 +4498,7 @@ termcolor = ">=1.1.0,<2.0.0" name = "zc-lockfile" version = "3.0.post1" description = "Basic inter-process locks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4343,6 +4516,7 @@ test = ["zope.testing"] name = "zc-relation" version = "2.0" description = "Index intransitive and transitive n-ary relationships." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4363,6 +4537,7 @@ test = ["ZODB", "zc.relationship (>=2)"] name = "zconfig" version = "4.0" description = "Structured Configuration Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4378,6 +4553,7 @@ test = ["docutils", "manuel", "zope.exceptions", "zope.testrunner"] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4393,6 +4569,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zodb" version = "5.8.0" description = "ZODB, a Python object-oriented database" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -4418,6 +4595,7 @@ test = ["manuel", "mock", "zope.testing", "zope.testrunner (>=4.4.6)"] name = "zodbpickle" version = "3.0.1" description = "Fork of Python 3 pickle module." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4463,6 +4641,7 @@ test = ["zope.testrunner"] name = "zope-interface" version = "6.0" description = "Interfaces for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4510,6 +4689,7 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] name = "zope-testing" version = "5.0.1" description = "Zope testing helpers" +category = "main" optional = false python-versions = "*" files = [ @@ -4528,6 +4708,7 @@ test = ["zope.testrunner"] name = "zstandard" version = "0.21.0" description = "Zstandard bindings for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4583,9 +4764,9 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [extras] -service = ["apispec", "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "rq-scheduler", "sentry-sdk", "walrus"] +service = ["apispec", "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "sentry-sdk", "walrus"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "ed850d0da9ff45f44bcfe5081c7c3d59fba8631e294e065345b8267274ff1460" +content-hash = "a7907d458c0833f4ab1bc3599e2d2571ecb3753e00e29a3ce56696594d28100f" diff --git a/pyproject.toml b/pyproject.toml index 3ab88f7f6d..5f43482dfb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,7 +118,6 @@ pillow = { version = ">=9.0.0,<9.6", optional = true } python-dotenv = { version = ">=0.19.0,<0.21.0", optional = true } redis = { version = ">=3.5.3,<4.6.0", optional = true } rq = { version = "==1.15.0", optional = true } -rq-scheduler = { version = "==0.13.1", optional = true } sentry-sdk = { version = ">=1.5.11,<1.26.0", extras = ["flask"], optional = true } walrus = { version = ">=0.8.2,<0.10.0", optional = true } @@ -184,7 +183,6 @@ service = [ "python-dotenv", "redis", "rq", - "rq-scheduler", "sentry-sdk", "walrus" ] @@ -315,7 +313,6 @@ module = [ "pyte", "ruamel", "rq", - "rq_scheduler", "shellingham", "toil.*", "tqdm", diff --git a/renku/ui/cli/service.py b/renku/ui/cli/service.py index 0c19c9973c..3a41301bff 100644 --- a/renku/ui/cli/service.py +++ b/renku/ui/cli/service.py @@ -33,7 +33,7 @@ RENKU_DAEMON_LOG_FILE = "renku.log" RENKU_DAEMON_ERR_FILE = "renku.err" -SERVICE_COMPONENT_TAGS = ["api", "scheduler", "worker"] +SERVICE_COMPONENT_TAGS = ["api", "worker"] def run_api(addr="0.0.0.0", port=8080, timeout=600): @@ -235,14 +235,6 @@ def api_start(addr, port, timeout): run_api(addr, port, timeout) -@service.command(name="scheduler") -def scheduler_start(): - """Start service scheduler in active shell session.""" - from renku.ui.service.scheduler import start_scheduler - - start_scheduler() - - @service.command(name="worker") @click.option("-q", "--queue", multiple=True) def worker_start(queue): @@ -287,14 +279,6 @@ def all_start(ctx, daemon, runtime_dir): "env": os.environ.copy(), "shell": True, }, - { - "name": "RenkuCoreScheduler", - "cmd": "renku", - "args": ["service", "scheduler"], - "numprocesses": 1, - "env": os.environ.copy(), - "shell": True, - }, { "name": "RenkuCoreWorker", "cmd": "renku", diff --git a/renku/ui/service/__init__.py b/renku/ui/service/__init__.py index 797ab5b115..de85b62f21 100644 --- a/renku/ui/service/__init__.py +++ 
b/renku/ui/service/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/__init__.py b/renku/ui/service/cache/__init__.py index e507424169..1309f1f7a5 100644 --- a/renku/ui/service/cache/__init__.py +++ b/renku/ui/service/cache/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/models/__init__.py b/renku/ui/service/cache/models/__init__.py index 474618fd06..b1b29ef04c 100644 --- a/renku/ui/service/cache/models/__init__.py +++ b/renku/ui/service/cache/models/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/models/project.py b/renku/ui/service/cache/models/project.py index 711fb0186d..070cadb3a8 100644 --- a/renku/ui/service/cache/models/project.py +++ b/renku/ui/service/cache/models/project.py @@ -18,6 +18,7 @@ import os import shutil from datetime import datetime +from pathlib import Path from typing import Optional import portalocker @@ -37,6 +38,7 @@ class Project(Model): __namespace__ = BaseCache.namespace created_at = DateTimeField() + accessed_at = DateTimeField(default=datetime.utcnow) last_fetched_at = DateTimeField() project_id = TextField(primary_key=True, index=True) @@ -44,18 +46,16 @@ class Project(Model): clone_depth = IntegerField() git_url = TextField(index=True) + branch = TextField(index=True) name = TextField() slug = TextField() - fullname = TextField() description = TextField() - email = TextField() owner = TextField() - token = TextField() initialized = BooleanField() @property - def abs_path(self): + def abs_path(self) -> Path: """Full path of cached project.""" return CACHE_PROJECTS_PATH / self.user_id / self.owner / self.slug @@ -84,20 +84,34 @@ def concurrency_lock(self): def age(self): """Returns project's age in seconds.""" # NOTE: `created_at` field is aligned to UTC timezone. 
+ if not self.created_at: + return None return int((datetime.utcnow() - self.created_at).total_seconds()) + @property + def time_since_access(self): + """Returns time since last access.""" + if not self.accessed_at: + return None + return int((datetime.utcnow() - self.accessed_at).total_seconds()) + @property def fetch_age(self): """Returns project's fetch age in seconds.""" return int((datetime.utcnow() - self.last_fetched_at).total_seconds()) + @property + def is_shallow(self) -> bool: + """Returns whether the project is checked out shallow or not.""" + return self.clone_depth is not None and self.clone_depth > 0 + def exists(self): """Ensure a project exists on file system.""" return self.abs_path.exists() def ttl_expired(self, ttl=None): """Check if project time to live has expired.""" - if not self.created_at: + if not self.time_since_access: # If record does not contain created_at, # it means its an old record, and # we should mark it for deletion. @@ -105,11 +119,12 @@ def ttl_expired(self, ttl=None): # NOTE: time to live measured in seconds ttl = ttl or int(os.getenv("RENKU_SVC_CLEANUP_TTL_PROJECTS", 1800)) - return self.age >= ttl + return self.time_since_access >= ttl def purge(self): """Removes project from file system and cache.""" - shutil.rmtree(str(self.abs_path)) + if self.exists(): + shutil.rmtree(str(self.abs_path)) self.delete() def is_locked(self, jobs): diff --git a/renku/ui/service/cache/projects.py b/renku/ui/service/cache/projects.py index faadc5ff3f..4afe48a323 100644 --- a/renku/ui/service/cache/projects.py +++ b/renku/ui/service/cache/projects.py @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. """Renku service project cache management.""" +from typing import cast + from marshmallow import EXCLUDE from renku.ui.service.cache.base import BaseCache @@ -33,7 +35,7 @@ def make_project(self, user, project_data, persist=True): """Store user project metadata.""" project_data.update({"user_id": user.user_id}) - project_obj = self.project_schema.load(project_data, unknown=EXCLUDE) + project_obj: Project = cast(Project, self.project_schema.load(project_data, unknown=EXCLUDE)) if persist: project_obj.save() diff --git a/renku/ui/service/cache/serializers/project.py b/renku/ui/service/cache/serializers/project.py index cf04ed2190..ed23e4f533 100644 --- a/renku/ui/service/cache/serializers/project.py +++ b/renku/ui/service/cache/serializers/project.py @@ -21,10 +21,10 @@ from marshmallow import fields, post_load from renku.ui.service.cache.models.project import Project -from renku.ui.service.serializers.common import CreationSchema, MandatoryUserSchema +from renku.ui.service.serializers.common import AccessSchema, CreationSchema, MandatoryUserSchema -class ProjectSchema(CreationSchema, MandatoryUserSchema): +class ProjectSchema(CreationSchema, AccessSchema, MandatoryUserSchema): """Context schema for project clone.""" last_fetched_at = fields.DateTime(load_default=datetime.utcnow) @@ -37,10 +37,7 @@ class ProjectSchema(CreationSchema, MandatoryUserSchema): name = fields.String(required=True) slug = fields.String(required=True) description = fields.String(load_default=None) - fullname = fields.String(required=True) - email = fields.String(required=True) owner = fields.String(required=True) - token = fields.String(required=True) initialized = fields.Boolean(dump_default=False) @post_load diff --git a/renku/ui/service/cache/users.py b/renku/ui/service/cache/users.py index e9106c2a42..96399dc894 100644 --- 
a/renku/ui/service/cache/users.py +++ b/renku/ui/service/cache/users.py @@ -27,10 +27,10 @@ class UserManagementCache(BaseCache): def ensure_user(self, user_data): """Ensure user data registered in a cache.""" - user_obj = self.user_schema.load(user_data) + user_obj: User = self.user_schema.load(user_data) try: - User.get(User.user_id == user_obj.user_id) + user_obj = User.get((User.user_id == user_obj.user_id) & (User.token == user_obj.token)) except ValueError: user_obj.save() diff --git a/renku/ui/service/controllers/api/mixins.py b/renku/ui/service/controllers/api/mixins.py index 934f98d9ba..6b11ea66a9 100644 --- a/renku/ui/service/controllers/api/mixins.py +++ b/renku/ui/service/controllers/api/mixins.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,29 +16,28 @@ """Renku service controller mixin.""" import contextlib from abc import ABCMeta, abstractmethod -from datetime import datetime from functools import wraps from pathlib import Path import portalocker from renku.core.constant import RENKU_HOME -from renku.core.errors import GitCommandError, GitConfigurationError, LockError, RenkuException, UninitializedProject +from renku.core.errors import LockError, RenkuException, UninitializedProject from renku.core.util.contexts import renku_project_context from renku.infrastructure.repository import Repository from renku.ui.service.cache.config import REDIS_NAMESPACE from renku.ui.service.cache.models.job import Job from renku.ui.service.cache.models.project import Project from renku.ui.service.cache.models.user import User -from renku.ui.service.config import PROJECT_CLONE_DEPTH_DEFAULT, PROJECT_CLONE_NO_DEPTH +from renku.ui.service.config import PROJECT_CLONE_DEPTH_DEFAULT from renku.ui.service.controllers.utils.remote_project import RemoteProject from renku.ui.service.errors import ( IntermittentAuthenticationError, - IntermittentCacheError, IntermittentLockError, ProgramRenkuError, UserAnonymousError, ) +from renku.ui.service.gateways.repository_cache import LocalRepositoryCache from renku.ui.service.jobs.contexts import enqueue_retry from renku.ui.service.jobs.delayed_ctrl import delayed_ctrl_job from renku.ui.service.serializers.common import DelayedResponseRPC @@ -112,7 +110,7 @@ def renku_op(self): """Implements operation for the controller.""" raise NotImplementedError - def ensure_migrated(self, project_id): + def ensure_migrated(self, project: Project): """Ensure that project is migrated.""" if not self.migrate_project: return @@ -120,9 +118,10 @@ from renku.ui.service.controllers.cache_migrate_project import MigrateProjectCtrl migrate_context = { - "project_id": project_id, + "git_url": project.git_url, "skip_docker_update": True, "skip_template_update": True, + "branch": project.branch, } migration_response = MigrateProjectCtrl( self.cache, self.user_data, migrate_context, skip_lock=True @@ -150,136 +149,16 @@ def execute_op(self): return job - if "project_id" in self.context: - return self.local() - - elif "git_url" in self.context and "user_id" not in self.user_data: # NOTE: Anonymous session support.
return self.remote() elif "git_url" in self.context and "user_id" in self.user_data: - try: - project = Project.get( - (Project.user_id == self.user_data["user_id"]) & (Project.git_url == self.context["git_url"]) - ) - except ValueError: - from renku.ui.service.controllers.cache_project_clone import ProjectCloneCtrl - - clone_context = { - "git_url": self.request_data["git_url"], - } - - if "branch" in self.request_data: - clone_context["branch"] = self.request_data["branch"] - - # NOTE: If we want to migrate project, then we need to do full clone. - # This operation can take very long time, and as such is expected - # only to be executed from delayed tasks. - if self.migrate_project: - clone_context["depth"] = PROJECT_CLONE_NO_DEPTH - elif self.clone_depth: - clone_context["depth"] = self.clone_depth - - project = ProjectCloneCtrl(self.cache, self.user_data, clone_context).project_clone() - - if not project.initialized: - raise UninitializedProject(project.abs_path) - else: - branch = self.request_data.get("branch", None) - - if branch: - with Repository(project.abs_path) as repository: - if branch != repository.active_branch.name: - # NOTE: Command called for different branch than the one used in cache, change branch - if len(repository.remotes) != 1: - raise RenkuException("Couldn't find remote for project in cache.") - origin = repository.remotes[0] - remote_branch = f"{origin}/{branch}" - - with project.write_lock(): - # NOTE: Add new branch to remote branches - repository.run_git_command("remote", "set-branches", "--add", origin, branch) - if self.migrate_project or self.clone_depth == PROJECT_CLONE_NO_DEPTH: - repository.fetch(origin, branch) - else: - repository.fetch(origin, branch, depth=self.clone_depth) - - # NOTE: Switch to new ref - repository.run_git_command("checkout", "--track", "-f", "-b", branch, remote_branch) - - # NOTE: cleanup remote branches in case a remote was deleted (fetch fails otherwise) - repository.run_git_command("remote", "prune", origin) - - for branch in repository.branches: - if branch.remote_branch and not branch.remote_branch.is_valid(): - repository.branches.remove(branch, force=True) - # NOTE: Remove left-over refspec - try: - with repository.get_configuration(writable=True) as config: - config.remove_value(f"remote.{origin}.fetch", f"origin.{branch}$") - except GitConfigurationError: - pass - else: - self.reset_local_repo(project) - - self.context["project_id"] = project.project_id return self.local() else: raise RenkuException("context does not contain `project_id` or `git_url`") - def reset_local_repo(self, project): - """Reset the local repo to be up to date with the remote.""" - - from renku.ui.service.controllers.cache_migrate_project import MigrateProjectCtrl - - # NOTE: Only do a fetch every >30s to get eventual consistency but not slow things down too much, - # except for MigrateProject since that is likely to require to unshallow the repository - if project.fetch_age < PROJECT_FETCH_TIME and not isinstance(self, MigrateProjectCtrl): - return - - lock = project.write_lock() - - if self.skip_lock: - lock = contextlib.suppress() - try: - with lock: - if project.fetch_age < PROJECT_FETCH_TIME: - # NOTE: return immediately in case of multiple writers waiting - return - - with Repository(project.abs_path) as repository: - origin = None - tracking_branch = repository.active_branch.remote_branch - if tracking_branch: - origin = tracking_branch.remote - elif len(repository.remotes) == 1: - origin = repository.remotes[0] - - if origin: - unshallow = 
self.migrate_project or self.clone_depth == PROJECT_CLONE_NO_DEPTH - if unshallow: - try: - # NOTE: It could happen that repository is already un-shallowed, - # in this case we don't want to leak git exception, but still want to fetch. - repository.fetch("origin", repository.active_branch, unshallow=True) - except GitCommandError: - repository.fetch("origin", repository.active_branch) - - repository.reset(f"{origin}/{repository.active_branch}", hard=True) - else: - try: - # NOTE: it rarely happens that origin is not reachable. Try again if it fails. - repository.fetch("origin", repository.active_branch) - repository.reset(f"{origin}/{repository.active_branch}", hard=True) - except GitCommandError as e: - project.purge() - raise IntermittentCacheError(e) - project.last_fetched_at = datetime.utcnow() - project.save() - except (portalocker.LockException, portalocker.AlreadyLocked, LockError) as e: - raise IntermittentLockError() from e - @local_identity def local(self): """Execute renku operation against service cache.""" @@ -287,7 +166,15 @@ def local(self): error = Exception("local execution is disabled") raise ProgramRenkuError(error) - project = self.cache.get_project(self.user, self.context["project_id"]) + project = LocalRepositoryCache().get( + self.cache, + self.request_data["git_url"], + self.request_data.get("branch"), + self.user, + self.clone_depth is not None, + ) + + self.context["project_id"] = project.project_id if self.skip_lock: lock = contextlib.suppress() @@ -297,17 +184,13 @@ def local(self): lock = project.read_lock() try: with project.concurrency_lock(): - self.reset_local_repo(project) - with lock: # NOTE: Get up-to-date version of object current_project = Project.load(project.project_id) - if not current_project.initialized: - raise UninitializedProject(project.abs_path) if self.migrate_project: - self.ensure_migrated(project.project_id) + self.ensure_migrated(current_project) - self.project_path = project.abs_path + self.project_path = current_project.abs_path with renku_project_context(self.project_path): return self.renku_op() diff --git a/renku/ui/service/controllers/cache_list_projects.py b/renku/ui/service/controllers/cache_list_projects.py deleted file mode 100644 index ea8b89c662..0000000000 --- a/renku/ui/service/controllers/cache_list_projects.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
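The rewritten local() above is the core of this refactor: controllers no longer receive a project_id from the request but resolve the repository through the new LocalRepositoryCache gateway, which clones or refreshes the project on demand and records the resulting project_id in the controller context. A minimal sketch of the new lookup flow, using the call shape visible in the diff above (the standalone resolve_project helper is illustrative only, not part of the patch):

    from renku.ui.service.gateways.repository_cache import LocalRepositoryCache

    def resolve_project(cache, user, request_data, shallow=True):
        """Illustrative helper: return the cached repository for a request.

        Controllers that need the full history (e.g. before a migration)
        pass shallow=False, i.e. no clone depth limit.
        """
        return LocalRepositoryCache().get(
            cache,                       # service cache backend
            request_data["git_url"],     # repository to operate on
            request_data.get("branch"),  # optional branch, may be None
            user,                        # projects are cached per user
            shallow,                     # whether a shallow clone suffices
        )

With project resolution centralized in this gateway, the dedicated clone and listing controllers become redundant and are deleted below.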
-"""Renku service cache list cached projects controller.""" -import itertools - -from renku.ui.service.controllers.api.abstract import ServiceCtrl -from renku.ui.service.controllers.api.mixins import RenkuOperationMixin -from renku.ui.service.serializers.cache import ProjectListResponseRPC -from renku.ui.service.views import result_response - - -class ListProjectsCtrl(ServiceCtrl, RenkuOperationMixin): - """Controller for listing cached projects endpoint.""" - - RESPONSE_SERIALIZER = ProjectListResponseRPC() - - def __init__(self, cache, user_data): - """Construct controller.""" - self.ctx = {} - super().__init__(cache, user_data, {}) - - @property - def context(self): - """Controller operation context.""" - return self.ctx - - def list_projects(self): - """List locally cache projects.""" - projects = [project for project in self.cache.get_projects(self.user) if project.abs_path.exists()] - - result = { - "projects": [ - max(g, key=lambda p: p.created_at) for _, g in itertools.groupby(projects, lambda p: p.git_url) - ] - } - - return result - - def renku_op(self): - """Renku operation for the controller.""" - # NOTE: We leave it empty since it does not execute renku operation. - pass - - def to_response(self): - """Execute controller flow and serialize to service response.""" - return result_response(ListProjectsCtrl.RESPONSE_SERIALIZER, self.list_projects()) diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index c5584ebc70..473908c682 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -27,6 +27,7 @@ from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOperationMixin from renku.ui.service.interfaces.git_api_provider import IGitAPIProvider +from renku.ui.service.logger import service_log from renku.ui.service.serializers.cache import ProjectMigrationCheckRequest, ProjectMigrationCheckResponseRPC from renku.ui.service.views import result_response @@ -51,7 +52,7 @@ def context(self): def _fast_op_without_cache(self): """Execute renku_op with only necessary files, without cloning the whole repo.""" if "git_url" not in self.context: - raise RenkuException("context does not contain `project_id` or `git_url`") + raise RenkuException("context does not contain `git_url`") with tempfile.TemporaryDirectory() as tempdir: tempdir_path = Path(tempdir) @@ -63,9 +64,10 @@ def _fast_op_without_cache(self): target_folder=tempdir_path, remote=self.ctx["git_url"], branch=self.request_data.get("branch", None), - token=self.user_data.get("token", None), + token=self.user.token, ) with renku_project_context(tempdir_path): + self.project_path = tempdir_path return self.renku_op() def renku_op(self): @@ -83,16 +85,14 @@ def to_response(self): """Execute controller flow and serialize to service response.""" from renku.ui.service.views.error_handlers import pretty_print_error - if "project_id" in self.context: + # NOTE: use quick flow but fallback to regular flow in case of unexpected exceptions + try: + result = self._fast_op_without_cache() + except (AuthenticationError, ProjectNotFound): + raise + except BaseException as e: + service_log.info(f"fast gitlab checkout didnt work: {e}", exc_info=e) result = self.execute_op() - else: - # NOTE: use quick flow but fallback to regular flow in case of unexpected exceptions - try: - result = self._fast_op_without_cache() - except 
(AuthenticationError, ProjectNotFound): - raise - except BaseException: - result = self.execute_op() result_dict = asdict(result) diff --git a/renku/ui/service/controllers/cache_project_clone.py b/renku/ui/service/controllers/cache_project_clone.py deleted file mode 100644 index bf0c9420a2..0000000000 --- a/renku/ui/service/controllers/cache_project_clone.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Renku service cache clone project controller.""" -from marshmallow import EXCLUDE - -from renku.ui.service.controllers.api.abstract import ServiceCtrl -from renku.ui.service.controllers.api.mixins import RenkuOperationMixin -from renku.ui.service.controllers.utils.project_clone import user_project_clone -from renku.ui.service.serializers.cache import ProjectCloneContext, ProjectCloneResponseRPC, RepositoryCloneRequest -from renku.ui.service.views import result_response - - -class ProjectCloneCtrl(ServiceCtrl, RenkuOperationMixin): - """Controller for cloning a project endpoint.""" - - REQUEST_SERIALIZER = RepositoryCloneRequest() - RESPONSE_SERIALIZER = ProjectCloneResponseRPC() - - def __init__(self, cache, user_data, request_data): - """Construct controller.""" - self.request_data = ProjectCloneCtrl.REQUEST_SERIALIZER.load(request_data) - self.ctx = ProjectCloneContext().load({**user_data, **self.request_data}, unknown=EXCLUDE) - super().__init__(cache, user_data, self.request_data) - - @property - def context(self): - """Controller operation context.""" - return self.ctx - - def project_clone(self): - """Clones a remote project.""" - return user_project_clone(self.user_data, self.ctx) - - def renku_op(self): - """Renku operation for the controller.""" - # NOTE: We leave it empty since it does not execute renku operation. - pass - - def to_response(self): - """Execute controller flow and serialize to service response.""" - return result_response(ProjectCloneCtrl.RESPONSE_SERIALIZER, self.project_clone()) diff --git a/renku/ui/service/controllers/utils/project_clone.py b/renku/ui/service/controllers/utils/project_clone.py deleted file mode 100644 index cf7b320198..0000000000 --- a/renku/ui/service/controllers/utils/project_clone.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utilities for renku service controllers.""" -import shutil - -from renku.command.clone import project_clone_command -from renku.core.util.contexts import renku_project_context -from renku.ui.service.cache.models.project import Project -from renku.ui.service.logger import service_log -from renku.ui.service.views.decorators import requires_cache - - -@requires_cache -def user_project_clone(cache, user_data, project_data): - """Clones the project for a given user.""" - if "project_id" in project_data: - project_data.pop("project_id") - - user = cache.ensure_user(user_data) - project = cache.make_project(user, project_data, persist=False) - - # NOTE: Create parent dir so lock file can be created. - project.abs_path.parent.mkdir(parents=True, exist_ok=True) - - with project.write_lock(), renku_project_context(project.abs_path, check_git_path=False): - git_url = project_data.get("git_url") - - if git_url is not None: - try: - # NOTE: If two requests ran at the same time, by the time we acquire the lock a project might already - # be cloned by an earlier request. - found_project = Project.get( - (Project.user_id == user_data["user_id"]) - & (Project.git_url == git_url) - & (Project.project_id != project.project_id) - ) - except ValueError: - pass - else: - service_log.debug(f"project already cloned, skipping clone: {git_url}") - return found_project - - if project.abs_path.exists(): - # NOTE: Remove dir since a previous clone might have failed somewhere in the middle. - shutil.rmtree(str(project.abs_path)) - - project.abs_path.mkdir(parents=True, exist_ok=True) - - repo, project.initialized = ( - project_clone_command() - .build() - .execute( - project_data["url_with_auth"], - path=project.abs_path, - depth=project_data["depth"], - raise_git_except=True, - config={ - "user.name": project_data["fullname"], - "user.email": project_data["email"], - "pull.rebase": False, - }, - checkout_revision=project_data["branch"], - ) - ).output - project.save() - - service_log.debug(f"project successfully cloned: {repo}") - - return project diff --git a/renku/ui/service/entrypoint.py b/renku/ui/service/entrypoint.py index d3eadca274..1571c5ebda 100644 --- a/renku/ui/service/entrypoint.py +++ b/renku/ui/service/entrypoint.py @@ -1,6 +1,5 @@ -# -# Copyright 2022 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/gateways/gitlab_api_provider.py b/renku/ui/service/gateways/gitlab_api_provider.py index 5cbb7f311d..6b4f401058 100644 --- a/renku/ui/service/gateways/gitlab_api_provider.py +++ b/renku/ui/service/gateways/gitlab_api_provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +18,7 @@
 import tarfile
 import tempfile
 from pathlib import Path
-from typing import List, Optional, Union
+from typing import Generator, List, Optional, Union
 
 import gitlab
 
@@ -27,6 +26,7 @@
 from renku.core.util.os import delete_dataset_file
 from renku.domain_model.git import GitURL
 from renku.ui.service.interfaces.git_api_provider import IGitAPIProvider
+from renku.ui.service.logger import service_log
 
 
 class GitlabAPIProvider(IGitAPIProvider):
@@ -80,10 +80,11 @@ def download_files_from_api(
             raise errors.AuthenticationError from e
         except gitlab.GitlabGetError as e:
             # NOTE: better to re-raise this as a core error since it's a common case
+            service_log.warn(f"fast project clone didn't work: {e}", exc_info=e)
             if "project not found" in getattr(e, "error_message", "").lower():
                 raise errors.ProjectNotFound from e
             else:
                 raise
 
         for file in files:
             full_path = target_folder / file
@@ -93,7 +94,8 @@
             try:
                 with open(full_path, "wb") as f:
                     project.files.raw(file_path=str(file), ref=branch, streamed=True, action=f.write)
-            except gitlab.GitlabGetError:
+            except gitlab.GitlabGetError as e:
+                service_log.info("Gitlab get error", exc_info=e)
                 delete_dataset_file(full_path)
                 continue
@@ -102,4 +104,11 @@
             project.repository_archive(path=str(folder), sha=branch, streamed=True, action=f.write, format="tar.gz")
             f.seek(0)
             with tarfile.open(fileobj=f) as archive:
-                archive.extractall(path=target_folder)
+                archive.extractall(path=target_folder, members=tar_members_without_top_folder(archive, 1))
+
+
+def tar_members_without_top_folder(tar: tarfile.TarFile, strip: int) -> Generator[tarfile.TarInfo, None, None]:
+    """Gets tar members, ignoring the top folder."""
+    for member in tar.getmembers():
+        member.path = member.path.split("/", strip)[-1]
+        yield member
diff --git a/renku/ui/service/gateways/repository_cache.py b/renku/ui/service/gateways/repository_cache.py
new file mode 100644
index 0000000000..08933be566
--- /dev/null
+++ b/renku/ui/service/gateways/repository_cache.py
@@ -0,0 +1,234 @@
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
+# Eidgenössische Technische Hochschule Zürich (ETHZ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
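GitLab's `repository_archive` wraps the repository contents in a single `<project>-<sha>/` top-level directory; the `tar_members_without_top_folder` generator above strips that component during extraction. A minimal, self-contained sketch of the same idea (the helper is re-declared locally so the snippet runs on its own; names and paths are illustrative):

```python
import io
import tarfile
import tempfile
from pathlib import Path


def strip_top_folder(tar: tarfile.TarFile, strip: int = 1):
    """Yield tar members with the leading path component(s) removed (same logic as above)."""
    for member in tar.getmembers():
        member.path = member.path.split("/", strip)[-1]
        yield member


# Build an archive shaped like a GitLab repository archive: one
# "<project>-<sha>/" folder wrapping every file.
buffer = io.BytesIO()
with tarfile.open(fileobj=buffer, mode="w:gz") as tar:
    payload = b"# readme"
    info = tarfile.TarInfo("myproject-abc123/README.md")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))

buffer.seek(0)
with tempfile.TemporaryDirectory() as target, tarfile.open(fileobj=buffer) as tar:
    tar.extractall(path=target, members=strip_top_folder(tar))
    assert (Path(target) / "README.md").exists()  # no "myproject-abc123/" left over
```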
+"""Repository cache interface.""" + +import os +import shutil +import uuid +from datetime import datetime +from typing import Optional +from urllib.parse import urlparse + +import portalocker +from marshmallow import ValidationError + +from renku.command.clone import project_clone_command +from renku.core import errors +from renku.core.util.contexts import renku_project_context +from renku.core.util.os import normalize_to_ascii +from renku.domain_model.git import GitURL +from renku.infrastructure.repository import Repository +from renku.ui.service.cache import ServiceCache +from renku.ui.service.cache.models.project import Project +from renku.ui.service.cache.models.user import User +from renku.ui.service.config import PROJECT_CLONE_DEPTH_DEFAULT +from renku.ui.service.errors import IntermittentCacheError, IntermittentLockError +from renku.ui.service.interfaces.repository_cache import IRepositoryCache +from renku.ui.service.logger import service_log + + +class LocalRepositoryCache(IRepositoryCache): + """Cache for project repos stored on local disk.""" + + def get( + self, cache: ServiceCache, git_url: str, branch: Optional[str], user: User, shallow: bool = True + ) -> Project: + """Get a project from cache (clone if necessary).""" + if git_url is None: + raise ValidationError("Invalid `git_url`, URL is empty", "git_url") + + try: + project = Project.get( + (Project.user_id == user.user_id) & (Project.git_url == git_url) & (Project.branch == branch) + ) + except ValueError: + # project not found in DB + return self._clone_project(cache, git_url, branch, user, shallow) + + if not project.abs_path.exists(): + # cache folder doesn't exist anymore + project.delete() + return self._clone_project(cache, git_url, branch, user, shallow) + + if not shallow and project.is_shallow: + self._unshallow_project(project, user) + + self._maybe_update_cache(project, user) + + if not project.initialized: + raise errors.UninitializedProject(project.git_url) + + self._update_project_access_date(project) + + return project + + def evict(self, project: Project): + """Evict a project from cache.""" + try: + with project.write_lock(): + service_log.debug(f"purging project {project.project_id}:{project.name}") + project.purge() + except FileNotFoundError: + project.delete() + except Exception as e: + service_log.error(f"Couldn't purge project {project.project_id}:{project.name} from cache", exc_info=e) + + def evict_expired(self): + """Evict expired projects from cache.""" + for project in Project.all(): + if project.ttl_expired(): + self.evict(project) + + def _update_project_access_date(self, project: Project): + """Update the access date of the project to current datetime.""" + project.accessed_at = datetime.utcnow() + project.save() + + def _clone_project( + self, cache: ServiceCache, git_url: str, branch: Optional[str], user: User, shallow: bool = True + ) -> Project: + """Clone a project to cache.""" + try: + parsed_git_url = GitURL.parse(git_url) + except UnicodeError as e: + raise ValidationError("`git_url` contains unsupported characters", "git_url") from e + except errors.InvalidGitURL as e: + raise ValidationError("Invalid `git_url`", "git_url") from e + + if parsed_git_url.owner is None or parsed_git_url.name is None: + raise ValidationError("Invalid `git_url`, missing owner or repository", "git_url") + + project_data = { + "project_id": uuid.uuid4().hex, + "owner": parsed_git_url.owner, + "name": parsed_git_url.name, + "slug": normalize_to_ascii(parsed_git_url.name), + "depth": 
PROJECT_CLONE_DEPTH_DEFAULT if shallow else None, + "branch": branch, + "git_url": git_url, + "user_id": user.user_id, + } + project = cache.make_project(user, project_data, persist=False) + + # NOTE: Create parent dir so lock file can be created. + project.abs_path.parent.mkdir(parents=True, exist_ok=True) + + try: + with project.write_lock(), renku_project_context(project.abs_path, check_git_path=False): + try: + # NOTE: If two requests ran at the same time, by the time we acquire the lock a project might + # already be cloned by an earlier request. + found_project = Project.get( + (Project.user_id == user.user_id) + & (Project.git_url == git_url) + & (Project.branch == branch) + & (Project.project_id != project.project_id) + ) + except ValueError: + pass + else: + if found_project.abs_path.exists(): + service_log.debug(f"project already cloned, skipping clone: {git_url}") + self._update_project_access_date(found_project) + return found_project + + # clean directory in case of previous failed state + # NOTE: we only want to delete the contents, NOT the folder itself, in case it's still referenced + for root, dirs, files in os.walk(project.abs_path): + for f in files: + os.unlink(os.path.join(root, f)) + for d in dirs: + shutil.rmtree(os.path.join(root, d)) + + repo, project.initialized = ( + project_clone_command() + .build() + .execute( + git_url_with_auth(project, user), + path=project.abs_path, + depth=project.clone_depth, + raise_git_except=True, + config={ + "user.name": user.fullname, + "user.email": user.email, + "pull.rebase": False, + }, + checkout_revision=project.branch, + ) + ).output + project.save() + + service_log.debug(f"project successfully cloned: {repo}") + + if not project.initialized: + raise errors.UninitializedProject(project.git_url) + + return project + except (portalocker.LockException, portalocker.AlreadyLocked, errors.LockError) as e: + raise IntermittentLockError() from e + + def _unshallow_project(self, project: Project, user: User): + """Turn a shallow clone into a full clone.""" + try: + with project.write_lock(), Repository(project.abs_path) as repository: + try: + # NOTE: It could happen that repository is already un-shallowed, + # in this case we don't want to leak git exception, but still want to fetch. + repository.fetch("origin", repository.active_branch, unshallow=True) + except errors.GitCommandError: + repository.fetch("origin", repository.active_branch) + + repository.reset(f"origin/{repository.active_branch}", hard=True) + project.clone_depth = None + project.save() + except (portalocker.LockException, portalocker.AlreadyLocked, errors.LockError) as e: + raise IntermittentLockError() from e + + def _maybe_update_cache(self, project: Project, user: User): + """Update the cache from the remote if it's out of date.""" + from renku.ui.service.controllers.api.mixins import PROJECT_FETCH_TIME + + if project.fetch_age < PROJECT_FETCH_TIME: + return + + try: + with project.write_lock(), Repository(project.abs_path) as repository: + try: + # NOTE: it rarely happens that origin is not reachable. Try again if it fails. 
+ repository.fetch( + "origin", + repository.active_branch, + depth=project.clone_depth + if project.clone_depth is not None and project.clone_depth > 0 + else None, + ) + repository.reset(f"origin/{repository.active_branch}", hard=True) + except errors.GitCommandError as e: + project.purge() + raise IntermittentCacheError(e) + + project.last_fetched_at = datetime.utcnow() + project.save() + except (portalocker.LockException, portalocker.AlreadyLocked, errors.LockError) as e: + raise IntermittentLockError() from e + + +def git_url_with_auth(project: Project, user: User): + """Format url with auth.""" + git_url = urlparse(project.git_url) + + url = "oauth2:{}@{}".format(user.token, git_url.netloc) + return git_url._replace(netloc=url).geturl() diff --git a/renku/ui/service/interfaces/git_api_provider.py b/renku/ui/service/interfaces/git_api_provider.py index dfe7d022ba..bd8407d7aa 100644 --- a/renku/ui/service/interfaces/git_api_provider.py +++ b/renku/ui/service/interfaces/git_api_provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/interfaces/repository_cache.py b/renku/ui/service/interfaces/repository_cache.py new file mode 100644 index 0000000000..619341be9e --- /dev/null +++ b/renku/ui/service/interfaces/repository_cache.py @@ -0,0 +1,41 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
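For orientation, a hedged usage sketch of the new gateway defined above; the identity fields and remote URL are made up, but the calls (`ensure_user`, `LocalRepositoryCache.get`) are the ones this patch introduces:

```python
from renku.ui.service.cache import ServiceCache
from renku.ui.service.gateways.repository_cache import LocalRepositoryCache

cache = ServiceCache()
user = cache.ensure_user(
    {"user_id": "deadbeef", "fullname": "Jane Doe", "email": "jane@example.com", "token": "<gitlab-token>"}
)

# Returns a cached checkout, cloning or fetching only when needed; raises
# IntermittentLockError if another request holds the project's write lock and
# UninitializedProject if the clone is not a Renku project.
project = LocalRepositoryCache().get(
    cache,
    git_url="https://gitlab.example.com/owner/repo.git",  # hypothetical remote
    branch=None,  # None selects the remote's default branch
    user=user,
    shallow=True,  # shallow clone at PROJECT_CLONE_DEPTH_DEFAULT; False unshallows
)
print(project.abs_path)  # local working copy in the service cache
```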
+"""Repository cache interface.""" + +from abc import ABC +from typing import Optional + +from renku.ui.service.cache import ServiceCache +from renku.ui.service.cache.models.project import Project +from renku.ui.service.cache.models.user import User + + +class IRepositoryCache(ABC): + """Interface for repository cache manager.""" + + def get( + self, cache: ServiceCache, git_url: str, branch: Optional[str], user: User, shallow: bool = True + ) -> Project: + """Get a project from cache (clone if necessary).""" + raise NotImplementedError() + + def evict(self, project: Project): + """Evict a project from cache.""" + raise NotImplementedError() + + def evict_expired(self): + """Evict expired projects from cache.""" + raise NotImplementedError() diff --git a/renku/ui/service/jobs/cleanup.py b/renku/ui/service/jobs/cleanup.py index a067248b8a..836bb711c4 100644 --- a/renku/ui/service/jobs/cleanup.py +++ b/renku/ui/service/jobs/cleanup.py @@ -60,24 +60,3 @@ def cache_files_cleanup(): for chunk_folder in chunk_folders: shutil.rmtree(chunk_folder, ignore_errors=True) - - -def cache_project_cleanup(): - """Cache project a cleanup job.""" - cache = ServiceCache() - worker_log.debug("executing cache projects cleanup") - - for user, projects in cache.user_projects(): - jobs = [ - job for job in cache.get_jobs(user) if job.state in [USER_JOB_STATE_ENQUEUED, USER_JOB_STATE_IN_PROGRESS] - ] - - for project in projects: - if project.is_locked(jobs): - continue - - if project.exists() and project.ttl_expired(): - worker_log.debug(f"purging project {project.project_id}:{project.name}") - project.purge() - elif not project.exists(): - project.delete() diff --git a/renku/ui/service/logger.py b/renku/ui/service/logger.py index 4b067d6e2b..903f4f8d1d 100644 --- a/renku/ui/service/logger.py +++ b/renku/ui/service/logger.py @@ -29,11 +29,9 @@ service_log = logging.getLogger("renku.ui.service") worker_log = logging.getLogger("renku.worker") -scheduler_log = logging.getLogger("renku.scheduler") __all__ = [ "service_log", "worker_log", - "scheduler_log", "DEPLOYMENT_LOG_LEVEL", ] diff --git a/renku/ui/service/logging.yaml b/renku/ui/service/logging.yaml index 1a0bdb4acb..1652a91cfd 100644 --- a/renku/ui/service/logging.yaml +++ b/renku/ui/service/logging.yaml @@ -19,17 +19,8 @@ loggers: - console level: DEBUG propagate: false - renku.scheduler: - handlers: - - console - level: DEBUG - propagate: false rq.worker: level: INFO - rq_scheduler.scheduler: - handlers: - - console - level: INFO root: handlers: - console diff --git a/renku/ui/service/scheduler.py b/renku/ui/service/scheduler.py deleted file mode 100644 index 88ca08e42e..0000000000 --- a/renku/ui/service/scheduler.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Renku service scheduler.""" -import os -from contextlib import contextmanager -from datetime import datetime - -from rq_scheduler import Scheduler - -from renku.ui.service.jobs.cleanup import cache_files_cleanup, cache_project_cleanup -from renku.ui.service.jobs.queues import CLEANUP_QUEUE_FILES, CLEANUP_QUEUE_PROJECTS, WorkerQueues -from renku.ui.service.logger import DEPLOYMENT_LOG_LEVEL, scheduler_log - - -@contextmanager -def schedule(connection=None): - """Creates scheduler object.""" - cleanup_interval = int(os.getenv("RENKU_SVC_CLEANUP_INTERVAL", 60)) - scheduler_log.info(f"cleanup interval set to {cleanup_interval}") - - build_scheduler = Scheduler(connection=connection or WorkerQueues.connection, interval=cleanup_interval) - build_scheduler.log = scheduler_log - build_scheduler.log.debug = build_scheduler.log.info # type: ignore - scheduler_log.info("scheduler created") - - # remove old jobs from the queue - for job in build_scheduler.get_jobs(): - build_scheduler.cancel(job) - - build_scheduler.schedule( - scheduled_time=datetime.utcnow(), - queue_name=CLEANUP_QUEUE_FILES, - func=cache_files_cleanup, - interval=cleanup_interval, - timeout=cleanup_interval - 1, # NOTE: Ensure job times out before next job starts - result_ttl=cleanup_interval * 2, - ) - - build_scheduler.schedule( - scheduled_time=datetime.utcnow(), - queue_name=CLEANUP_QUEUE_PROJECTS, - func=cache_project_cleanup, - interval=cleanup_interval, - timeout=cleanup_interval - 1, # NOTE: Ensure job times out before next job starts - result_ttl=cleanup_interval * 2, - ) - - scheduler_log.info(f"log level set to {DEPLOYMENT_LOG_LEVEL}") - yield build_scheduler - - -def start_scheduler(connection=None): - """Build and start scheduler.""" - with schedule(connection=connection) as scheduler: - scheduler_log.info("running scheduler") - scheduler.run() - - -if __name__ == "__main__": - start_scheduler() diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index f4b93c516f..fd93254ab8 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -32,7 +32,6 @@ AsyncSchema, ErrorResponse, FileDetailsSchema, - LocalRepositorySchema, RemoteRepositorySchema, RenkuSyncSchema, ) @@ -233,7 +232,7 @@ class ProjectListResponseRPC(JsonRPCResponse): result = fields.Nested(ProjectListResponse) -class ProjectMigrateRequest(AsyncSchema, LocalRepositorySchema, RemoteRepositorySchema): +class ProjectMigrateRequest(AsyncSchema, RemoteRepositorySchema): """Request schema for project migrate.""" force_template_update = fields.Boolean(dump_default=False) @@ -259,7 +258,7 @@ class ProjectMigrateResponseRPC(JsonRPCResponse): result = fields.Nested(ProjectMigrateResponse) -class ProjectMigrationCheckRequest(LocalRepositorySchema, RemoteRepositorySchema): +class ProjectMigrationCheckRequest(RemoteRepositorySchema): """Request schema for project migration check.""" diff --git a/renku/ui/service/serializers/common.py b/renku/ui/service/serializers/common.py index bc2a666434..1840da5a73 100644 --- a/renku/ui/service/serializers/common.py +++ b/renku/ui/service/serializers/common.py @@ -25,13 +25,6 @@ from renku.ui.service.serializers.rpc import JsonRPCResponse -class LocalRepositorySchema(Schema): - """Schema for identifying a locally stored repository.""" - - # In the long term, the id should be used only for internal operations - project_id = fields.String(metadata={"description": "Reference to access the project in the local cache."}) - - class 
RemoteRepositoryBaseSchema(Schema): """Schema for tracking a remote repository.""" @@ -109,6 +102,15 @@ class CreationSchema(Schema): ) +class AccessSchema(Schema): + """Schema for access date.""" + + accessed_at = fields.DateTime( + load_default=datetime.utcnow, + metadata={"description": "Access date."}, + ) + + class FileDetailsSchema(ArchiveSchema, CreationSchema): """Schema for file details.""" diff --git a/renku/ui/service/serializers/config.py b/renku/ui/service/serializers/config.py index 010f5681f8..e5bfac5623 100644 --- a/renku/ui/service/serializers/config.py +++ b/renku/ui/service/serializers/config.py @@ -18,17 +18,11 @@ from marshmallow import Schema, fields -from renku.ui.service.serializers.common import ( - AsyncSchema, - LocalRepositorySchema, - MigrateSchema, - RemoteRepositorySchema, - RenkuSyncSchema, -) +from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema from renku.ui.service.serializers.rpc import JsonRPCResponse -class ConfigShowRequest(LocalRepositorySchema, RemoteRepositorySchema): +class ConfigShowRequest(RemoteRepositorySchema): """Request schema for config show.""" @@ -50,7 +44,7 @@ class ConfigShowResponseRPC(JsonRPCResponse): result = fields.Nested(ConfigShowResponse) -class ConfigSetRequest(AsyncSchema, ConfigShowSchema, LocalRepositorySchema, MigrateSchema, RemoteRepositorySchema): +class ConfigSetRequest(AsyncSchema, ConfigShowSchema, MigrateSchema, RemoteRepositorySchema): """Request schema for config set.""" diff --git a/renku/ui/service/serializers/datasets.py b/renku/ui/service/serializers/datasets.py index 56fc98fd30..a75569ae6a 100644 --- a/renku/ui/service/serializers/datasets.py +++ b/renku/ui/service/serializers/datasets.py @@ -24,7 +24,6 @@ from renku.ui.service.serializers.common import ( AsyncSchema, JobDetailsResponse, - LocalRepositorySchema, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema, @@ -46,9 +45,7 @@ class DatasetDetailsRequest(DatasetDetails): custom_metadata: fields.Field = fields.Dict() -class DatasetCreateRequest( - AsyncSchema, DatasetDetailsRequest, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema -): +class DatasetCreateRequest(AsyncSchema, DatasetDetailsRequest, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset create view.""" # NOTE: Override field in DatasetDetails @@ -68,9 +65,7 @@ class DatasetCreateResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetCreateResponse) -class DatasetRemoveRequest( - AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema -): +class DatasetRemoveRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset remove.""" @@ -93,7 +88,7 @@ class DatasetAddFile(Schema): job_id = fields.String() -class DatasetAddRequest(AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): +class DatasetAddRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset add file view.""" files = fields.List(fields.Nested(DatasetAddFile), required=True) @@ -126,7 +121,7 @@ class DatasetAddResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetAddResponse) -class DatasetListRequest(LocalRepositorySchema, RemoteRepositorySchema): +class DatasetListRequest(RemoteRepositorySchema): """Request schema for dataset list view.""" @@ -148,7 +143,7 @@ class DatasetListResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetListResponse) 
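The serializer changes in this file and the following ones are the client-visible half of the refactoring: request bodies no longer carry a cache-local `project_id` but identify the repository by `git_url` (plus an optional `branch`). A hedged before/after sketch against a hypothetical deployment URL; the auth headers are simplified:

```python
import json

import requests  # assumption: any HTTP client works the same way

headers = {"Content-Type": "application/json"}  # plus the deployment's auth headers

# Before (API <= 2.0): clone first via cache.project_clone, then address the
# cached copy by the returned project_id.
old_payload = {"project_id": "0xdeadbeef", "skip_docker_update": True}

# After (API 2.1): name the repository directly; the service clones and
# caches it on demand.
new_payload = {"git_url": "https://gitlab.example.com/owner/repo.git", "skip_docker_update": True}

response = requests.post(
    "https://renku.example.com/api/renku/cache.migrate",  # hypothetical instance
    data=json.dumps(new_payload),
    headers=headers,
)
print(response.json())
```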
-class DatasetFilesListRequest(DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema): +class DatasetFilesListRequest(DatasetNameSchema, RemoteRepositorySchema): """Request schema for dataset files list view.""" @@ -172,7 +167,7 @@ class DatasetFilesListResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetFilesListResponse) -class DatasetImportRequest(AsyncSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): +class DatasetImportRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): """Dataset import request.""" dataset_uri = fields.String(required=True) @@ -195,7 +190,6 @@ class DatasetEditRequest( AsyncSchema, DatasetDetailsRequest, DatasetNameSchema, - LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema, ): @@ -230,9 +224,7 @@ class DatasetEditResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetEditResponse) -class DatasetUnlinkRequest( - AsyncSchema, DatasetNameSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema -): +class DatasetUnlinkRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): """Dataset unlink file request.""" include_filters = fields.List(fields.String()) diff --git a/renku/ui/service/serializers/graph.py b/renku/ui/service/serializers/graph.py index 1a613933a4..f7081e9be8 100644 --- a/renku/ui/service/serializers/graph.py +++ b/renku/ui/service/serializers/graph.py @@ -17,16 +17,11 @@ """Renku graph serializers.""" from marshmallow import Schema, fields, validate -from renku.ui.service.serializers.common import ( - AsyncSchema, - LocalRepositorySchema, - MigrateSchema, - RemoteRepositorySchema, -) +from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema from renku.ui.service.serializers.rpc import JsonRPCResponse -class GraphExportRequest(AsyncSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): +class GraphExportRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): """Request schema for dataset list view.""" callback_url = fields.URL() diff --git a/renku/ui/service/serializers/project.py b/renku/ui/service/serializers/project.py index a90fc3604b..e93fa1a41d 100644 --- a/renku/ui/service/serializers/project.py +++ b/renku/ui/service/serializers/project.py @@ -21,7 +21,6 @@ from renku.domain_model.dataset import DatasetCreatorsJson as DatasetCreators from renku.ui.service.serializers.common import ( AsyncSchema, - LocalRepositorySchema, MigrateSchema, RemoteRepositoryBaseSchema, RemoteRepositorySchema, @@ -30,7 +29,7 @@ from renku.ui.service.serializers.rpc import JsonRPCResponse -class ProjectShowRequest(AsyncSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): +class ProjectShowRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): """Project show metadata request.""" @@ -66,7 +65,7 @@ class ProjectShowResponseRPC(RenkuSyncSchema): result = fields.Nested(ProjectShowResponse) -class ProjectEditRequest(AsyncSchema, LocalRepositorySchema, RemoteRepositorySchema, MigrateSchema): +class ProjectEditRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): """Project edit metadata request.""" description = fields.String(metadata={"description": "New description for the project"}) @@ -94,7 +93,7 @@ class ProjectEditResponseRPC(JsonRPCResponse): result = fields.Nested(ProjectEditResponse) -class ProjectLockStatusRequest(LocalRepositorySchema, RemoteRepositoryBaseSchema): +class ProjectLockStatusRequest(RemoteRepositoryBaseSchema): """Project lock status request.""" timeout = 
fields.Float( diff --git a/renku/ui/service/serializers/workflows.py b/renku/ui/service/serializers/workflows.py index 3dee669b51..a02f5f82a5 100644 --- a/renku/ui/service/serializers/workflows.py +++ b/renku/ui/service/serializers/workflows.py @@ -22,11 +22,11 @@ from renku.domain_model.dataset import DatasetCreatorsJson from renku.ui.cli.utils.plugins import get_supported_formats -from renku.ui.service.serializers.common import LocalRepositorySchema, RemoteRepositorySchema +from renku.ui.service.serializers.common import RemoteRepositorySchema from renku.ui.service.serializers.rpc import JsonRPCResponse -class WorkflowPlansListRequest(LocalRepositorySchema, RemoteRepositorySchema): +class WorkflowPlansListRequest(RemoteRepositorySchema): """Request schema for plan list view.""" @@ -64,7 +64,7 @@ class WorkflowPlansListResponseRPC(JsonRPCResponse): result = fields.Nested(WorkflowPlansListResponse) -class WorkflowPlansShowRequest(LocalRepositorySchema, RemoteRepositorySchema): +class WorkflowPlansShowRequest(RemoteRepositorySchema): """Request schema for plan show view.""" plan_id = fields.String(required=True) @@ -201,7 +201,7 @@ class WorkflowPlansShowResponseRPC(JsonRPCResponse): ) -class WorkflowPlansExportRequest(LocalRepositorySchema, RemoteRepositorySchema): +class WorkflowPlansExportRequest(RemoteRepositorySchema): """Request schema for exporting a plan.""" plan_id = fields.String(required=True) diff --git a/renku/ui/service/views/api_versions.py b/renku/ui/service/views/api_versions.py index 487bfebda7..e7511a3f8a 100644 --- a/renku/ui/service/views/api_versions.py +++ b/renku/ui/service/views/api_versions.py @@ -62,12 +62,13 @@ def add_url_rule( V1_3 = ApiVersion("1.3") V1_4 = ApiVersion("1.4") V1_5 = ApiVersion("1.5") -V2_0 = ApiVersion("2.0", is_base_version=True) +V2_0 = ApiVersion("2.0") +V2_1 = ApiVersion("2.1", is_base_version=True) -VERSIONS_FROM_V1_5 = [V1_5, V2_0] +VERSIONS_FROM_V1_5 = [V1_5, V2_0, V2_1] VERSIONS_FROM_V1_4 = [V1_4] + VERSIONS_FROM_V1_5 VERSIONS_FROM_V1_1 = [V1_1, V1_2, V1_3] + VERSIONS_FROM_V1_4 ALL_VERSIONS = [V1_0] + VERSIONS_FROM_V1_1 MINIMUM_VERSION = V1_0 -MAXIMUM_VERSION = V2_0 +MAXIMUM_VERSION = V2_1 diff --git a/renku/ui/service/views/apispec.py b/renku/ui/service/views/apispec.py index 36a08f84e8..82287b6cc2 100644 --- a/renku/ui/service/views/apispec.py +++ b/renku/ui/service/views/apispec.py @@ -40,20 +40,6 @@ TOP_LEVEL_DESCRIPTION = """ This is the API specification of the renku core service. -The basic API is low-level and requires that the client handles project -(repository) state in the service cache by invoking the `cache.project_clone` -method. This returns a `project_id` that is required for many of the other API -calls. Note that the `project_id` identifies a combination of `git_url` and -`ref` - i.e. each combination of `git_url` and `ref` receives a different -`project_id`. - -## Higher-level interface - -Some API methods allow the client to defer repository management to the service. -In these cases, the API documentation will include `project_id` _and_ -`git_url`+`ref` in the spec. Note that for such methods, _either_ `project_id` -_or_ `git_url` (and optionally `ref`) should be passed in the request body. 
- ## Responses Loosely following the JSON-RPC 2.0 Specification, the methods all return with diff --git a/renku/ui/service/views/cache.py b/renku/ui/service/views/cache.py index 403ac162e8..9803c25e4d 100644 --- a/renku/ui/service/views/cache.py +++ b/renku/ui/service/views/cache.py @@ -15,18 +15,18 @@ # See the License for the specific language governing permissions and # limitations under the License. """Renku service cache views.""" -from flask import request +from flask import jsonify, request from renku.ui.service.config import SERVICE_PREFIX from renku.ui.service.controllers.cache_files_delete_chunks import DeleteFileChunksCtrl from renku.ui.service.controllers.cache_files_upload import UploadFilesCtrl -from renku.ui.service.controllers.cache_list_projects import ListProjectsCtrl from renku.ui.service.controllers.cache_list_uploaded import ListUploadedFilesCtrl from renku.ui.service.controllers.cache_migrate_project import MigrateProjectCtrl from renku.ui.service.controllers.cache_migrations_check import MigrationsCheckCtrl -from renku.ui.service.controllers.cache_project_clone import ProjectCloneCtrl from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, VERSIONS_FROM_V1_1, VersionedBlueprint +from renku.ui.service.gateways.repository_cache import LocalRepositoryCache +from renku.ui.service.jobs.cleanup import cache_files_cleanup +from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VERSIONS_FROM_V1_1, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, @@ -126,58 +126,6 @@ def delete_file_chunks_view(user_data, cache): return DeleteFileChunksCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@cache_blueprint.route("/cache.project_clone", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) -@handle_common_except -@accepts_json -@requires_cache -@requires_identity -def project_clone_view(user_data, cache): - """ - Clone a remote project. - - --- - post: - description: Clone a remote project. If the project is cached already, - a new clone operation will override the old cache state. - requestBody: - content: - application/json: - schema: RepositoryCloneRequest - responses: - 200: - description: Cloned project. - content: - application/json: - schema: ProjectCloneResponseRPC - tags: - - cache - """ - return ProjectCloneCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore - - -@cache_blueprint.route("/cache.project_list", methods=["GET"], provide_automatic_options=False, versions=ALL_VERSIONS) -@handle_common_except -@requires_cache -@requires_identity -def list_projects_view(user_data, cache): - """ - List cached projects. - - --- - get: - description: List cached projects. - responses: - 200: - description: List of cached projects. 
-          content:
-            application/json:
-              schema: ProjectListResponseRPC
-      tags:
-        - cache
-    """
-    return ListProjectsCtrl(cache, user_data).to_response()
-
-
 @cache_blueprint.route("/cache.migrate", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V1_1)
 @handle_common_except
 @handle_migration_write_errors
@@ -207,7 +155,9 @@ def migrate_project_view(user_data, cache):
     return MigrateProjectCtrl(cache, user_data, dict(request.json)).to_response()  # type: ignore
 
 
-@cache_blueprint.route("/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=[V2_0])
+@cache_blueprint.route(
+    "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1]
+)
 @handle_common_except
 @handle_migration_read_errors
 @requires_cache
@@ -234,4 +184,30 @@ def migration_check_project_view(user_data, cache):
     return MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()).to_response()
 
 
+@cache_blueprint.route("/cache.cleanup", methods=["GET"], provide_automatic_options=False, versions=[V2_1])
+@handle_common_except
+@handle_migration_read_errors
+@requires_cache
+@optional_identity
+def cache_cleanup(user_data, cache):
+    """
+    Clean up the local project cache.
+
+    ---
+    get:
+      description: Clean up the local project cache, evicting expired projects and stale files.
+      responses:
+        200:
+          description: Cache successfully cleaned.
+          content:
+            application/json:
+              schema: CacheCleanupResponseRPC
+      tags:
+        - cache
+    """
+    LocalRepositoryCache().evict_expired()
+    cache_files_cleanup()
+    return jsonify({"result": "ok"})
+
+
 cache_blueprint = add_v1_specific_endpoints(cache_blueprint)
diff --git a/renku/ui/service/views/error_handlers.py b/renku/ui/service/views/error_handlers.py
index 7cd52f13f6..0e828d6f0c 100644
--- a/renku/ui/service/views/error_handlers.py
+++ b/renku/ui/service/views/error_handlers.py
@@ -108,12 +108,15 @@ def decorated_function(*args, **kwargs):
         try:
             return f(*args, **kwargs)
         except ValidationError as e:
-            items = squash(e.messages).items()
-            reasons = []
-            for key, value in items:
-                if key == "project_id":
-                    raise IntermittentProjectIdError(e)
-                reasons.append(f"'{key}': {', '.join(value)}")
+            if isinstance(e.messages, dict):
+                items = squash(e.messages).items()
+                reasons = []
+                for key, value in items:
+                    if key == "project_id":
+                        raise IntermittentProjectIdError(e)
+                    reasons.append(f"'{key}': {', '.join(value)}")
+            else:
+                reasons = e.messages
 
             error_message = f"{'; '.join(reasons)}"
             if "Invalid `git_url`" in error_message:
@@ -176,7 +179,11 @@ def decorated_function(*args, **kwargs):
             error_message_safe = re.sub("^(.+oauth2:)[^@]+(@.+)$", r"\1\2", error_message_safe)
             if "access denied" in error_message:
                 raise UserRepoNoAccessError(e, error_message_safe)
-            elif "is this a git repository?" in error_message or "not found" in error_message:
+            elif (
+                "is this a git repository?" 
in error_message + or "not found" in error_message + or "ailed to connect to" in error_message # Sometimes the 'f' is capitalized, sometimes not + ): raise UserRepoUrlInvalidError(e, error_message_safe) elif "connection timed out" in error_message: raise IntermittentTimeoutError(e) diff --git a/renku/ui/service/views/templates.py b/renku/ui/service/views/templates.py index fdcee4c952..f515840abb 100644 --- a/renku/ui/service/views/templates.py +++ b/renku/ui/service/views/templates.py @@ -20,7 +20,7 @@ from renku.ui.service.config import SERVICE_PREFIX from renku.ui.service.controllers.templates_create_project import TemplatesCreateProjectCtrl from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, VersionedBlueprint +from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, @@ -34,7 +34,7 @@ @templates_blueprint.route( - "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=[V2_0] + "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1] ) @handle_common_except @handle_templates_read_errors diff --git a/start-telepresence.sh b/start-telepresence.sh index c295bafdc0..1738271d80 100755 --- a/start-telepresence.sh +++ b/start-telepresence.sh @@ -72,7 +72,7 @@ then mkdir temp/service_cache fi -POD_NAME="${DEV_NAMESPACE}-renku-core-${CORE_VERSION}" +POD_NAME="${DEV_NAMESPACE}-core-${CORE_VERSION}" echo -e "" echo -e "Context: ${COLOR_RED}${CURRENT_CONTEXT}${COLOR_RESET}, target: ${COLOR_RED}${POD_NAME}${COLOR_RESET}" echo "Starting telepresence..." diff --git a/tests/service/controllers/utils/test_project_clone.py b/tests/service/controllers/utils/test_project_clone.py index 1349e9b129..55596113ad 100644 --- a/tests/service/controllers/utils/test_project_clone.py +++ b/tests/service/controllers/utils/test_project_clone.py @@ -16,64 +16,12 @@ # limitations under the License. 
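The handler above also scrubs `oauth2:<token>@` credentials out of git error messages before they are returned to clients; a quick self-contained check of that substitution with a made-up URL and token:

```python
import re

message = "fatal: unable to access 'https://oauth2:s3cr3t@gitlab.example.com/owner/repo.git/'"
safe = re.sub("^(.+oauth2:)[^@]+(@.+)$", r"\1\2", message)

assert "s3cr3t" not in safe
print(safe)  # ... 'https://oauth2:@gitlab.example.com/owner/repo.git/'
```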
"""Renku service project clone tests.""" import json -import time -import uuid import pytest -from marshmallow import EXCLUDE from werkzeug.utils import secure_filename -from renku.ui.service.controllers.utils.project_clone import user_project_clone from renku.ui.service.serializers.headers import encode_b64 -from renku.ui.service.serializers.templates import ProjectTemplateRequest -from tests.utils import assert_rpc_response, modified_environ, retry_failed - - -@pytest.mark.integration -@retry_failed -def test_service_user_project_clone(svc_client_cache): - """Test service user project clone.""" - client, _, cache = svc_client_cache - - user_data = { - "user_id": uuid.uuid4().hex, - "email": "contact@renkulab.io", - "fullname": "renku the frog", - "token": "None", - } - project_data = { - "project_name": "deadbeef", - "project_repository": "https://dev.renku.ch", - "project_namespace": "renku-qa", - "identifier": "0xdeadbeef", - "depth": 1, - "url": "https://github.com/SwissDataScienceCenter/renku-project-template", - "owner": "SwissDataScienceCenter", - } - - project_data = ProjectTemplateRequest().load({**user_data, **project_data}, unknown=EXCLUDE) - project_one = user_project_clone(user_data, project_data) - assert project_one.age >= 0 - assert not project_one.ttl_expired() - assert project_one.exists() - old_path = project_one.abs_path - - with modified_environ(RENKU_SVC_CLEANUP_TTL_PROJECTS="1"): - time.sleep(1) - assert project_one.ttl_expired() - - with modified_environ(RENKU_SVC_CLEANUP_TTL_PROJECTS="3600"): - project_two = user_project_clone(user_data, project_data) - assert project_two.age >= 0 - assert not project_two.ttl_expired() - assert project_two.exists() - - new_path = project_two.abs_path - assert old_path == new_path - user = cache.get_user(user_data["user_id"]) - projects = [project.project_id for project in cache.get_projects(user)] - assert project_one.project_id in projects - assert project_two.project_id in projects +from tests.utils import assert_rpc_response, retry_failed @pytest.mark.service @@ -86,8 +34,8 @@ def test_service_user_non_existing_project_clone(svc_client_cache, it_remote_rep user = cache.ensure_user({"user_id": user_id}) # NOTE: clone a valid repo and verify there is one project in the cache - payload = {"git_url": it_remote_repo_url, "depth": -1} - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=headers) + payload = {"git_url": it_remote_repo_url} + response = svc_client.post("/project.show", data=json.dumps(payload), headers=headers) assert_rpc_response(response) projects = list(cache.get_projects(user)) @@ -100,7 +48,7 @@ def test_service_user_non_existing_project_clone(svc_client_cache, it_remote_rep # NOTE: try to clone a non-existing repo and verify no other projects are added to the cache payload["git_url"] = f"{it_remote_repo_url}-non-existing-project-url" - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=headers) + response = svc_client.post("/project.show", data=json.dumps(payload), headers=headers) assert_rpc_response(response, "error") projects = list(cache.get_projects(user)) diff --git a/tests/service/fixtures/service_endpoints.py b/tests/service/fixtures/service_endpoints.py index 99623791a5..e4f229856d 100644 --- a/tests/service/fixtures/service_endpoints.py +++ b/tests/service/fixtures/service_endpoints.py @@ -28,16 +28,6 @@ "headers": {"Content-Type": "application/json", "accept": "application/json"}, }, {"url": "/cache.files_upload", "allowed_method": 
"POST", "headers": {}}, - { - "url": "/cache.project_clone", - "allowed_method": "POST", - "headers": {"Content-Type": "application/json", "accept": "application/json"}, - }, - { - "url": "/cache.project_list", - "allowed_method": "GET", - "headers": {"Content-Type": "application/json", "accept": "application/json"}, - }, { "url": "/datasets.add", "allowed_method": "POST", @@ -78,14 +68,14 @@ def service_allowed_endpoint(request, svc_client, mock_redis): "headers": {"Content-Type": "application/json", "accept": "application/json"}, }, { - "url": "/cache.project_clone", + "url": "/project.show", "allowed_method": "POST", "headers": {"Content-Type": "application/json", "accept": "application/json"}, }, ] ) def service_unallowed_endpoint(request, svc_client): - """Ensure not allawed methods do not crash the app.""" + """Ensure not allowed methods do not crash the app.""" methods = { "PUT": svc_client.put, "DELETE": svc_client.delete, @@ -101,15 +91,15 @@ def unlink_file_setup(svc_client_with_repo): """Setup for testing of unlinking of a file.""" from tests.utils import make_dataset_add_payload - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo - payload = make_dataset_add_payload(project_id, [("file_path", "README.md")]) + payload = make_dataset_add_payload(url_components.href, [("file_path", "README.md")]) response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert 200 == response.status_code unlink_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": response.json["result"]["name"], "include_filters": [response.json["result"]["files"][0]["file_path"]], } diff --git a/tests/service/fixtures/service_integration.py b/tests/service/fixtures/service_integration.py index 718331f906..d0157276e1 100644 --- a/tests/service/fixtures/service_integration.py +++ b/tests/service/fixtures/service_integration.py @@ -36,21 +36,21 @@ def _mock_cache_sync(repository: Repository): We don't want to undo that temporary migration with an actual cache sync, as it would break tests with repeat service calls, if the migration was just done locally in the fixture. 
""" - from renku.ui.service.controllers.api import mixins + from renku.ui.service.gateways.repository_cache import LocalRepositoryCache current_reference = repository.head.reference if repository.head.is_valid() else repository.head.commit - def _mocked_repo_reset(self, project): + def _mocked_repo_reset(self, project, user): """Mock repo reset to work with mocked renku save.""" repository.reset(current_reference, hard=True) - reset_repo_function = mixins.RenkuOperationMixin.reset_local_repo - mixins.RenkuOperationMixin.reset_local_repo = _mocked_repo_reset # type: ignore + reset_repo_function = LocalRepositoryCache._maybe_update_cache + LocalRepositoryCache._maybe_update_cache = _mocked_repo_reset # type: ignore try: yield finally: - mixins.RenkuOperationMixin.reset_local_repo = reset_repo_function # type: ignore + LocalRepositoryCache._maybe_update_cache = reset_repo_function # type: ignore def integration_repo_path(headers, project_id, url_components): @@ -102,6 +102,9 @@ def integration_lifecycle( ): """Setup and teardown steps for integration tests.""" from renku.domain_model.git import GitURL + from renku.ui.service.cache import cache + from renku.ui.service.gateways.repository_cache import LocalRepositoryCache + from renku.ui.service.serializers.headers import RequiredIdentityHeaders marker = request.node.get_closest_marker("remote_repo") @@ -118,20 +121,16 @@ def integration_lifecycle( url_components = GitURL.parse(remote_repo) - payload = {"git_url": remote_repo, "depth": -1} - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - assert response - assert {"result"} == set(response.json.keys()) + user_data = RequiredIdentityHeaders().load(identity_headers) + user = cache.ensure_user(user_data) - project_id = response.json["result"]["project_id"] - assert isinstance(uuid.UUID(project_id), uuid.UUID) + project = LocalRepositoryCache().get(cache, remote_repo, branch=None, user=user, shallow=False) - yield svc_client, identity_headers, project_id, url_components + yield svc_client, identity_headers, project.project_id, url_components # Teardown step: Delete all branches except master (if needed). 
- if integration_repo_path(identity_headers, project_id, url_components).exists(): - with integration_repo(identity_headers, project_id, url_components) as repository: + if integration_repo_path(identity_headers, project.project_id, url_components).exists(): + with integration_repo(identity_headers, project.project_id, url_components) as repository: try: repository.push(remote="origin", refspec=f":{repository.active_branch.name}") except errors.GitCommandError: @@ -170,7 +169,7 @@ def svc_client_with_repo(svc_client_setup): svc_client, headers, project_id, url_components, repo = svc_client_setup response = svc_client.post( - "/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers + "/cache.migrate", data=json.dumps(dict(git_url=url_components.href, skip_docker_update=True)), headers=headers ) assert response.json["result"] @@ -182,49 +181,31 @@ def svc_client_with_repo(svc_client_setup): @pytest.fixture def svc_protected_old_repo(svc_synced_client, it_protected_repo_url): """Service client with remote protected repository.""" + from renku.ui.service.cache import cache as redis_cache + from renku.ui.service.gateways.repository_cache import LocalRepositoryCache + from renku.ui.service.serializers.headers import RequiredIdentityHeaders + svc_client, identity_headers, cache, user = svc_synced_client - payload = { - "git_url": it_protected_repo_url, - "depth": 1, - } + user_data = RequiredIdentityHeaders().load(identity_headers) + user = redis_cache.ensure_user(user_data) - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - project_id = response.json["result"]["project_id"] + project = LocalRepositoryCache().get(redis_cache, it_protected_repo_url, branch=None, user=user, shallow=False) - yield svc_client, identity_headers, project_id, cache, user + yield svc_client, identity_headers, project.project_id, cache, user, it_protected_repo_url @pytest.fixture() def local_remote_repository(svc_client, tmp_path, mock_redis, identity_headers, real_sync): """Client with a local remote to test pushes.""" - from marshmallow import pre_load - from renku.core.util.contexts import chdir + from renku.domain_model import git from renku.ui.cli import cli - from renku.ui.service.config import PROJECT_CLONE_NO_DEPTH - from renku.ui.service.serializers import cache + from renku.ui.service.cache import cache as redis_cache + from renku.ui.service.gateways.repository_cache import LocalRepositoryCache + from renku.ui.service.serializers.headers import RequiredIdentityHeaders from tests.fixtures.runners import RenkuRunner - # NOTE: prevent service from adding an auth token as it doesn't work with local repos - def _no_auth_format(self, data, **kwargs): - return data["git_url"] - - orig_format_url = cache.ProjectCloneContext.format_url - cache.ProjectCloneContext.format_url = _no_auth_format - - # NOTE: mock owner/project so service is happy - def _mock_owner(self, data, **kwargs): - data["owner"] = "dummy" - - data["name"] = "project" - data["slug"] = "project" - - return data - - orig_set_owner = cache.ProjectCloneContext.set_owner_name - cache.ProjectCloneContext.set_owner_name = pre_load(_mock_owner) - remote_repo_path = tmp_path / "remote_repo" remote_repo = Repository.initialize(remote_repo_path, bare=True) @@ -233,6 +214,13 @@ def _mock_owner(self, data, **kwargs): remote_repo_checkout = Repository.clone_from(url=remote_repo_path, path=remote_repo_checkout_path) + # NOTE: Mock GitURL parsing for local URL + def 
_parse(href): + return git.GitURL(href=href, regex="", owner="dummy", name="project", slug="project", path=remote_repo_path) + + original_giturl_parse = git.GitURL.parse + git.GitURL.parse = _parse + home = tmp_path / "user_home" home.mkdir() @@ -258,20 +246,18 @@ def _mock_owner(self, data, **kwargs): except OSError: pass - payload = {"git_url": f"file://{remote_repo_path}", "depth": PROJECT_CLONE_NO_DEPTH} - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) + user_data = RequiredIdentityHeaders().load(identity_headers) + user = redis_cache.ensure_user(user_data) + remote_url = f"file://{remote_repo_path}" - assert response - assert {"result"} == set(response.json.keys()), response.json + project = LocalRepositoryCache().get(redis_cache, remote_url, branch=None, user=user, shallow=False) - project_id = response.json["result"]["project_id"] - assert isinstance(uuid.UUID(project_id), uuid.UUID) + project_id = project.project_id try: - yield svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout + yield svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout, remote_url finally: - cache.ProjectCloneContext.format_url = orig_format_url - cache.ProjectCloneContext.set_owner_name = orig_set_owner + git.GitURL.parse = original_giturl_parse try: shutil.rmtree(remote_repo_path) diff --git a/tests/service/fixtures/service_projects.py b/tests/service/fixtures/service_projects.py index baa6f137c1..5d274cc2f9 100644 --- a/tests/service/fixtures/service_projects.py +++ b/tests/service/fixtures/service_projects.py @@ -41,7 +41,7 @@ def project_metadata(project) -> Generator[Tuple["RenkuProject", Dict[str, Any]] "email": "my@email.com", "owner": "me", "token": "awesome token", - "git_url": "git@gitlab.com", + "git_url": "https://example.com/a/b.git", "initialized": True, } diff --git a/tests/service/fixtures/service_scheduler.py b/tests/service/fixtures/service_scheduler.py deleted file mode 100644 index c547dd8542..0000000000 --- a/tests/service/fixtures/service_scheduler.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
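For context on the `GitURL.parse` monkeypatch above: the repository cache derives `owner` and `name` from the remote URL and rejects URLs missing either, which is the case for the `file://` remotes these fixtures use. A small illustration (the expected values are an assumption about `GitURL`'s parsing, not taken from this patch):

```python
from renku.domain_model.git import GitURL

url = GitURL.parse("https://gitlab.example.com/owner/repo.git")
print(url.owner, url.name)  # expected: "owner" "repo"

# A local remote such as "file:///tmp/remote_repo" has no owner, so
# LocalRepositoryCache._clone_project would reject it with a ValidationError;
# hence the dummy owner/name/slug substituted by the fixture.
```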
-"""Renku service fixtures for scheduler management.""" -import pytest - - -@pytest.fixture -def with_scheduler(mock_redis): - """Scheduler fixture.""" - from renku.ui.service.jobs.queues import WorkerQueues - from renku.ui.service.scheduler import start_scheduler - from renku.ui.service.utils.timeout import timeout - - timeout(start_scheduler, fn_kwargs={"connection": WorkerQueues.connection}, timeout_duration=5) - - from rq import Connection - - with Connection(WorkerQueues.connection): - yield diff --git a/tests/service/jobs/test_datasets.py b/tests/service/jobs/test_datasets.py index b10d64e423..8e51457f17 100644 --- a/tests/service/jobs/test_datasets.py +++ b/tests/service/jobs/test_datasets.py @@ -25,7 +25,6 @@ from renku.core.errors import DatasetExistsError, DatasetNotFound, ParameterError from renku.infrastructure.repository import Repository -from renku.ui.service.jobs.cleanup import cache_project_cleanup from renku.ui.service.jobs.datasets import dataset_add_remote_file, dataset_import from renku.ui.service.serializers.headers import JWT_TOKEN_SECRET, encode_b64 from renku.ui.service.utils import make_project_path @@ -50,7 +49,7 @@ def test_dataset_url_import_job(url, svc_client_with_repo): } payload = { - "project_id": project_id, + "git_url": url_components.href, "dataset_uri": url, } @@ -98,7 +97,7 @@ def test_dataset_import_job(doi, svc_client_with_repo): user = {"user_id": user_id} payload = { - "project_id": project_id, + "git_url": url_components.href, "dataset_uri": doi, } response = svc_client.post("/datasets.import", data=json.dumps(payload), headers=headers) @@ -153,7 +152,7 @@ def test_dataset_import_junk_job(doi, expected_err, svc_client_with_repo): user = {"user_id": user_id} payload = { - "project_id": project_id, + "git_url": url_components.href, "dataset_uri": doi, } response = svc_client.post("/datasets.import", data=json.dumps(payload), headers=headers) @@ -202,7 +201,7 @@ def test_dataset_import_twice_job(doi, svc_client_with_repo): user = {"user_id": user_id} payload = { - "project_id": project_id, + "git_url": url_components.href, "dataset_uri": doi, } response = svc_client.post("/datasets.import", data=json.dumps(payload), headers=headers) @@ -257,7 +256,12 @@ def test_dataset_add_remote_file(url, svc_client_with_repo): user_id = encode_b64(secure_filename("9ab2fc80-3a5c-426d-ae78-56de01d214df")) user = {"user_id": user_id} - payload = {"project_id": project_id, "name": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_url": url}]} + payload = { + "git_url": url_components.href, + "name": uuid.uuid4().hex, + "create_dataset": True, + "files": [{"file_url": url}], + } response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) @@ -365,43 +369,6 @@ def test_delay_add_file_job_failure(svc_client_cache, it_remote_repo_url_temp_br delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) -@pytest.mark.parametrize("doi", ["10.5281/zenodo.3761586"]) -@pytest.mark.integration -@pytest.mark.service -def test_dataset_project_lock(doi, svc_client_with_repo): - """Test dataset project lock.""" - svc_client, headers, project_id, url_components = svc_client_with_repo - user_id = encode_b64(secure_filename("9ab2fc80-3a5c-426d-ae78-56de01d214df")) - user = {"user_id": user_id} - - payload = { - "project_id": project_id, - "dataset_uri": doi, - } - response = svc_client.post("/datasets.import", data=json.dumps(payload), headers=headers) - - assert_rpc_response(response) - assert 
{"job_id", "created_at"} == set(response.json["result"].keys()) - - dest = make_project_path( - user, - { - "owner": url_components.owner, - "name": url_components.name, - "slug": url_components.slug, - "project_id": project_id, - }, - ) - - old_commit = Repository(dest).head.commit - - cache_project_cleanup() - - new_commit = Repository(dest).head.commit - assert old_commit.hexsha == new_commit.hexsha - assert dest.exists() and [file for file in dest.glob("*")] - - @pytest.mark.service @pytest.mark.integration @retry_failed diff --git a/tests/service/jobs/test_jobs.py b/tests/service/jobs/test_jobs.py index 804bbe7232..9be09f9695 100644 --- a/tests/service/jobs/test_jobs.py +++ b/tests/service/jobs/test_jobs.py @@ -17,16 +17,12 @@ """Renku service job tests.""" import io import os -import time import uuid import pytest -from marshmallow import EXCLUDE -from renku.ui.service.controllers.utils.project_clone import user_project_clone -from renku.ui.service.jobs.cleanup import cache_files_cleanup, cache_project_cleanup -from renku.ui.service.serializers.templates import ProjectTemplateRequest -from tests.utils import assert_rpc_response, modified_environ, retry_failed +from renku.ui.service.jobs.cleanup import cache_files_cleanup +from tests.utils import assert_rpc_response, retry_failed @pytest.mark.service @@ -103,59 +99,6 @@ def test_cleanup_files_old_keys(svc_client_with_user, service_job, tmp_path): assert 0 == len(list(cache.get_chunks(user, chunk_id))) -@pytest.mark.service -@pytest.mark.jobs -@pytest.mark.integration -@retry_failed -def test_cleanup_old_project(datapack_zip, svc_client_with_repo, service_job): - """Upload archive and add its contents to a dataset.""" - svc_client, headers, _, _ = svc_client_with_repo - headers.pop("Content-Type") - - response = svc_client.get("/cache.project_list", headers=headers) - - assert_rpc_response(response) - assert 1 == len(response.json["result"]["projects"]) - - cache_project_cleanup() - response = svc_client.get("/cache.project_list", headers=headers) - - assert_rpc_response(response) - assert 0 == len(response.json["result"]["projects"]) - - -@pytest.mark.service -@pytest.mark.jobs -def test_cleanup_project_old_keys(svc_client_with_user, service_job): - """Cleanup old project with old hset keys.""" - svc_client, headers, cache, user = svc_client_with_user - - project = { - "project_id": uuid.uuid4().hex, - "name": "my-project", - "slug": "my-project", - "fullname": "full project name", - "email": "my@email.com", - "owner": "me", - "token": "awesome token", - "git_url": "git@gitlab.com", - "initialized": True, - } - project = cache.make_project(user, project) - os.makedirs(str(project.abs_path), exist_ok=True) - - response = svc_client.get("/cache.project_list", headers=headers) - - assert_rpc_response(response) - assert 1 == len(response.json["result"]["projects"]) - - cache_project_cleanup() - response = svc_client.get("/cache.project_list", headers=headers) - - assert_rpc_response(response) - assert 0 == len(response.json["result"]["projects"]) - - @pytest.mark.service @pytest.mark.jobs def test_job_constructor_lock(svc_client_with_user, service_job): @@ -184,55 +127,3 @@ def test_job_constructor_lock(svc_client_with_user, service_job): assert project.project_id == job.project_id assert user.user_id == job.user_id assert project.project_id in {_id.decode("utf-8") for _id in job.locked.members()} - - -@pytest.mark.integration -@retry_failed -def test_project_cleanup_success(svc_client_cache): - """Test project cleanup through the job.""" 
- client, _, cache = svc_client_cache - - user_data = { - "user_id": uuid.uuid4().hex, - "email": "contact@renkulab.io", - "fullname": "renku the frog", - "token": "None", - } - project_data = { - "project_name": "deadbeef", - "project_repository": "https://dev.renku.ch", - "project_namespace": "renku-qa", - "identifier": "0xdeadbeef", - "depth": 1, - "url": "https://github.com/SwissDataScienceCenter/renku-project-template", - "owner": "SwissDataScienceCenter", - } - project_data = ProjectTemplateRequest().load({**user_data, **project_data}, unknown=EXCLUDE) - assert "user_id" not in project_data.keys() - project_one = user_project_clone(user_data, project_data) - - assert project_one.age >= 0 - assert not project_one.ttl_expired() - assert project_one.exists() - - with modified_environ(RENKU_SVC_CLEANUP_TTL_PROJECTS="1"): - time.sleep(1) - - assert project_one.age >= 1 - assert project_one.ttl_expired() - - cache_project_cleanup() - - project_data = ProjectTemplateRequest().load({**user_data, **project_data}, unknown=EXCLUDE) - assert "user_id" not in project_data.keys() - user = cache.get_user(user_data["user_id"]) - projects = cache.get_projects(user) - assert [] == [p.project_id for p in projects] - - project_two = user_project_clone(user_data, project_data) - with modified_environ(RENKU_SVC_CLEANUP_TTL_PROJECTS="1800"): - assert project_two.age >= 0 - assert not project_two.ttl_expired() - assert project_two.exists() - - assert project_one.project_id != project_two.project_id diff --git a/tests/service/scheduler/test_scheduler.py b/tests/service/scheduler/test_scheduler.py deleted file mode 100644 index bb3cee76f6..0000000000 --- a/tests/service/scheduler/test_scheduler.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
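Most of the view-test churn from here on is one mechanical change: request payloads and query strings identify a project by `git_url` instead of a cache-local `project_id`, and the service resolves that URL against its repository cache itself. A short sketch of the new calling convention, assuming a Flask test client `svc_client` and valid `identity_headers` (the repository URL is the same placeholder the fixtures use):

```python
import json
import uuid

# Old convention (removed by this patch): {"project_id": project_id, "name": ...}
payload = {
    "git_url": "https://example.com/a/b.git",  # placeholder URL, as in the fixtures
    "name": uuid.uuid4().hex,
}
response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers)
assert {"result"} == set(response.json.keys())

# GET endpoints accept the same key as a query parameter.
response = svc_client.get(
    "/datasets.list", query_string={"git_url": payload["git_url"]}, headers=identity_headers
)
```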
-"""Renku service tests for scheduler management.""" -from rq import Queue - - -def test_enqueue_jobs(with_scheduler): - """Enqueue jobs at a interval.""" - queues = Queue.all() - assert queues - - assert 2 == len(queues) - for q in queues: - assert 1 == q.count diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index b37b9af453..033ac9b767 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -27,7 +27,6 @@ from renku.core.dataset.context import DatasetContext from renku.core.util.git import with_commit -from renku.domain_model.git import GitURL from renku.domain_model.project import Project from renku.domain_model.project_context import project_context from renku.domain_model.provenance.agent import Person @@ -436,15 +435,13 @@ def test_clone_projects_no_auth(svc_client, identity_headers, it_remote_repo_url "git_url": it_remote_repo_url, } - response = svc_client.post( - "/cache.project_clone", data=json.dumps(payload), headers={"Content-Type": "application/json"} - ) + response = svc_client.post("/project.show", data=json.dumps(payload), headers={"Content-Type": "application/json"}) assert 200 == response.status_code assert {"error"} == set(response.json.keys()) assert UserAnonymousError.code == response.json["error"]["code"] - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) + response = svc_client.post("/project.show", data=json.dumps(payload), headers=identity_headers) assert 200 == response.status_code assert {"result"} == set(response.json.keys()) @@ -458,122 +455,11 @@ def test_clone_projects_with_auth(svc_client, identity_headers, it_remote_repo_u "git_url": it_remote_repo_url, } - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - assert response.json["result"]["initialized"] - - -@pytest.mark.service -@pytest.mark.integration -@retry_failed -def test_clone_projects_multiple(svc_client, identity_headers, it_remote_repo_url): - """Check multiple cloning of remote repository.""" - project_ids = [] - - payload = { - "git_url": it_remote_repo_url, - } - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - assert response - - assert {"result"} == set(response.json.keys()) - project_ids.append(response.json["result"]) - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - project_ids.append(response.json["result"]) - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - project_ids.append(response.json["result"]) - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - last_pid = response.json["result"]["project_id"] - - response = svc_client.get("/cache.project_list", headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - - pids = [p["project_id"] for p in response.json["result"]["projects"]] - assert last_pid in pids - assert 1 == len(pids) - - for inserted in project_ids: - assert inserted["project_id"] == last_pid - - -@pytest.mark.service 
-@pytest.mark.integration -@retry_failed -def test_clone_projects_list_view_errors(svc_client, identity_headers, it_remote_repo_url): - """Check cache state of cloned projects with no headers.""" - payload = { - "git_url": it_remote_repo_url, - } - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - assert response - assert {"result"} == set(response.json.keys()) - - assert isinstance(uuid.UUID(response.json["result"]["project_id"]), uuid.UUID) - - response = svc_client.get( - "/cache.project_list", - # no auth headers, expected error - ) - assert 200 == response.status_code - assert {"error"} == set(response.json.keys()) - assert UserAnonymousError.code == response.json["error"]["code"] - - response = svc_client.get("/cache.project_list", headers=identity_headers) - - assert response - assert {"result"} == set(response.json.keys()) - assert 1 == len(response.json["result"]["projects"]) - - project = response.json["result"]["projects"][0] - assert isinstance(uuid.UUID(project["project_id"]), uuid.UUID) - assert isinstance(GitURL.parse(project["git_url"]), GitURL) - - -@pytest.mark.service -@pytest.mark.integration -@retry_failed -def test_clone_projects_invalid_headers(svc_client, identity_headers, it_remote_repo_url): - """Check cache state of cloned projects with invalid headers.""" - payload = { - "git_url": it_remote_repo_url, - } - - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) - assert response - - assert {"result"} == set(response.json.keys()) - - response = svc_client.get( - "/cache.project_list", - # no auth headers, expected error - ) - assert 200 == response.status_code - assert {"error"} == set(response.json.keys()) - assert UserAnonymousError.code == response.json["error"]["code"] - - response = svc_client.get("/cache.project_list", headers=identity_headers) + response = svc_client.post("/project.show", data=json.dumps(payload), headers=identity_headers) assert response assert {"result"} == set(response.json.keys()) - assert 1 == len(response.json["result"]["projects"]) + assert response.json["result"]["name"] == "core-integration-test" @pytest.mark.service @@ -803,10 +689,10 @@ def test_field_upload_resp_fields(datapack_tar, svc_client_with_repo): @pytest.mark.remote_repo("old") def test_execute_migrations(svc_client_setup): """Check execution of all migrations.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup response = svc_client.post( - "/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers + "/cache.migrate", data=json.dumps(dict(git_url=url_components.href, skip_docker_update=True)), headers=headers ) assert 200 == response.status_code @@ -823,10 +709,10 @@ def test_execute_migrations(svc_client_setup): @pytest.mark.integration def test_execute_migrations_job(svc_client_setup): """Check execution of all migrations.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup response = svc_client.post( - "/cache.migrate", data=json.dumps(dict(project_id=project_id, is_delayed=True)), headers=headers + "/cache.migrate", data=json.dumps(dict(git_url=url_components.href, is_delayed=True)), headers=headers ) assert 200 == response.status_code @@ -856,9 +742,11 @@ def test_execute_migrations_remote(svc_client, identity_headers, it_remote_old_r 
@pytest.mark.integration def test_check_migrations_local(svc_client_setup): """Check if migrations are required for a local project.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get( + "/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers + ) assert 200 == response.status_code assert not response.json["result"]["core_compatibility_status"]["migration_required"] @@ -929,7 +817,7 @@ def test_check_migrations_remote_errors( @pytest.mark.integration def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): """Check if migrations gracefully fail when the project template is not available.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup # NOTE: fake source with monkeypatch.context() as monkey: @@ -939,7 +827,9 @@ def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): renku.core.template.usecase.TemplateMetadata, "source", property(MagicMock(return_value="https://FAKE_URL")) ) - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get( + "/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers + ) assert_rpc_response(response) @@ -953,7 +843,7 @@ def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): @pytest.mark.integration def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): """Check if migrations gracefully fail when the project template points to a wrong ref.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup # NOTE: fake reference with monkeypatch.context() as monkey: from renku.domain_model.template import TemplateMetadata @@ -961,7 +851,9 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): monkey.setattr(TemplateMetadata, "source", property(MagicMock(return_value=template["url"]))) monkey.setattr(TemplateMetadata, "reference", property(MagicMock(return_value="FAKE_REF"))) - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get( + "/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers + ) assert_rpc_response(response) @@ -977,7 +869,7 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): @retry_failed def test_cache_is_reset_after_failing_push(svc_protected_old_repo): """Check cache state is reset after pushing to a protected branch fails.""" - svc_client, headers, project_id, cache, user = svc_protected_old_repo + svc_client, headers, project_id, cache, user, url = svc_protected_old_repo project = cache.get_project(user, project_id) repository = Repository(path=project.abs_path) @@ -985,7 +877,7 @@ def test_cache_is_reset_after_failing_push(svc_protected_old_repo): active_branch_before = repository.active_branch.name response = svc_client.post( - "/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers + "/cache.migrate", data=json.dumps(dict(git_url=url, skip_docker_update=True)), headers=headers ) assert 200 == response.status_code assert 
response.json["result"]["was_migrated"] @@ -1003,14 +895,14 @@ def test_cache_is_reset_after_failing_push(svc_protected_old_repo): @retry_failed def test_migrating_protected_branch(svc_protected_old_repo): """Check migrating on a protected branch does not change cache state.""" - svc_client, headers, project_id, _, _ = svc_protected_old_repo + svc_client, headers, project_id, _, _, url = svc_protected_old_repo - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get("/cache.migrations_check", query_string=dict(git_url=url), headers=headers) assert 200 == response.status_code assert response.json["result"]["core_compatibility_status"]["migration_required"] response = svc_client.post( - "/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers + "/cache.migrate", data=json.dumps(dict(git_url=url, skip_docker_update=True)), headers=headers ) assert 200 == response.status_code @@ -1019,7 +911,7 @@ def test_migrating_protected_branch(svc_protected_old_repo): m.startswith("Successfully applied") and m.endswith("migrations.") for m in response.json["result"]["messages"] ) - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get("/cache.migrations_check", query_string=dict(git_url=url), headers=headers) assert 200 == response.status_code assert response.json["result"]["core_compatibility_status"]["migration_required"] @@ -1032,7 +924,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ """Test that the cache stays synchronized with the remote repository.""" from renku.domain_model.provenance.agent import Person - svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout = local_remote_repository + svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout, remote_url = local_remote_repository with project_context.with_path(remote_repo_checkout.path): with with_injection(remote_repo_checkout): @@ -1046,7 +938,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ remote_repo_checkout.push() params = { - "project_id": project_id, + "git_url": remote_url, } response = svc_client.get("/datasets.list", query_string=params, headers=identity_headers) @@ -1057,7 +949,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ assert 1 == len(response.json["result"]["datasets"]) payload = { - "project_id": project_id, + "git_url": remote_url, "name": uuid.uuid4().hex, } @@ -1097,7 +989,7 @@ def test_check_migrations_local_minimum_version(svc_client_setup, mocker, monkey """Check if migrations are required for a local project.""" monkeypatch.setenv("RENKU_SKIP_MIN_VERSION_CHECK", "0") - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup def mock_database_project(project): def mocked_getter(self, key): @@ -1112,7 +1004,9 @@ def mocked_getter(self, key): mocker.patch("renku.infrastructure.database.Database.__getitem__", mock_database_project(dummy_project)) mocker.patch("renku.version.__version__", "1.0.0") - response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get( + "/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers + ) assert 200 == response.status_code assert 
response.json["result"]["core_compatibility_status"] diff --git a/tests/service/views/test_config_views.py b/tests/service/views/test_config_views.py index c7ec1ab43e..b7df4edabf 100644 --- a/tests/service/views/test_config_views.py +++ b/tests/service/views/test_config_views.py @@ -29,10 +29,10 @@ @retry_failed def test_config_view_show(svc_client_with_repo): """Check config show view.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/config.show", query_string=params, headers=headers) @@ -82,10 +82,10 @@ def test_config_view_show_remote(svc_client_with_repo, it_remote_repo_url): @retry_failed def test_config_view_set(svc_client_with_repo): """Check config set view.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "config": { "lfs_threshold": "1b", "renku.autocommit_lfs": "true", @@ -100,7 +100,7 @@ def test_config_view_set(svc_client_with_repo): assert {"error"} != set(response.json.keys()) params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/config.show", query_string=params, headers=headers) @@ -113,7 +113,7 @@ def test_config_view_set(svc_client_with_repo): assert 200 == response.status_code payload = { - "project_id": project_id, + "git_url": url_components.href, "config": {"lfs_threshold": None, "interactive.default_url": "/still_not_lab", "interactive.dummy": None}, } @@ -136,12 +136,12 @@ def test_config_view_set(svc_client_with_repo): @retry_failed def test_config_view_set_failures(svc_client_with_repo): """Check errors triggered while invoking config set.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo # NOTE: remove a non existing value non_existing_param = "NON_EXISTING" payload = { - "project_id": project_id, + "git_url": url_components.href, "config": { non_existing_param: None, }, @@ -160,11 +160,11 @@ def test_config_view_set_failures(svc_client_with_repo): @retry_failed def test_config_view_set_and_show_failures(svc_client_with_repo): """Check errors triggered while invoking config set.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo # NOTE: use sections with wrong chars introduces a readin error. Should we handle it at write time? 
payload = { - "project_id": project_id, + "git_url": url_components.href, "config": {".NON_EXISTING": "test"}, } @@ -173,7 +173,7 @@ def test_config_view_set_and_show_failures(svc_client_with_repo): assert 200 == response.status_code assert {"error"} != set(response.json.keys()) - response = svc_client.get("/config.show", query_string={"project_id": project_id}, headers=headers) + response = svc_client.get("/config.show", query_string={"git_url": url_components.href}, headers=headers) assert 200 == response.status_code assert {"error"} == set(response.json.keys()) diff --git a/tests/service/views/test_dataset_views.py b/tests/service/views/test_dataset_views.py index 677efbdb9b..16f409ed23 100644 --- a/tests/service/views/test_dataset_views.py +++ b/tests/service/views/test_dataset_views.py @@ -29,8 +29,8 @@ from renku.ui.service.errors import ( IntermittentDatasetExistsError, IntermittentFileNotExistsError, - IntermittentProjectIdError, ProgramInvalidGenericFieldsError, + ProgramRepoUnknownError, UserAnonymousError, UserDatasetsMultipleImagesError, UserDatasetsUnlinkError, @@ -71,10 +71,10 @@ def upload_file(svc_client, headers, filename) -> str: @retry_failed def test_create_dataset_view(svc_client_with_repo): """Create a new dataset successfully.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } @@ -90,9 +90,9 @@ def test_create_dataset_view(svc_client_with_repo): @retry_failed def test_create_dataset_view_with_datadir(svc_client_with_repo): """Create a new dataset successfully.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo - payload = {"project_id": project_id, "name": uuid.uuid4().hex, "data_directory": "my-folder/"} + payload = {"git_url": url_components.href, "name": uuid.uuid4().hex, "data_directory": "my-folder/"} response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) @@ -101,7 +101,7 @@ def test_create_dataset_view_with_datadir(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) @@ -153,13 +153,13 @@ def test_create_dataset_wrong_ref_view(svc_client_with_repo): svc_client, headers, _, _ = svc_client_with_repo payload = { - "project_id": "ref does not exist", + "git_url": "http://doesnotexistanywhere994455/a/b.git", "name": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response, "error") - assert IntermittentProjectIdError.code == response.json["error"]["code"], response.json + assert ProgramRepoUnknownError.code == response.json["error"]["code"], response.json @pytest.mark.service @@ -167,9 +167,9 @@ def test_create_dataset_wrong_ref_view(svc_client_with_repo): @retry_failed def test_remove_dataset_view(svc_client_with_repo): """Create a new dataset successfully.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } @@ -181,7 +181,7 @@ def test_remove_dataset_view(svc_client_with_repo): assert 
payload["name"] == response.json["result"]["name"] # NOTE: Ensure that dataset does not exists in this project anymore! - response = svc_client.get("/datasets.list", query_string={"project_id": project_id}, headers=headers) + response = svc_client.get("/datasets.list", query_string={"git_url": url_components.href}, headers=headers) assert_rpc_response(response) datasets = [ds["name"] for ds in response.json["result"]["datasets"]] assert payload["name"] not in datasets @@ -208,10 +208,10 @@ def test_remote_remove_view(svc_client, it_remote_repo_url, identity_headers): @retry_failed def test_create_dataset_with_metadata(svc_client_with_repo): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -226,7 +226,7 @@ def test_create_dataset_with_metadata(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) @@ -244,10 +244,10 @@ def test_create_dataset_with_metadata(svc_client_with_repo): @retry_failed def test_create_dataset_with_images(svc_client_with_repo): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -263,7 +263,7 @@ def test_create_dataset_with_images(svc_client_with_repo): assert UserDatasetsMultipleImagesError.code == response.json["error"]["code"] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -281,7 +281,7 @@ def test_create_dataset_with_images(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) @@ -306,10 +306,10 @@ def test_create_dataset_with_images(svc_client_with_repo): @retry_failed def test_create_dataset_with_custom_metadata(svc_client_with_repo): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -329,7 +329,7 @@ def test_create_dataset_with_custom_metadata(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) @@ -352,10 +352,10 @@ def 
test_create_dataset_with_custom_metadata(svc_client_with_repo): @retry_failed def test_create_dataset_with_image_download(svc_client_with_repo, img_url): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -368,7 +368,7 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): assert UserDatasetsUnreachableImageError.code == response.json["error"]["code"] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -382,7 +382,7 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) @@ -400,13 +400,13 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): @retry_failed def test_create_dataset_with_uploaded_images(svc_client_with_repo): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo file_id1 = upload_file(svc_client, headers, "image1.jpg") file_id2 = upload_file(svc_client, headers, "image2.png") payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -420,7 +420,7 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) @@ -446,10 +446,10 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): @retry_failed def test_create_dataset_invalid_creator(svc_client_with_repo): """Create a new dataset with metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "title": "my little dataset", "creators": [{"name": None, "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -468,10 +468,10 @@ def test_create_dataset_invalid_creator(svc_client_with_repo): @retry_failed def test_create_dataset_view_dataset_exists(svc_client_with_repo): """Create a new dataset which already exists.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": "mydataset", } @@ -488,10 +488,10 @@ def test_create_dataset_view_dataset_exists(svc_client_with_repo): @retry_failed def test_create_dataset_view_unknown_param(svc_client_with_repo): """Create new dataset by specifying unknown parameters.""" - svc_client, headers, 
project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo unknown_field = "remote_name" - payload = {"project_id": project_id, "name": "mydata", unknown_field: "origin"} + payload = {"git_url": url_components.href, "name": "mydata", unknown_field: "origin"} response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response, "error") @@ -504,10 +504,10 @@ def test_create_dataset_view_unknown_param(svc_client_with_repo): @retry_failed def test_create_dataset_with_no_identity(svc_client_with_repo): """Create a new dataset with no identification provided.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": "mydata", "remote_name": "origin", } @@ -525,9 +525,9 @@ def test_create_dataset_with_no_identity(svc_client_with_repo): @retry_failed def test_add_file_view_with_no_identity(svc_client_with_repo): """Check identity error raise in dataset add.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": "mydata", "remote_name": "origin", } @@ -545,12 +545,12 @@ def test_add_file_view_with_no_identity(svc_client_with_repo): @retry_failed def test_add_file_view(svc_client_with_repo): """Check adding of uploaded file to dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo file_id = upload_file(svc_client, headers, "datafile1.txt") payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_id": file_id}], @@ -587,12 +587,12 @@ def test_remote_add_view(svc_client, it_remote_repo_url, identity_headers): @retry_failed def test_add_file_failure(svc_client_with_repo): """Check adding of uploaded file to dataset with non-existing file.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo file_id = upload_file(svc_client, headers, "datafile1.txt") payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_id": file_id}, {"file_path": "my problem right here"}], @@ -608,10 +608,10 @@ def test_add_file_failure(svc_client_with_repo): @retry_failed def test_list_datasets_view(svc_client_with_repo): """Check listing of existing datasets.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) @@ -639,15 +639,7 @@ def test_list_datasets_view(svc_client_with_repo): @retry_failed def test_list_datasets_anonymous(svc_client_with_repo, it_remote_repo_url): """Check listing of existing datasets.""" - svc_client, headers, project_id, _ = svc_client_with_repo - - params = { - "project_id": project_id, - } - - response = svc_client.get("/datasets.list", query_string=params, headers={}) - assert_rpc_response(response, "error") - assert UserAnonymousError.code == response.json["error"]["code"] + svc_client, _, _, _ = 
svc_client_with_repo params = { "git_url": it_remote_repo_url, @@ -702,15 +694,15 @@ def test_list_datasets_view_remote(svc_client_with_repo, it_remote_repo_url): @retry_failed def test_list_datasets_view_no_auth(svc_client_with_repo): """Check listing of existing datasets with no auth.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params) assert_rpc_response(response, "error") - assert UserAnonymousError.code == response.json["error"]["code"] + assert UserRepoNoAccessError.code == response.json["error"]["code"] @pytest.mark.service @@ -718,13 +710,7 @@ def test_list_datasets_view_no_auth(svc_client_with_repo): @retry_failed def test_list_dataset_files_anonymous(svc_client_with_repo, it_remote_repo_url): """Check listing of existing dataset files.""" - svc_client, headers, project_id, _ = svc_client_with_repo - - params = {"project_id": project_id, "name": "ds1"} - - response = svc_client.get("/datasets.files_list", query_string=params, headers={}) - assert_rpc_response(response, "error") - assert UserAnonymousError.code == response.json["error"]["code"] + svc_client, _, _, _ = svc_client_with_repo params = {"git_url": it_remote_repo_url, "name": "ds1"} @@ -779,10 +765,10 @@ def test_remote_create_view(svc_client, it_remote_repo_url, identity_headers): @retry_failed def test_create_and_list_datasets_view(svc_client_with_repo): """Create and list created dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } @@ -792,7 +778,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) @@ -822,13 +808,13 @@ def test_create_and_list_datasets_view(svc_client_with_repo): @retry_failed def test_list_dataset_files(svc_client_with_repo): """Check listing of dataset files.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo file_name = uuid.uuid4().hex file_id = upload_file(svc_client, headers, file_name) payload = { - "project_id": project_id, + "git_url": url_components.href, "name": "mydata", "files": [{"file_id": file_id}], } @@ -839,7 +825,7 @@ def test_list_dataset_files(svc_client_with_repo): assert file_id == response.json["result"]["files"][0]["file_id"] params = { - "project_id": project_id, + "git_url": url_components.href, "name": "mydata", } @@ -856,7 +842,7 @@ def test_list_dataset_files(svc_client_with_repo): @retry_failed def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): """Upload archive and add it to a dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo content_type = headers.pop("Content-Type") response = svc_client.post( @@ -879,7 +865,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): file_ = mm["file2"] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } @@ -890,7 +876,7 @@ 
def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): assert {"name", "remote_branch"} == set(response.json["result"].keys()) assert payload["name"] == response.json["result"]["name"] - payload = {"project_id": project_id, "name": payload["name"], "files": [{"file_id": file_["file_id"]}]} + payload = {"git_url": url_components.href, "name": payload["name"], "files": [{"file_id": file_["file_id"]}]} response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) @@ -898,7 +884,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): assert file_["file_id"] == response.json["result"]["files"][0]["file_id"] params = { - "project_id": project_id, + "git_url": url_components.href, "name": payload["name"], } response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) @@ -914,7 +900,7 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): @retry_failed def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): """Upload archive and add its contents to a dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo content_type = headers.pop("Content-Type") response = svc_client.post( @@ -939,7 +925,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): files = [{"file_id": file_["file_id"]} for file_ in response.json["result"]["files"]] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } headers["Content-Type"] = content_type @@ -950,7 +936,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": payload["name"], "files": files, } @@ -961,7 +947,7 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): assert files == response.json["result"]["files"] params = { - "project_id": project_id, + "git_url": url_components.href, "name": payload["name"], } response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) @@ -976,9 +962,9 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): @retry_failed def test_add_existing_file(svc_client_with_repo): """Upload archive and add it to a dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) @@ -989,7 +975,7 @@ def test_add_existing_file(svc_client_with_repo): files = [{"file_path": "README.md"}] payload = { - "project_id": project_id, + "git_url": url_components.href, "name": payload["name"], "files": files, } @@ -1028,7 +1014,7 @@ def test_cached_import_dataset_job(doi, svc_client_cache, project): "email": "my@email.com", "owner": "me", "token": "awesome token", - "git_url": "git@gitlab.com", + "git_url": "https://example.com/a/b.git", "initialized": True, } @@ -1041,7 +1027,7 @@ def test_cached_import_dataset_job(doi, svc_client_cache, project): response = client.post( "/datasets.import", - data=json.dumps({"project_id": project_meta["project_id"], "dataset_uri": doi}), + data=json.dumps({"git_url": project_meta["git_url"], 
"dataset_uri": doi}), headers=headers, ) @@ -1094,7 +1080,7 @@ def test_dataset_add_remote(url, svc_client_cache, project_metadata): if not (project.path / dest).exists(): shutil.copytree(project.path, dest) - payload = make_dataset_add_payload(project_meta["project_id"], [url]) + payload = make_dataset_add_payload(project_meta["git_url"], [url]) response = client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) @@ -1130,7 +1116,7 @@ def test_dataset_add_multiple_remote(svc_client_cache, project_metadata): if not (project.path / dest).exists(): shutil.copytree(project.path, dest) - payload = make_dataset_add_payload(project_meta["project_id"], [url_gist, url_dbox]) + payload = make_dataset_add_payload(project_meta["git_url"], [url_gist, url_dbox]) response = client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) @@ -1154,10 +1140,11 @@ def test_dataset_add_multiple_remote(svc_client_cache, project_metadata): @retry_failed def test_add_remote_and_local_file(svc_client_with_repo): """Test dataset add remote and local files.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = make_dataset_add_payload( - project_id, [("file_path", "README.md"), "https://gist.github.com/jsam/d957f306ed0fe4ff018e902df6a1c8e3"] + url_components.href, + [("file_path", "README.md"), "https://gist.github.com/jsam/d957f306ed0fe4ff018e902df6a1c8e3"], ) response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) @@ -1202,11 +1189,11 @@ def test_add_remote_and_local_file(svc_client_with_repo): @retry_failed def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metadata_source): """Test editing dataset metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo name = uuid.uuid4().hex payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, } @@ -1217,14 +1204,14 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metada assert payload["name"] == response.json["result"]["name"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "title": "my new title", "keywords": ["keyword1"], @@ -1249,11 +1236,11 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metada @retry_failed def test_edit_datasets_view_without_modification(svc_client_with_repo): """Test editing dataset metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo name = uuid.uuid4().hex payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "title": "my-title", @@ -1268,14 +1255,14 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) edit_payload = { - 
"project_id": project_id, + "git_url": url_components.href, "name": name, } response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) @@ -1285,7 +1272,7 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): assert {} == response.json["result"]["edited"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) @@ -1304,11 +1291,11 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): @retry_failed def test_edit_datasets_view_unset_values(svc_client_with_repo): """Test editing dataset metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo name = uuid.uuid4().hex payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "title": "my-title", @@ -1326,14 +1313,14 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "keywords": None, "images": None, @@ -1352,7 +1339,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): ]["edited"] params_list = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) @@ -1370,12 +1357,12 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): @retry_failed def test_edit_dataset_with_images(svc_client_with_repo): """Edit images of a dataset.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo name = uuid.uuid4().hex payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "title": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], @@ -1393,7 +1380,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): assert payload["name"] == response.json["result"]["name"] params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/datasets.list", query_string=params, headers=headers) @@ -1402,7 +1389,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit reordering and add edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "images": [ {"content_url": "data/renku_logo.png", "position": 1}, @@ -1430,7 +1417,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit with duplicate position edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "images": [ {"content_url": "data/renku_logo.png", "position": 1}, @@ -1445,7 +1432,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit remove images edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "name": name, "images": [], } @@ -1458,7 +1445,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit no change edit_payload = { - "project_id": project_id, + 
"git_url": url_components.href, "name": name, "images": [], } @@ -1491,10 +1478,10 @@ def test_remote_edit_view(svc_client, it_remote_repo_url, identity_headers): @retry_failed def test_protected_branch(svc_client_with_repo): """Test adding a file to protected branch.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) diff --git a/tests/service/views/test_exceptions.py b/tests/service/views/test_exceptions.py index 4289031788..e6f96822da 100644 --- a/tests/service/views/test_exceptions.py +++ b/tests/service/views/test_exceptions.py @@ -22,7 +22,6 @@ from renku.ui.service.config import SVC_ERROR_PROGRAMMING from renku.ui.service.errors import ( - IntermittentProjectIdError, IntermittentTimeoutError, ProgramContentTypeError, UserAnonymousError, @@ -79,11 +78,11 @@ def test_auth_headers_exc(service_allowed_endpoint): def test_content_type_headers_exc(svc_client_with_repo): """Verify exceptions are triggered when missing data.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo headers["Content-Type"] = "Fake" payload = { - "project_id": project_id, + "git_url": url_components.href, "config": { "lfs_threshold": "1b", "renku.autocommit_lfs": "true", @@ -103,10 +102,10 @@ def test_content_type_headers_exc(svc_client_with_repo): @retry_failed def test_migration_required_flag(svc_client_setup): """Check migration required failure.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup payload = { - "project_id": project_id, + "git_url": url_components.href, "name": uuid.uuid4().hex, } @@ -150,21 +149,13 @@ def test_project_uninitialized(svc_client, it_non_renku_repo_url, identity_heade """Check migration required failure.""" payload = {"git_url": it_non_renku_repo_url} - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) + response = svc_client.post("/project.show", data=json.dumps(payload), headers=identity_headers) assert response - assert "result" in response.json - assert "error" not in response.json + assert "error" in response.json + assert response.json["error"]["code"] == 1110 - project_id = response.json["result"]["project_id"] - initialized = response.json["result"]["initialized"] - - assert not initialized - - payload = { - "project_id": project_id, - "name": uuid.uuid4().hex, - } + payload["name"] = uuid.uuid4().hex response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers) @@ -178,17 +169,13 @@ def test_project_uninitialized(svc_client, it_non_renku_repo_url, identity_heade def test_project_no_commits(svc_client, it_no_commit_repo_url, identity_headers): """Check migration required failure.""" payload = {"git_url": it_no_commit_repo_url} - response = svc_client.post("/cache.project_clone", data=json.dumps(payload), headers=identity_headers) + response = svc_client.post("/project.show", data=json.dumps(payload), headers=identity_headers) - assert_rpc_response(response) - project_id = response.json["result"]["project_id"] - initialized = response.json["result"]["initialized"] - assert not initialized + assert response + assert "error" in response.json + assert 
response.json["error"]["code"] == 1110 - payload = { - "project_id": project_id, - "name": uuid.uuid4().hex, - } + payload["name"] = uuid.uuid4().hex response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers) assert_rpc_response(response, "error") @@ -197,7 +184,7 @@ def test_project_no_commits(svc_client, it_no_commit_repo_url, identity_headers) @pytest.mark.service @pytest.mark.integration -@retry_failed +# @retry_failed @pytest.mark.parametrize( "git_url", [ @@ -221,22 +208,6 @@ def test_invalid_git_remote(git_url, svc_client_with_user): assert response_code == code_invalid or response_code == code_timeout -@pytest.mark.service -@pytest.mark.integration -@retry_failed -def test_invalid_project_id(svc_client_with_repo): - """Test error on wrong project_id while showing project metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo - - show_payload = { - "project_id": project_id + "12345", - } - response = svc_client.post("/project.show", data=json.dumps(show_payload), headers=headers) - - assert_rpc_response(response, "error") - assert IntermittentProjectIdError.code == response.json["error"]["code"] - - @pytest.mark.integration @pytest.mark.service def test_user_without_permissons(svc_client_with_user): diff --git a/tests/service/views/test_project_views.py b/tests/service/views/test_project_views.py index be7fbde44b..5c256220ee 100644 --- a/tests/service/views/test_project_views.py +++ b/tests/service/views/test_project_views.py @@ -31,10 +31,10 @@ @retry_failed def test_show_project_view(svc_client_with_repo): """Test show project metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo show_payload = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.post("/project.show", data=json.dumps(show_payload), headers=headers) @@ -85,10 +85,10 @@ def test_show_project_view(svc_client_with_repo): @retry_failed def test_edit_project_view(svc_client_with_repo, custom_metadata, custom_metadata_source): """Test editing project metadata.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "description": "my new title", "creator": {"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}, "custom_metadata": custom_metadata, @@ -106,7 +106,7 @@ def test_edit_project_view(svc_client_with_repo, custom_metadata, custom_metadat } == response.json["result"]["edited"] edit_payload = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.post("/project.edit", data=json.dumps(edit_payload), headers=headers) @@ -120,10 +120,10 @@ def test_edit_project_view(svc_client_with_repo, custom_metadata, custom_metadat @retry_failed def test_edit_project_view_unset(svc_client_with_repo): """Test editing project metadata by unsetting values.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo edit_payload = { - "project_id": project_id, + "git_url": url_components.href, "description": "my new title", "creator": {"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}, "keywords": ["keyword1", "keyword2"], @@ -138,7 +138,7 @@ def test_edit_project_view_unset(svc_client_with_repo): } response = svc_client.post("/project.edit", 
data=json.dumps(edit_payload), headers=headers) - edit_payload = {"project_id": project_id, "custom_metadata": None, "keywords": None} + edit_payload = {"git_url": url_components.href, "custom_metadata": None, "keywords": None} response = svc_client.post("/project.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) @@ -156,10 +156,10 @@ def test_edit_project_view_unset(svc_client_with_repo): @retry_failed def test_edit_project_view_failures(svc_client_with_repo): """Test failures when editing project metadata providing wrong data.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo payload = { - "project_id": project_id, + "git_url": url_components.href, "description": "my new title", "creator": {"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}, "custom_metadata": [ @@ -198,11 +198,11 @@ def test_remote_edit_view(svc_client, it_remote_repo_url, identity_headers): @pytest.mark.service def test_get_lock_status_unlocked(svc_client_setup): """Test getting lock status for an unlocked project.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup response = svc_client.get( "/1.1/project.lock_status", - query_string={"project_id": project_id, "timeout": 1.0}, + query_string={"git_url": url_components.href, "timeout": 1.0}, headers=headers, content_type="text/xml", ) @@ -216,7 +216,7 @@ def test_get_lock_status_unlocked(svc_client_setup): @pytest.mark.service def test_get_lock_status_locked(svc_client_setup): """Test getting lock status for a locked project.""" - svc_client, headers, project_id, _, repository = svc_client_setup + svc_client, headers, project_id, url_components, repository = svc_client_setup def mock_lock(): return portalocker.Lock(f"{repository.path}.lock", flags=portalocker.LOCK_EX, timeout=0) @@ -224,7 +224,7 @@ def mock_lock(): with mock_lock(): start = time.monotonic() response = svc_client.get( - "/1.1/project.lock_status", query_string={"project_id": project_id, "timeout": 1.0}, headers=headers + "/1.1/project.lock_status", query_string={"git_url": url_components.href, "timeout": 1.0}, headers=headers ) assert time.monotonic() - start >= 1.0 @@ -235,10 +235,11 @@ def mock_lock(): @pytest.mark.integration @pytest.mark.service -@pytest.mark.parametrize("query_params", [{"project_id": "dummy"}, {"git_url": "https://example.com/repo.git"}]) -def test_get_lock_status_for_project_not_in_cache(svc_client, identity_headers, query_params): +def test_get_lock_status_for_project_not_in_cache(svc_client, identity_headers): """Test getting lock status for an unlocked project which is not cached.""" - response = svc_client.get("/1.1/project.lock_status", query_string=query_params, headers=identity_headers) + response = svc_client.get( + "/1.1/project.lock_status", query_string={"git_url": "https://example.com/repo.git"}, headers=identity_headers + ) assert_rpc_response(response) assert {"locked"} == set(response.json["result"].keys()) diff --git a/tests/service/views/test_workflow_plan_views.py b/tests/service/views/test_workflow_plan_views.py index 1308ff77a3..ee6cf90209 100644 --- a/tests/service/views/test_workflow_plan_views.py +++ b/tests/service/views/test_workflow_plan_views.py @@ -29,10 +29,10 @@ @retry_failed def test_list_workflow_plans_view(svc_client_with_repo): """Check listing of plans.""" - svc_client, headers, project_id, _ = svc_client_with_repo + 
svc_client, headers, project_id, url_components = svc_client_with_repo params = { - "project_id": project_id, + "git_url": url_components.href, } response = svc_client.get("/workflow_plans.list", query_string=params, headers=headers) @@ -158,9 +158,9 @@ def test_list_workflow_plans_view(svc_client_with_repo): @retry_failed def test_show_workflow_plans_view(plan_id, expected_fields, executions, touches_files, latest, svc_client_with_repo): """Check showing of plans.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo - params = {"project_id": project_id, "plan_id": plan_id} + params = {"git_url": url_components.href, "plan_id": plan_id} response = svc_client.get("/workflow_plans.show", query_string=params, headers=headers) @@ -192,9 +192,9 @@ def test_show_workflow_plans_view(plan_id, expected_fields, executions, touches_ @retry_failed def test_workflow_export(plan_id, svc_client_with_repo, tmp_path): """Check exporting of workflows.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo - params = {"project_id": project_id, "plan_id": plan_id} + params = {"git_url": url_components.href, "plan_id": plan_id} response = svc_client.post("/workflow_plans.export", data=json.dumps(params), headers=headers) @@ -249,9 +249,9 @@ def test_workflow_export(plan_id, svc_client_with_repo, tmp_path): @retry_failed def test_workflow_export_with_values(plan_id, values, expected_cwl_substrings, svc_client_with_repo, tmp_path): """Check exporting of workflows when values are passed.""" - svc_client, headers, project_id, _ = svc_client_with_repo + svc_client, headers, project_id, url_components = svc_client_with_repo - params = {"project_id": project_id, "plan_id": plan_id, "values": values} + params = {"git_url": url_components.href, "plan_id": plan_id, "values": values} response = svc_client.post("/workflow_plans.export", data=json.dumps(params), headers=headers) diff --git a/tests/service/views/v1_0/test_cache_views_1_0.py b/tests/service/views/v1_0/test_cache_views_1_0.py index c9d84d7380..996c75253d 100644 --- a/tests/service/views/v1_0/test_cache_views_1_0.py +++ b/tests/service/views/v1_0/test_cache_views_1_0.py @@ -29,10 +29,12 @@ @pytest.mark.remote_repo("old") def test_execute_migrations_1_0(svc_client_setup): """Check execution of all migrations.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup response = svc_client.post( - "/1.0/cache.migrate", data=json.dumps(dict(project_id=project_id, skip_docker_update=True)), headers=headers + "/1.0/cache.migrate", + data=json.dumps(dict(git_url=url_components.href, skip_docker_update=True)), + headers=headers, ) assert 200 == response.status_code @@ -48,9 +50,11 @@ def test_execute_migrations_1_0(svc_client_setup): @pytest.mark.integration def test_check_migrations_local_1_0(svc_client_setup): """Check if migrations are required for a local project.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup - response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + response = svc_client.get( + "/1.0/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers + ) assert 200 == response.status_code assert not 
response.json["result"]["core_compatibility_status"]["migration_required"] @@ -69,7 +73,7 @@ def test_check_migrations_local_1_0(svc_client_setup): @pytest.mark.integration def test_migrate_wrong_template_source_1_0(svc_client_setup, monkeypatch): """Check if migrations gracefully fail when the project template is not available.""" - svc_client, headers, project_id, _, _ = svc_client_setup + svc_client, headers, project_id, url_components, _ = svc_client_setup # NOTE: fake source with monkeypatch.context() as monkey: @@ -80,7 +84,7 @@ def test_migrate_wrong_template_source_1_0(svc_client_setup, monkeypatch): ) response = svc_client.get( - "/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers + "/1.0/cache.migrations_check", query_string=dict(git_url=url_components.href), headers=headers ) assert_rpc_response(response, "error") diff --git a/tests/utils.py b/tests/utils.py index f0a6e39113..5541a91451 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -62,7 +62,7 @@ def not_raises(): return not_raises() -def make_dataset_add_payload(project_id, urls, name=None): +def make_dataset_add_payload(git_url, urls, name=None): """Make dataset add request payload.""" files = [] for url in urls: @@ -73,7 +73,7 @@ def make_dataset_add_payload(project_id, urls, name=None): files.append({"file_url": url}) return { - "project_id": project_id, + "git_url": git_url, "name": name or uuid.uuid4().hex, "create_dataset": True, "force": False,