From f710119a8da294f7bbf67329af9409b6fa6782eb Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Fri, 17 Feb 2023 11:43:37 +0100 Subject: [PATCH 01/10] fix(service): return proper errors on migrations check --- renku/command/migrate.py | 181 ++++++++++++++---- renku/core/migration/utils/__init__.py | 9 +- renku/ui/cli/migrate.py | 7 +- .../controllers/cache_migrations_check.py | 41 ++-- renku/ui/service/serializers/cache.py | 6 + renku/ui/service/views/error_handlers.py | 47 +++-- renku/ui/service/views/v1/cache.py | 34 +++- renku/version.py | 3 +- tests/service/views/test_cache_views.py | 30 +-- .../views/v1_0/test_cache_views_1_0.py | 47 +++++ 10 files changed, 303 insertions(+), 102 deletions(-) diff --git a/renku/command/migrate.py b/renku/command/migrate.py index d2a56c8a04..0281487be6 100644 --- a/renku/command/migrate.py +++ b/renku/command/migrate.py @@ -17,11 +17,15 @@ # limitations under the License. """Migrate project to the latest Renku version.""" -from typing import List +from dataclasses import dataclass +from enum import Enum, auto +from typing import Dict, List, Optional, Tuple from pydantic import validate_arguments from renku.command.command_builder.command import Command +from renku.core.errors import MinimumVersionError +from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION from renku.domain_model.project_context import project_context SUPPORTED_RENKU_PROJECT = 1 @@ -33,12 +37,98 @@ DOCKERFILE_UPDATE_POSSIBLE = 64 +class MigrationType(Enum): + """Enum for different migration types.""" + + CORE = auto() + DOCKERFILE = auto() + TEMPLATE = auto() + + +@dataclass +class CoreStatusResult: + """Core migration status.""" + + migration_required: bool + project_metadata_version: Optional[int] + current_metadata_version: int + + +@dataclass +class DockerfileStatusResult: + """Docker migration status.""" + + automated_dockerfile_update: bool + newer_renku_available: Optional[bool] + dockerfile_renku_version: Optional[str] + 
latest_renku_version: str + + +@dataclass +class TemplateStatusResult: + """Template migration status.""" + + automated_template_update: bool + newer_template_available: bool + project_template_version: Optional[str] + latest_template_version: Optional[str] + template_source: Optional[str] + template_ref: Optional[str] + template_id: Optional[str] + ssh_supported: bool + + +@dataclass +class MigrationCheckResult: + """Migration check output.""" + + project_supported: bool + core_renku_version: str + project_renku_version: Optional[str] + core_compatibility_status: Optional[CoreStatusResult] + dockerfile_renku_status: Optional[DockerfileStatusResult] + template_status: Optional[TemplateStatusResult] + errors: Optional[Dict[MigrationType, Exception]] = None + + @staticmethod + def from_minimum_version_error(minimum_version_error: MinimumVersionError) -> "MigrationCheckResult": + """Create a migration check when the project isn't supported yet.""" + from renku import __version__ + + return MigrationCheckResult( + project_supported=False, + core_renku_version=str(minimum_version_error.current_version), + project_renku_version=f">={minimum_version_error.minimum_version}", + core_compatibility_status=CoreStatusResult( + migration_required=False, + project_metadata_version=None, + current_metadata_version=SUPPORTED_PROJECT_VERSION, + ), + dockerfile_renku_status=DockerfileStatusResult( + dockerfile_renku_version="unknown", + latest_renku_version=__version__, + newer_renku_available=False, + automated_dockerfile_update=False, + ), + template_status=TemplateStatusResult( + automated_template_update=False, + newer_template_available=False, + template_source="unknown", + template_ref="unknown", + template_id="unknown", + project_template_version="unknown", + latest_template_version="unknown", + ssh_supported=False, + ), + ) + + def migrations_check(): """Return a command for a migrations check.""" return Command().command(_migrations_check).with_database(write=False) -def 
_migrations_check(): +def _migrations_check() -> MigrationCheckResult: """Check migration status of project. Returns: @@ -48,14 +138,35 @@ def _migrations_check(): core_version, latest_version = _migrations_versions() - return { - "project_supported": not is_project_unsupported(), - "core_renku_version": core_version, - "project_renku_version": latest_version, - "core_compatibility_status": _metadata_migration_check(), - "dockerfile_renku_status": _dockerfile_migration_check(), - "template_status": _template_migration_check(), - } + errors: Dict[MigrationType, Exception] = {} + + try: + core_compatibility_status = _metadata_migration_check() + except Exception as e: + core_compatibility_status = None + errors[MigrationType.CORE] = e + + try: + docker_status = _dockerfile_migration_check() + except Exception as e: + docker_status = None + errors[MigrationType.CORE] = e + + try: + template_status = _template_migration_check() + except Exception as e: + template_status = None + errors[MigrationType.CORE] = e + + return MigrationCheckResult( + project_supported=not is_project_unsupported(), + core_renku_version=core_version, + project_renku_version=latest_version, + core_compatibility_status=core_compatibility_status, + dockerfile_renku_status=docker_status, + template_status=template_status, + errors=errors, + ) def migrations_versions(): @@ -63,7 +174,7 @@ def migrations_versions(): return Command().command(_migrations_versions).lock_project().with_database() -def _migrations_versions(): +def _migrations_versions() -> Tuple[str, Optional[str]]: """Return source and destination migration versions. Returns: @@ -82,7 +193,7 @@ def _migrations_versions(): return __version__, latest_agent -def _template_migration_check(): +def _template_migration_check() -> TemplateStatusResult: """Return template migration status. 
Returns: @@ -108,16 +219,16 @@ def _template_migration_check(): update_available, update_allowed, current_version, new_version = check_for_template_update(project) - return { - "automated_template_update": update_allowed, - "newer_template_available": update_available, - "project_template_version": current_version, - "latest_template_version": new_version, - "template_source": template_source, - "template_ref": template_ref, - "template_id": template_id, - "ssh_supported": ssh_supported, - } + return TemplateStatusResult( + automated_template_update=update_allowed, + newer_template_available=update_available, + project_template_version=current_version, + latest_template_version=new_version, + template_source=template_source, + template_ref=template_ref, + template_id=template_id, + ssh_supported=ssh_supported, + ) def dockerfile_migration_check(): @@ -125,7 +236,7 @@ def dockerfile_migration_check(): return Command().command(_dockerfile_migration_check) -def _dockerfile_migration_check(): +def _dockerfile_migration_check() -> DockerfileStatusResult: """Return Dockerfile migration status. Returns: @@ -136,12 +247,12 @@ def _dockerfile_migration_check(): automated_dockerfile_update, newer_renku_available, dockerfile_renku_version = is_docker_update_possible() - return { - "automated_dockerfile_update": automated_dockerfile_update, - "newer_renku_available": newer_renku_available, - "dockerfile_renku_version": dockerfile_renku_version, - "latest_renku_version": __version__, - } + return DockerfileStatusResult( + automated_dockerfile_update=automated_dockerfile_update, + newer_renku_available=newer_renku_available, + dockerfile_renku_version=dockerfile_renku_version, + latest_renku_version=__version__, + ) def metadata_migration_check(): @@ -149,7 +260,7 @@ def metadata_migration_check(): return Command().command(_metadata_migration_check) -def _metadata_migration_check(): +def _metadata_migration_check() -> CoreStatusResult: """Return metadata migration status. 
Returns: @@ -157,11 +268,11 @@ def _metadata_migration_check(): """ from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION, get_project_version, is_migration_required - return { - "migration_required": is_migration_required(), - "project_metadata_version": get_project_version(), - "current_metadata_version": SUPPORTED_PROJECT_VERSION, - } + return CoreStatusResult( + migration_required=is_migration_required(), + project_metadata_version=get_project_version(), + current_metadata_version=SUPPORTED_PROJECT_VERSION, + ) def migrate_project_command(): diff --git a/renku/core/migration/utils/__init__.py b/renku/core/migration/utils/__init__.py index 2c5f5fd7d3..a4bdc112c1 100644 --- a/renku/core/migration/utils/__init__.py +++ b/renku/core/migration/utils/__init__.py @@ -21,6 +21,7 @@ import posixpath import threading import uuid +from typing import Optional, cast from urllib.parse import ParseResult, quote, urljoin, urlparse from renku.core.util.yaml import read_yaml @@ -164,7 +165,7 @@ def read_project_version() -> str: return read_project_version_from_yaml(yaml_data) -def read_latest_agent(): +def read_latest_agent() -> Optional[str]: """Read project version from metadata file.""" import pyld @@ -178,16 +179,16 @@ def read_latest_agent(): yaml_data = read_yaml(metadata_path) jsonld = pyld.jsonld.expand(yaml_data)[0] jsonld = normalize(jsonld) - return _get_jsonld_property(jsonld, "http://schema.org/agent", "pre-0.11.0") + return cast(str, _get_jsonld_property(jsonld, "http://schema.org/agent", "pre-0.11.0")) -def read_project_version_from_yaml(yaml_data): +def read_project_version_from_yaml(yaml_data) -> str: """Read project version from YAML data.""" import pyld jsonld = pyld.jsonld.expand(yaml_data)[0] jsonld = normalize(jsonld) - return _get_jsonld_property(jsonld, "http://schema.org/schemaVersion", "1") + return cast(str, _get_jsonld_property(jsonld, "http://schema.org/schemaVersion", "1")) def _get_jsonld_property(jsonld, property_name, default=None): 
diff --git a/renku/ui/cli/migrate.py b/renku/ui/cli/migrate.py index 83fff78712..77ab3d56c5 100644 --- a/renku/ui/cli/migrate.py +++ b/renku/ui/cli/migrate.py @@ -153,7 +153,12 @@ def migrationscheck(): from renku.command.migrate import migrations_check result = migrations_check().lock_project().build().execute().output - click.echo(json.dumps(result)) + result_dict = result.as_dict() + + if result_dict.get("errors"): + for key, value in result_dict["errors"]: + result_dict["errors"][key] = str(value) + click.echo(json.dumps(result_dict)) @click.command(hidden=True) diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index ff6317b935..0b123efca7 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -18,18 +18,17 @@ """Renku service migrations check controller.""" import tempfile +from dataclasses import asdict from pathlib import Path -from renku.command.migrate import migrations_check +from renku.command.migrate import MigrationCheckResult, migrations_check from renku.core.errors import AuthenticationError, MinimumVersionError, ProjectNotFound, RenkuException -from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION from renku.core.util.contexts import renku_project_context from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOperationMixin from renku.ui.service.interfaces.git_api_provider import IGitAPIProvider from renku.ui.service.serializers.cache import ProjectMigrationCheckRequest, ProjectMigrationCheckResponseRPC from renku.ui.service.views import result_response -from renku.version import __version__ class MigrationsCheckCtrl(ServiceCtrl, RenkuOperationMixin): @@ -78,34 +77,12 @@ def renku_op(self): try: return migrations_check().build().execute().output except MinimumVersionError as e: - return { - "project_supported": False, - 
"core_renku_version": e.current_version, - "project_renku_version": f">={e.minimum_version}", - "core_compatibility_status": { - "migration_required": False, - "project_metadata_version": f">={SUPPORTED_PROJECT_VERSION}", - "current_metadata_version": SUPPORTED_PROJECT_VERSION, - }, - "dockerfile_renku_status": { - "dockerfile_renku_version": "unknown", - "latest_renku_version": __version__, - "newer_renku_available": False, - "automated_dockerfile_update": False, - }, - "template_status": { - "automated_template_update": False, - "newer_template_available": False, - "template_source": "unknown", - "template_ref": "unknown", - "template_id": "unknown", - "project_template_version": "unknown", - "latest_template_version": "unknown", - }, - } + return MigrationCheckResult.from_minimum_version_error(e) def to_response(self): """Execute controller flow and serialize to service response.""" + from renku.ui.service.views.error_handlers import pretty_print_error + if "project_id" in self.context: result = self.execute_op() else: @@ -117,4 +94,10 @@ def to_response(self): except BaseException: result = self.execute_op() - return result_response(self.RESPONSE_SERIALIZER, result) + result_dict = asdict(result) + + if result.errors: + for key, value in result.errors.items(): + result_dict["errors"][key] = pretty_print_error(value) + + return result_response(self.RESPONSE_SERIALIZER, result_dict) diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index a5861c0e38..0ba28c39a9 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -23,6 +23,7 @@ from marshmallow import Schema, ValidationError, fields, post_load, pre_load, validates_schema from werkzeug.utils import secure_filename +from renku.command.migrate import MigrationType from renku.core import errors from renku.core.util.os import normalize_to_ascii from renku.domain_model.git import GitURL @@ -368,6 +369,11 @@ class 
ProjectMigrationCheckResponse(Schema): TemplateStatusResponse, metadata={"description": "Fields detailing the status of the project template used by this project."}, ) + errors = fields.Dict( + fields.Enum(MigrationType), + fields.Dict, + metadata={"description": "Errors if there were any (corresponding entry will be empty)."}, + ) class ProjectMigrationCheckResponseRPC(JsonRPCResponse): diff --git a/renku/ui/service/views/error_handlers.py b/renku/ui/service/views/error_handlers.py index 3169b5f053..73b2b3edfa 100644 --- a/renku/ui/service/views/error_handlers.py +++ b/renku/ui/service/views/error_handlers.py @@ -86,7 +86,7 @@ def handle_redis_except(f): """Wrapper which handles Redis exceptions.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -101,7 +101,6 @@ def decorated_function(*args, **kwargs): def handle_validation_except(f): """Wrapper which handles marshmallow `ValidationError`.""" - # noqa @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -128,7 +127,7 @@ def decorated_function(*args, **kwargs): def handle_jwt_except(f): """Wrapper which handles invalid JWT.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -142,7 +141,7 @@ def decorated_function(*args, **kwargs): def handle_renku_except(f): """Wrapper which handles `RenkuException`.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -162,7 +161,7 @@ def decorated_function(*args, **kwargs): def handle_git_except(f): """Wrapper which handles `RenkuException`.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -186,7 +185,7 @@ def decorated_function(*args, **kwargs): def handle_base_except(f): """Wrapper which handles base exceptions.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -212,7 
+211,7 @@ def decorated_function(*args, **kwargs): def handle_common_except(f): """Handle common exceptions.""" - # noqa + @wraps(f) def dec(*args, **kwargs): """Decorated function.""" @@ -232,7 +231,7 @@ def _wrapped(*args_, **kwargs_): def handle_templates_read_errors(f): """Wrapper which handles reading templates errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -256,7 +255,7 @@ def decorated_function(*args, **kwargs): @handle_templates_read_errors def handle_templates_create_errors(f): """Wrapper which handles template creating projects errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -288,7 +287,7 @@ def get_schema_error_message(e): def handle_project_write_errors(f): """Wrapper which handles writing project metadata errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -304,7 +303,7 @@ def decorated_function(*args, **kwargs): def handle_config_read_errors(f): """Wrapper which handles reading config errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -321,7 +320,7 @@ def decorated_function(*args, **kwargs): @handle_config_read_errors def handle_config_write_errors(f): """Wrapper which handles setting config errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -341,7 +340,7 @@ def decorated_function(*args, **kwargs): def handle_datasets_write_errors(f): """Wrapper which handles datasets write errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -372,7 +371,7 @@ def decorated_function(*args, **kwargs): def handle_workflow_errors(f): """Wrapper which handles workflow errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -388,7 +387,7 @@ def 
decorated_function(*args, **kwargs): def handle_datasets_unlink_errors(f): """Wrapper which handles datasets unlink errors.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -408,7 +407,7 @@ def decorated_function(*args, **kwargs): def handle_migration_read_errors(f): """Wrapper which handles migrations read exceptions.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -427,7 +426,7 @@ def decorated_function(*args, **kwargs): @handle_migration_read_errors def handle_migration_write_errors(f): """Wrapper which handles migrations write exceptions.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -441,7 +440,7 @@ def decorated_function(*args, **kwargs): def handle_graph_errors(f): """Wrapper which handles graph exceptions.""" - # noqa + @wraps(f) def decorated_function(*args, **kwargs): """Represents decorated function.""" @@ -452,3 +451,15 @@ def decorated_function(*args, **kwargs): raise ProgramGraphCorruptError(e) return decorated_function + + +def pretty_print_error(error: Exception): + """Use error handlers to pretty print an exception.""" + + @handle_common_except + @handle_migration_read_errors + def _fake_error_source(): + raise error + + response = _fake_error_source() + return response.json["error"] diff --git a/renku/ui/service/views/v1/cache.py b/renku/ui/service/views/v1/cache.py index 7beb4dae94..65c33f6151 100644 --- a/renku/ui/service/views/v1/cache.py +++ b/renku/ui/service/views/v1/cache.py @@ -15,13 +15,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-"""Renku service cache views.""" +"""Renku service cache views for v1.""" +from dataclasses import asdict + from flask import request +from renku.core.errors import AuthenticationError, ProjectNotFound from renku.ui.service.controllers.cache_migrate_project import MigrateProjectCtrl from renku.ui.service.controllers.cache_migrations_check import MigrationsCheckCtrl from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.serializers.v1.cache import ProjectMigrateResponseRPC_1_0, ProjectMigrationCheckResponseRPC_1_5 +from renku.ui.service.views import result_response from renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( @@ -84,9 +88,33 @@ def migration_check_project_view_1_5(user_data, cache): tags: - cache """ + + from flask import jsonify + + from renku.ui.service.serializers.rpc import JsonRPCResponse + from renku.ui.service.views.error_handlers import pretty_print_error + ctrl = MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()) - ctrl.RESPONSE_SERIALIZER = ProjectMigrationCheckResponseRPC_1_5() # type: ignore - return ctrl.to_response() + + if "project_id" in ctrl.context: # type: ignore + result = asdict(ctrl.execute_op()) + else: + # NOTE: use quick flow but fallback to regular flow in case of unexpected exceptions + try: + result = asdict(ctrl._fast_op_without_cache()) + except (AuthenticationError, ProjectNotFound): + raise + except BaseException: + result = asdict(ctrl.execute_op()) + + if result.get("errors", None): + error = result["errors"][list(result["errors"].keys())[0]] + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(error)})) + + else: + del result["errors"] + + return result_response(ProjectMigrationCheckResponseRPC_1_5(), result) def 
add_v1_specific_endpoints(cache_blueprint): diff --git a/renku/version.py b/renku/version.py index af0b53fcbb..e0ea0c6265 100644 --- a/renku/version.py +++ b/renku/version.py @@ -18,13 +18,14 @@ """Version information for Renku.""" import re +from typing import cast try: from importlib.metadata import distribution, version except ImportError: from importlib_metadata import distribution, version # type: ignore -__version__ = version("renku") +__version__ = cast(str, version("renku")) __template_version__ = "0.4.1" __minimum_project_version__ = "2.1.0" diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index fbfffa8916..30beadcce7 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -34,13 +34,7 @@ from renku.domain_model.provenance.agent import Person from renku.infrastructure.gateway.dataset_gateway import DatasetGateway from renku.infrastructure.repository import Repository -from renku.ui.service.errors import ( - IntermittentFileExistsError, - IntermittentProjectTemplateUnavailable, - UserAnonymousError, - UserProjectTemplateReferenceError, - UserRepoUrlInvalidError, -) +from renku.ui.service.errors import IntermittentFileExistsError, UserAnonymousError, UserRepoUrlInvalidError from renku.ui.service.jobs.cleanup import cache_files_cleanup from renku.ui.service.serializers.headers import JWT_TOKEN_SECRET from tests.utils import assert_rpc_response, retry_failed @@ -877,6 +871,8 @@ def test_check_migrations_local(svc_client_setup): assert "template_ref" in response.json["result"]["template_status"] assert "template_id" in response.json["result"]["template_status"] assert "automated_template_update" in response.json["result"]["template_status"] + assert "errors" in response.json["result"] + assert not response.json["result"]["errors"] assert "ssh_supported" in response.json["result"]["template_status"] assert not response.json["result"]["template_status"]["ssh_supported"] @@ 
-897,6 +893,8 @@ def test_check_migrations_remote(svc_client, identity_headers, it_remote_repo_ur assert response.json["result"]["project_supported"] assert response.json["result"]["project_renku_version"] assert response.json["result"]["core_renku_version"] + assert "errors" in response.json["result"] + assert not response.json["result"]["errors"] @pytest.mark.service @@ -947,8 +945,13 @@ def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) - assert_rpc_response(response, "error") - assert IntermittentProjectTemplateUnavailable.code == response.json["error"]["code"] + assert_rpc_response(response) + assert "errors" in response.json["result"] + assert len(response.json["result"]["errors"]) == 1 + assert response.json["result"]["errors"].get("CORE").get("userMessage") == ( + "The reference template for the project is currently unavailable. " + "It may be a temporary problem, or the template may not be accessible anymore." 
+ ) @pytest.mark.service @@ -965,8 +968,13 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) - assert_rpc_response(response, "error") - assert UserProjectTemplateReferenceError.code == response.json["error"]["code"] + assert_rpc_response(response) + assert "errors" in response.json["result"] + assert len(response.json["result"]["errors"]) == 1 + assert ( + response.json["result"]["errors"][0] + == "Template status: Cannot clone template repository from https://FAKE_URL" + ) @pytest.mark.service diff --git a/tests/service/views/v1_0/test_cache_views_1_0.py b/tests/service/views/v1_0/test_cache_views_1_0.py index 836a458dbe..501134a262 100644 --- a/tests/service/views/v1_0/test_cache_views_1_0.py +++ b/tests/service/views/v1_0/test_cache_views_1_0.py @@ -17,9 +17,13 @@ # limitations under the License. """Renku service cache view tests.""" import json +from unittest.mock import MagicMock import pytest +from renku.ui.service.errors import IntermittentProjectTemplateUnavailable +from tests.utils import assert_rpc_response + @pytest.mark.service @pytest.mark.integration @@ -38,3 +42,46 @@ def test_execute_migrations_1_0(svc_client_setup): ) assert "warnings" not in response.json["result"] assert "errors" not in response.json["result"] + + +@pytest.mark.service +@pytest.mark.integration +def test_check_migrations_local_1_0(svc_client_setup): + """Check if migrations are required for a local project.""" + svc_client, headers, project_id, _, _ = svc_client_setup + + response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) + assert 200 == response.status_code + + assert response.json["result"]["core_compatibility_status"]["migration_required"] + assert not response.json["result"]["template_status"]["newer_template_available"] + assert not 
response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"] + assert response.json["result"]["project_supported"] + assert response.json["result"]["project_renku_version"] + assert response.json["result"]["core_renku_version"] + assert "template_source" in response.json["result"]["template_status"] + assert "template_ref" in response.json["result"]["template_status"] + assert "template_id" in response.json["result"]["template_status"] + assert "automated_template_update" in response.json["result"]["template_status"] + + +@pytest.mark.service +@pytest.mark.integration +def test_migrate_wrong_template_source_1_0(svc_client_setup, monkeypatch): + """Check if migrations gracefully fail when the project template is not available.""" + svc_client, headers, project_id, _, _ = svc_client_setup + + # NOTE: fake source + with monkeypatch.context() as monkey: + import renku.core.template.usecase + + monkey.setattr( + renku.core.template.usecase.TemplateMetadata, "source", property(MagicMock(return_value="https://FAKE_URL")) + ) + + response = svc_client.get( + "/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers + ) + + assert_rpc_response(response, "error") + assert IntermittentProjectTemplateUnavailable.code == response.json["error"]["code"] From 39dbf8b16f197f4df6b43fe39e50d1660f4cfdcb Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Mon, 27 Feb 2023 09:29:46 +0100 Subject: [PATCH 02/10] fix tests --- renku/ui/cli/migrate.py | 3 ++- renku/ui/service/controllers/graph_export.py | 2 +- tests/cli/test_migrate.py | 1 + .../controllers/utils/test_remote_project.py | 24 +++++++++---------- .../service/views/test_api_versions_views.py | 2 +- tests/service/views/test_cache_views.py | 8 +++---- 6 files changed, 21 insertions(+), 19 deletions(-) diff --git a/renku/ui/cli/migrate.py b/renku/ui/cli/migrate.py index 77ab3d56c5..faa3e9a9b5 100644 --- a/renku/ui/cli/migrate.py +++ b/renku/ui/cli/migrate.py @@ -60,6 +60,7 @@ 
""" import json import os +from dataclasses import asdict import click @@ -153,7 +154,7 @@ def migrationscheck(): from renku.command.migrate import migrations_check result = migrations_check().lock_project().build().execute().output - result_dict = result.as_dict() + result_dict = asdict(result) if result_dict.get("errors"): for key, value in result_dict["errors"]: diff --git a/renku/ui/service/controllers/graph_export.py b/renku/ui/service/controllers/graph_export.py index d869ddec16..247aa8dbef 100644 --- a/renku/ui/service/controllers/graph_export.py +++ b/renku/ui/service/controllers/graph_export.py @@ -56,7 +56,7 @@ def renku_op(self): """Renku operation for the controller.""" result = migrations_check().build().execute().output - if not result["project_supported"]: + if not result.project_supported: raise RenkuException("project not supported") callback_payload = { diff --git a/tests/cli/test_migrate.py b/tests/cli/test_migrate.py index e37264f6e0..8096d8f806 100644 --- a/tests/cli/test_migrate.py +++ b/tests/cli/test_migrate.py @@ -69,6 +69,7 @@ def test_migration_check(isolated_runner, project): "core_compatibility_status", "dockerfile_renku_status", "template_status", + "errors", } assert output["core_compatibility_status"].keys() == { "project_metadata_version", diff --git a/tests/service/controllers/utils/test_remote_project.py b/tests/service/controllers/utils/test_remote_project.py index cb3f75919f..97779178bd 100644 --- a/tests/service/controllers/utils/test_remote_project.py +++ b/tests/service/controllers/utils/test_remote_project.py @@ -99,15 +99,15 @@ def test_remote_project_context(): with ctrl.remote() as project_path: assert project_path result = migrations_check().build().execute().output - assert result["core_renku_version"] == renku.__version__ - assert result["project_renku_version"] == "pre-0.11.0" - assert result["core_compatibility_status"]["migration_required"] is True - assert result["template_status"]["newer_template_available"] is 
False - assert result["template_status"]["project_template_version"] is None - assert result["template_status"]["latest_template_version"] is None - assert result["template_status"]["template_source"] is None - assert result["template_status"]["template_ref"] is None - assert result["template_status"]["template_id"] is None - assert result["template_status"]["automated_template_update"] is True - assert result["dockerfile_renku_status"]["automated_dockerfile_update"] is False - assert result["project_supported"] is True + assert result.core_renku_version == renku.__version__ + assert result.project_renku_version == "pre-0.11.0" + assert result.core_compatibility_status.migration_required is True + assert result.template_status.newer_template_available is False + assert result.template_status.project_template_version is None + assert result.template_status.latest_template_version is None + assert result.template_status.template_source is None + assert result.template_status.template_ref is None + assert result.template_status.template_id is None + assert result.template_status.automated_template_update is True + assert result.dockerfile_renku_status.automated_dockerfile_update is False + assert result.project_supported is True diff --git a/tests/service/views/test_api_versions_views.py b/tests/service/views/test_api_versions_views.py index c8e8e951ba..3c77aa5173 100644 --- a/tests/service/views/test_api_versions_views.py +++ b/tests/service/views/test_api_versions_views.py @@ -49,7 +49,7 @@ def test_versions_differences(svc_client, identity_headers, it_remote_repo_url): assert 200 == response_default.status_code assert response_default.json["result"]["core_compatibility_status"]["migration_required"] is True assert response_default.json["result"].keys() == response_new.json["result"].keys() - assert response_default.json["result"].keys() == response_old.json["result"].keys() + assert response_default.json["result"].keys() != response_old.json["result"].keys() assert 
( response_default.json["result"]["template_status"].keys() == response_new.json["result"]["template_status"].keys() diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index 30beadcce7..c7910dd4f7 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -971,10 +971,10 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): assert_rpc_response(response) assert "errors" in response.json["result"] assert len(response.json["result"]["errors"]) == 1 - assert ( - response.json["result"]["errors"][0] - == "Template status: Cannot clone template repository from https://FAKE_URL" - ) + + assert "Cannot find the reference 'FAKE_REF' in the template repository" in response.json["result"][ + "errors" + ].get("CORE").get("userMessage") @pytest.mark.service From 59219db6288829e9093cbcb52ddc947d5b22ed88 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Mon, 27 Feb 2023 16:10:38 +0100 Subject: [PATCH 03/10] change response format --- renku/command/migrate.py | 36 ++++------- .../controllers/cache_migrations_check.py | 12 +++- renku/ui/service/serializers/cache.py | 59 ++++++++++++++++--- renku/ui/service/serializers/common.py | 11 ++++ renku/ui/service/views/v1/cache.py | 20 ++++--- tests/cli/test_migrate.py | 1 - tests/service/views/test_cache_views.py | 22 +++---- 7 files changed, 99 insertions(+), 62 deletions(-) diff --git a/renku/command/migrate.py b/renku/command/migrate.py index 0281487be6..5c5110a732 100644 --- a/renku/command/migrate.py +++ b/renku/command/migrate.py @@ -18,8 +18,7 @@ """Migrate project to the latest Renku version.""" from dataclasses import dataclass -from enum import Enum, auto -from typing import Dict, List, Optional, Tuple +from typing import List, Optional, Tuple, Union from pydantic import validate_arguments @@ -37,14 +36,6 @@ DOCKERFILE_UPDATE_POSSIBLE = 64 -class MigrationType(Enum): - """Enum for different migration types.""" - - 
CORE = auto() - DOCKERFILE = auto() - TEMPLATE = auto() - - @dataclass class CoreStatusResult: """Core migration status.""" @@ -85,10 +76,9 @@ class MigrationCheckResult: project_supported: bool core_renku_version: str project_renku_version: Optional[str] - core_compatibility_status: Optional[CoreStatusResult] - dockerfile_renku_status: Optional[DockerfileStatusResult] - template_status: Optional[TemplateStatusResult] - errors: Optional[Dict[MigrationType, Exception]] = None + core_compatibility_status: Union[CoreStatusResult, Exception] + dockerfile_renku_status: Union[DockerfileStatusResult, Exception] + template_status: Union[TemplateStatusResult, Exception] @staticmethod def from_minimum_version_error(minimum_version_error: MinimumVersionError) -> "MigrationCheckResult": @@ -138,25 +128,20 @@ def _migrations_check() -> MigrationCheckResult: core_version, latest_version = _migrations_versions() - errors: Dict[MigrationType, Exception] = {} - try: - core_compatibility_status = _metadata_migration_check() + core_compatibility_status: Union[CoreStatusResult, Exception] = _metadata_migration_check() except Exception as e: - core_compatibility_status = None - errors[MigrationType.CORE] = e + core_compatibility_status = e try: - docker_status = _dockerfile_migration_check() + docker_status: Union[DockerfileStatusResult, Exception] = _dockerfile_migration_check() except Exception as e: - docker_status = None - errors[MigrationType.CORE] = e + docker_status = e try: - template_status = _template_migration_check() + template_status: Union[TemplateStatusResult, Exception] = _template_migration_check() except Exception as e: - template_status = None - errors[MigrationType.CORE] = e + template_status = e return MigrationCheckResult( project_supported=not is_project_unsupported(), @@ -165,7 +150,6 @@ def _migrations_check() -> MigrationCheckResult: core_compatibility_status=core_compatibility_status, dockerfile_renku_status=docker_status, template_status=template_status, - 
errors=errors, ) diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index 0b123efca7..0bcab018cc 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -96,8 +96,14 @@ def to_response(self): result_dict = asdict(result) - if result.errors: - for key, value in result.errors.items(): - result_dict["errors"][key] = pretty_print_error(value) + # NOTE: Pretty-print errors for the UI + if isinstance(result.template_status, Exception): + result_dict["template_status"] = pretty_print_error(result.template_status) + + if isinstance(result.dockerfile_renku_status, Exception): + result_dict["dockerfile_renku_status"] = pretty_print_error(result.dockerfile_renku_status) + + if isinstance(result.core_compatibility_status, Exception): + result_dict["core_compatibility_status"] = pretty_print_error(result.core_compatibility_status) return result_response(self.RESPONSE_SERIALIZER, result_dict) diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index 0ba28c39a9..423a7a5316 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -21,9 +21,9 @@ from urllib.parse import urlparse from marshmallow import Schema, ValidationError, fields, post_load, pre_load, validates_schema +from marshmallow_oneofschema import OneOfSchema from werkzeug.utils import secure_filename -from renku.command.migrate import MigrationType from renku.core import errors from renku.core.util.os import normalize_to_ascii from renku.domain_model.git import GitURL @@ -31,6 +31,7 @@ from renku.ui.service.serializers.common import ( ArchiveSchema, AsyncSchema, + ErrorResponse, FileDetailsSchema, LocalRepositorySchema, RemoteRepositorySchema, @@ -276,7 +277,7 @@ class ProjectMigrationCheckRequest(LocalRepositorySchema, RemoteRepositorySchema """Request schema for project migration check.""" 
-class ProjectCompatibilityResponse(Schema): +class ProjectCompatibilityResponseDetail(Schema): """Response schema outlining service compatibility for migrations check.""" project_metadata_version = fields.String( @@ -290,7 +291,22 @@ class ProjectCompatibilityResponse(Schema): ) -class DockerfileStatusResponse(Schema): +class ProjectCompatibilityResponse(OneOfSchema): + """Combined schema of DockerfileStatusResponseDetail or Exception.""" + + type_schemas = {"detail": ProjectCompatibilityResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import CoreStatusResult + + if isinstance(obj, CoreStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + + return "error" + + +class DockerfileStatusResponseDetail(Schema): """Response schema outlining dockerfile status for migrations check.""" newer_renku_available = fields.Boolean( @@ -307,7 +323,22 @@ class DockerfileStatusResponse(Schema): dockerfile_renku_version = fields.String(metadata={"description": "Version of Renku specified in the Dockerfile."}) -class TemplateStatusResponse(Schema): +class DockerfileStatusResponse(OneOfSchema): + """Combined schema of DockerfileStatusResponseDetail or Exception.""" + + type_schemas = {"detail": DockerfileStatusResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import DockerfileStatusResult + + if isinstance(obj, DockerfileStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + + return "error" + + +class TemplateStatusResponseDetail(Schema): """Response schema outlining template status for migrations check.""" automated_template_update = fields.Boolean( @@ -345,6 +376,21 @@ class TemplateStatusResponse(Schema): ) +class TemplateStatusResponse(OneOfSchema): + """Combined schema of TemplateStatusResponseDetail or Exception.""" + + type_schemas = {"detail": 
TemplateStatusResponseDetail, "error": ErrorResponse} + + def get_obj_type(self, obj): + """Get type from object.""" + from renku.command.migrate import TemplateStatusResult + + if isinstance(obj, TemplateStatusResult) or (isinstance(obj, dict) and "userMessage" not in obj): + return "detail" + + return "error" + + class ProjectMigrationCheckResponse(Schema): """Response schema for project migration check.""" @@ -369,11 +415,6 @@ class ProjectMigrationCheckResponse(Schema): TemplateStatusResponse, metadata={"description": "Fields detailing the status of the project template used by this project."}, ) - errors = fields.Dict( - fields.Enum(MigrationType), - fields.Dict, - metadata={"description": "Errors if there were any (corresponding entry will be empty)."}, - ) class ProjectMigrationCheckResponseRPC(JsonRPCResponse): diff --git a/renku/ui/service/serializers/common.py b/renku/ui/service/serializers/common.py index 41c70b5fd8..a9302f34e6 100644 --- a/renku/ui/service/serializers/common.py +++ b/renku/ui/service/serializers/common.py @@ -131,3 +131,14 @@ class DelayedResponseRPC(JsonRPCResponse): """RPC response schema for project migrate.""" result = fields.Nested(JobDetailsResponse) + + +class ErrorResponse(Schema): + """Renku Service Error Response.""" + + code = fields.Integer(required=True) + userMessage = fields.String(required=True) + devMessage = fields.String(required=True) + userReference = fields.String() + devReference = fields.String() + sentry = fields.String() diff --git a/renku/ui/service/views/v1/cache.py b/renku/ui/service/views/v1/cache.py index 65c33f6151..2cdefe3e09 100644 --- a/renku/ui/service/views/v1/cache.py +++ b/renku/ui/service/views/v1/cache.py @@ -97,24 +97,26 @@ def migration_check_project_view_1_5(user_data, cache): ctrl = MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()) if "project_id" in ctrl.context: # type: ignore - result = asdict(ctrl.execute_op()) + result = ctrl.execute_op() else: # NOTE: use 
quick flow but fallback to regular flow in case of unexpected exceptions try: - result = asdict(ctrl._fast_op_without_cache()) + result = ctrl._fast_op_without_cache() except (AuthenticationError, ProjectNotFound): raise except BaseException: - result = asdict(ctrl.execute_op()) + result = ctrl.execute_op() - if result.get("errors", None): - error = result["errors"][list(result["errors"].keys())[0]] - return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(error)})) + if isinstance(result.core_compatibility_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.core_compatibility_status)})) - else: - del result["errors"] + if isinstance(result.template_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.template_status)})) + + if isinstance(result.dockerfile_renku_status, Exception): + return jsonify(JsonRPCResponse().dump({"error": pretty_print_error(result.dockerfile_renku_status)})) - return result_response(ProjectMigrationCheckResponseRPC_1_5(), result) + return result_response(ProjectMigrationCheckResponseRPC_1_5(), asdict(result)) def add_v1_specific_endpoints(cache_blueprint): diff --git a/tests/cli/test_migrate.py b/tests/cli/test_migrate.py index 8096d8f806..e37264f6e0 100644 --- a/tests/cli/test_migrate.py +++ b/tests/cli/test_migrate.py @@ -69,7 +69,6 @@ def test_migration_check(isolated_runner, project): "core_compatibility_status", "dockerfile_renku_status", "template_status", - "errors", } assert output["core_compatibility_status"].keys() == { "project_metadata_version", diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index c7910dd4f7..dfd50ff955 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -871,8 +871,6 @@ def test_check_migrations_local(svc_client_setup): assert "template_ref" in response.json["result"]["template_status"] assert "template_id" in 
response.json["result"]["template_status"] assert "automated_template_update" in response.json["result"]["template_status"] - assert "errors" in response.json["result"] - assert not response.json["result"]["errors"] assert "ssh_supported" in response.json["result"]["template_status"] assert not response.json["result"]["template_status"]["ssh_supported"] @@ -893,8 +891,6 @@ def test_check_migrations_remote(svc_client, identity_headers, it_remote_repo_ur assert response.json["result"]["project_supported"] assert response.json["result"]["project_renku_version"] assert response.json["result"]["core_renku_version"] - assert "errors" in response.json["result"] - assert not response.json["result"]["errors"] @pytest.mark.service @@ -946,11 +942,10 @@ def test_migrate_wrong_template_source(svc_client_setup, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) assert_rpc_response(response) - assert "errors" in response.json["result"] - assert len(response.json["result"]["errors"]) == 1 - assert response.json["result"]["errors"].get("CORE").get("userMessage") == ( - "The reference template for the project is currently unavailable. " - "It may be a temporary problem, or the template may not be accessible anymore." 
+ + assert response.json["result"].get("template_status", {}).get("code") == 3140 + assert "Error accessing the project template" in response.json["result"].get("template_status", {}).get( + "devMessage" ) @@ -969,12 +964,11 @@ def test_migrate_wrong_template_ref(svc_client_setup, template, monkeypatch): response = svc_client.get("/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) assert_rpc_response(response) - assert "errors" in response.json["result"] - assert len(response.json["result"]["errors"]) == 1 - assert "Cannot find the reference 'FAKE_REF' in the template repository" in response.json["result"][ - "errors" - ].get("CORE").get("userMessage") + assert response.json["result"].get("template_status", {}).get("code") == 1141 + assert "Cannot find the reference 'FAKE_REF' in the template repository" in response.json["result"].get( + "template_status", {} + ).get("devMessage") @pytest.mark.service From a8957168d8fe15b60d38259a1ad2b71d741f10c9 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Mon, 27 Feb 2023 17:26:40 +0100 Subject: [PATCH 04/10] fix test --- tests/service/views/test_api_versions_views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/service/views/test_api_versions_views.py b/tests/service/views/test_api_versions_views.py index 3c77aa5173..c8e8e951ba 100644 --- a/tests/service/views/test_api_versions_views.py +++ b/tests/service/views/test_api_versions_views.py @@ -49,7 +49,7 @@ def test_versions_differences(svc_client, identity_headers, it_remote_repo_url): assert 200 == response_default.status_code assert response_default.json["result"]["core_compatibility_status"]["migration_required"] is True assert response_default.json["result"].keys() == response_new.json["result"].keys() - assert response_default.json["result"].keys() != response_old.json["result"].keys() + assert response_default.json["result"].keys() == response_old.json["result"].keys() assert ( 
response_default.json["result"]["template_status"].keys() == response_new.json["result"]["template_status"].keys() From 591e27d2e8355d45b3c9c1d7db90b712c001141d Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Wed, 22 Mar 2023 16:34:03 +0100 Subject: [PATCH 05/10] don't swallow metadata error --- renku/command/migrate.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/renku/command/migrate.py b/renku/command/migrate.py index 246dd68ea3..63eae6edf0 100644 --- a/renku/command/migrate.py +++ b/renku/command/migrate.py @@ -185,18 +185,11 @@ def _template_migration_check() -> TemplateStatusResult: from renku.core.config import get_value from renku.core.template.usecase import check_for_template_update - try: - project = project_context.project - template_source = project.template_metadata.template_source - template_ref = project.template_metadata.template_ref - template_id = project.template_metadata.template_id - ssh_supported = project.template_metadata.ssh_supported - except (ValueError, AttributeError): - project = None - template_source = None - template_ref = None - template_id = None - ssh_supported = False + project = project_context.project + template_source = project.template_metadata.template_source + template_ref = project.template_metadata.template_ref + template_id = project.template_metadata.template_id + ssh_supported = project.template_metadata.ssh_supported ssh_supported = get_value("renku", "ssh_supported") == "true" or ssh_supported From 3225904e3609d5c448204934283acb5239f98f74 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Fri, 24 Mar 2023 16:54:21 +0100 Subject: [PATCH 06/10] test: remove description on nested --- renku/ui/service/serializers/cache.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index d38c8d3d73..4ab41d2a1d 100644 --- a/renku/ui/service/serializers/cache.py +++ 
b/renku/ui/service/serializers/cache.py @@ -410,10 +410,7 @@ class ProjectMigrationCheckResponse(Schema): DockerfileStatusResponse, metadata={"description": "Fields detailing the status of the Dockerfile in the project."}, ) - template_status = fields.Nested( - TemplateStatusResponse, - metadata={"description": "Fields detailing the status of the project template used by this project."}, - ) + template_status = fields.Nested(TemplateStatusResponse) class ProjectMigrationCheckResponseRPC(JsonRPCResponse): From 5bd64a957c485b40828878eae999ac29b297a8c0 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Wed, 29 Mar 2023 12:12:54 +0200 Subject: [PATCH 07/10] fix openapi for marshmallow_oneof schemas --- poetry.lock | 220 ++++++++++++++------------ pyproject.toml | 5 +- renku/ui/cli/service.py | 3 + renku/ui/service/serializers/cache.py | 5 +- renku/ui/service/views/apispec.py | 2 +- 5 files changed, 130 insertions(+), 105 deletions(-) diff --git a/poetry.lock b/poetry.lock index 06e47ec65c..b2c1d9769c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,28 +38,46 @@ files = [ [[package]] name = "apispec" -version = "5.2.2" +version = "6.3.0" description = "A pluggable API specification generator. Currently supports the OpenAPI Specification (f.k.a. the Swagger specification)." 
category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "apispec-5.2.2-py3-none-any.whl", hash = "sha256:f5f0d6b452c3e4a0e0922dce8815fac89dc4dbc758acef21fb9e01584d6602a5"}, - {file = "apispec-5.2.2.tar.gz", hash = "sha256:6ea6542e1ebffe9fd95ba01ef3f51351eac6c200a974562c7473059b9cd20aa7"}, + {file = "apispec-6.3.0-py3-none-any.whl", hash = "sha256:95a0b9355785df998bb0e9b939237a30ee4c7428fd6ef97305eae3da06b9b339"}, + {file = "apispec-6.3.0.tar.gz", hash = "sha256:6cb08d92ce73ff0b3bf46cb2ea5c00d57289b0f279fb0256a3df468182ba5344"}, ] [package.dependencies] +packaging = ">=21.3" PyYAML = {version = ">=3.10", optional = true, markers = "extra == \"yaml\""} [package.extras] -dev = ["PyYAML (>=3.10)", "flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "marshmallow (>=3.13.0)", "mock", "mypy (==0.950)", "prance[osv] (>=0.11)", "pre-commit (>=2.4,<3.0)", "pytest", "tox", "types-PyYAML"] -docs = ["marshmallow (>=3.13.0)", "pyyaml (==6.0)", "sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "sphinx-rtd-theme (==1.0.0)"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "mypy (==0.950)", "pre-commit (>=2.4,<3.0)", "types-PyYAML"] -marshmallow = ["marshmallow (>=3.13.0)"] -tests = ["PyYAML (>=3.10)", "marshmallow (>=3.13.0)", "mock", "prance[osv] (>=0.11)", "pytest"] -validation = ["prance[osv] (>=0.11)"] +dev = ["PyYAML (>=3.10)", "flake8 (==5.0.4)", "flake8-bugbear (==22.9.23)", "marshmallow (>=3.13.0)", "mypy (==0.982)", "openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)", "pre-commit (>=2.4,<3.0)", "pytest", "tox", "types-PyYAML"] +docs = ["marshmallow (>=3.13.0)", "pyyaml (==6.0)", "sphinx (==5.2.3)", "sphinx-issues (==3.0.1)", "sphinx-rtd-theme (==1.0.0)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.23)", "mypy (==0.982)", "pre-commit (>=2.4,<3.0)", "types-PyYAML"] +marshmallow = ["marshmallow (>=3.18.0)"] +tests = ["PyYAML (>=3.10)", "marshmallow (>=3.13.0)", "openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)", "pytest"] 
+validation = ["openapi-spec-validator (<0.5)", "prance[osv] (>=0.11)"] yaml = ["PyYAML (>=3.10)"] +[[package]] +name = "apispec-oneofschema" +version = "3.0.0" +description = "Plugin for apispec providing support for Marshmallow-OneOfSchema schemas" +category = "main" +optional = true +python-versions = "*" +files = [ + {file = "apispec-oneofschema-3.0.0.tar.gz", hash = "sha256:56a84492d2105340df059d477f28e67f63e38d14e40364229b68d7d512495664"}, + {file = "apispec_oneofschema-3.0.0-py2.py3-none-any.whl", hash = "sha256:8f106bcb394e764d532f1d43c658034e986aaa526a6ec1d20d1d7063aa1f0a86"}, +] + +[package.dependencies] +apispec = ">=3.0.0" +marshmallow = "<4.0.0" +marshmallow-oneofschema = "*" + [[package]] name = "apispec-webframeworks" version = "0.5.2" @@ -94,19 +112,18 @@ files = [ [[package]] name = "argcomplete" -version = "3.0.2" +version = "3.0.5" description = "Bash tab completion for argparse" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "argcomplete-3.0.2-py3-none-any.whl", hash = "sha256:ba35518a5640ebbda5fb77d94e1db535233a7eac18ffb905216753749eaff8e1"}, - {file = "argcomplete-3.0.2.tar.gz", hash = "sha256:fb929c2f609f195a2b11dc914924149b45c2b55b9f48283362e18b8d35f855c1"}, + {file = "argcomplete-3.0.5-py3-none-any.whl", hash = "sha256:e858595eee91732440e7291dbb49ae73d3fb9bfcc073429a16d54b7b374a7a3d"}, + {file = "argcomplete-3.0.5.tar.gz", hash = "sha256:fe3ce77125f434a0dd1bffe5f4643e64126d5731ce8d173d36f62fa43d6eb6f7"}, ] [package.extras] -lint = ["flake8", "mypy"] -test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "attrs" @@ -785,18 +802,18 @@ deps = ["galaxy-tool-util (>=21.1.0)"] [[package]] name = "deal" -version = "4.24.0" +version = "4.24.1" description = "**Deal** is a Python library for [design by contract][wiki] (DbC) programming." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "deal-4.24.0-py3-none-any.whl", hash = "sha256:12349ee5d74663a7b15513d4193747a2c93530f48b1524b3003128b8446478b9"}, - {file = "deal-4.24.0.tar.gz", hash = "sha256:157abffa21911e81df82999905ff98d3be9b65baea92109332e4f9eac08a4666"}, + {file = "deal-4.24.1-py3-none-any.whl", hash = "sha256:d6bdc6085dd77ac10d47e7c916c3beac33833b90348e884b171ae637e3a0d251"}, + {file = "deal-4.24.1.tar.gz", hash = "sha256:18b66d40e8f552cf3018f741c610041d9f293f3bf02a1e29dc7cd8419543bd46"}, ] [package.extras] -all = ["astroid (>=2.11.0)", "deal-solver", "hypothesis", "pygments", "typeguard", "vaa (>=0.2.1)"] +all = ["astroid (>=2.11.0)", "deal-solver", "hypothesis", "pygments", "typeguard (>=3.0.0)", "vaa (>=0.2.1)"] docs = ["m2r2", "myst-parser", "sphinx (>=3.5.0,<3.6.0)", "sphinx-rtd-theme (>=0.5.0,<0.6.0)"] integration = ["astroid (>=2.11.0)", "deal-solver", "flake8", "hypothesis", "marshmallow", "pygments", "sphinx (>=4.5.0)", "typeguard", "vaa (>=0.2.1)"] lint = ["deal-solver", "flake8", "flake8-commas", "flake8-quotes", "hypothesis", "isort", "mypy (>=0.900)", "mypy_test (>=0.1.1)", "pygments", "typeguard", "unify"] @@ -1067,28 +1084,33 @@ dotenv = ["python-dotenv"] [[package]] name = "frozendict" -version = "2.3.5" +version = "2.3.6" description = "A simple immutable dictionary" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fa08c3f361e26c698c22f008804cac4a5b51437c12feafb983daadac12f66ead"}, - {file = "frozendict-2.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b8cbed40c96fce53e5a31ff2db30ca2c56992ba033555b08c22d099c3576ec"}, - {file = "frozendict-2.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:64a00bcad55ff122293b0d362856dce0b248e894f1dcb0a0f68227a5ba9e4be6"}, - {file = "frozendict-2.3.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:08f8efd6fbe885e6217d210302cdc12cb8134aeac2b83db898511bc5e34719c5"}, - {file = "frozendict-2.3.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a2c371d23f148886864a5b82f1e5eefed35ce145b5d59dcfd3d66c9391bb45"}, - {file = "frozendict-2.3.5-cp36-cp36m-win_amd64.whl", hash = "sha256:de96ccf6e574482c9537ffa68b2cb381537a5a085483001d4a2b93847089bc04"}, - {file = "frozendict-2.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1dbe11318b423fb3591e08d8b832d27dfd7b74dc20486d3384b8e05d6de2bcf7"}, - {file = "frozendict-2.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30af9f39a5e29edca96b09c8d0a17fc78a0efd5f31f74d5eebb4c9a28d03032f"}, - {file = "frozendict-2.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d1677e53d370ba44a07fbcc036fa24d4ae5693f0ed785496caf49e12a238d41f"}, - {file = "frozendict-2.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1411ef255b7a55fc337022ba158acf1391cd0d9a5c13142abbb7367936ab6f78"}, - {file = "frozendict-2.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4a1c8febc23f3c81c2b94d70268b5b760ed7e5e81c90c3baa22bf144db3d2f9"}, - {file = "frozendict-2.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:210a59a5267ae79b5d92cd50310cd5bcb122f1783a3d9016ad6db9cc179d4fbe"}, - {file = "frozendict-2.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21dd627c5bdcdf0743d49f7667dd186234baa85db91517de8cb80d3bda7018d9"}, - {file = "frozendict-2.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d58ca5f9094725c2f44b09fe4e71f7ddd250d5cdaca7219c674bd691373fed3a"}, - {file = "frozendict-2.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:f407d9d661d77896b7a6dae6ab7545c913e65d23a312cf2893406432069408db"}, - {file = "frozendict-2.3.5.tar.gz", hash = "sha256:65d7e3995c9174b77d7d80514d7062381750491e112bbeb44323368baa3e636a"}, + {file = "frozendict-2.3.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2178f8cc97d4ca8736df2fea0ca18094e259db086b560e5905ecac7f0894adc5"}, + {file = "frozendict-2.3.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e3255b0f33a65b558d99d067c1dbedc6f30effe66967d5b201c7fcffb20a86e"}, + {file = "frozendict-2.3.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e25079fe3e11ca1360b8435f4a848c851b3c657b3ba3577a6627bfe6628705f"}, + {file = "frozendict-2.3.6-cp310-cp310-win_amd64.whl", hash = "sha256:caed7300322a47ceeaa00a311f902cbbeb7a3d1c8955487de3ea0ad618048325"}, + {file = "frozendict-2.3.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a31827dbe892172e1b000ffe22d577986c863e790cc7d51ba8c7fff4f44ac7cf"}, + {file = "frozendict-2.3.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73b4cca476f3d51f1b11264a41e0c3dbc20a53fb6b1d0878e01cc0b71fa16ce"}, + {file = "frozendict-2.3.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1b4abc85e4788c2fe434851971a5fde4b079ed369965a21911e485dde97c08f4"}, + {file = "frozendict-2.3.6-cp36-cp36m-win_amd64.whl", hash = "sha256:9c312b6dfe6a3b6b910637442989119d38bac417486c44a0ed402a80a4f5890e"}, + {file = "frozendict-2.3.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a9def8ad754a405f0d982fe59e4f47bccd760ddcb472bfe0149f07410245f3d7"}, + {file = "frozendict-2.3.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8d49faff6eb3f2678dff4d3c83fcb49ccebb13d8360389a0b8941d3e1ee8a93"}, + {file = "frozendict-2.3.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d310b7ea4fdfe70935d3e9193c8711689914b35300b7234137760c7cdbf257e1"}, + {file = "frozendict-2.3.6-cp37-cp37m-win_amd64.whl", hash = "sha256:63745ca9a3ffb98f7c6a742afb10cd998e8fcd4f59b40c0c7c0e5795e2092cc8"}, + {file = "frozendict-2.3.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd420cad2ec3223b3210c3b5a30fdc4842021ff037ae3c00e5ba6dd48731b753"}, + {file = "frozendict-2.3.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4ea7c9fe1661a0f4135179bc056d5d90f197df5602532d224507c301b1cbda5e"}, + {file = "frozendict-2.3.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6e52018dc4a8ceaabe420fba4b3c003e8c0391d2efe5c164008ff791e61471d6"}, + {file = "frozendict-2.3.6-cp38-cp38-win_amd64.whl", hash = "sha256:5b7bbbeabdafa31d7abfea68d8d532994767e56a137855f6dafdcbbc1dc924a8"}, + {file = "frozendict-2.3.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:638d056ddff3e96f288176fcd18330ee68cc38f39ee67e10e43f4ee57552ed82"}, + {file = "frozendict-2.3.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b722fca999b1d14e277e095214d2296e355f6f17d7727c17a4aa95882738184a"}, + {file = "frozendict-2.3.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c8314f5b8812cb7bc6001595e5cc521dffe220a8789d942a3f946660cc45672"}, + {file = "frozendict-2.3.6-cp39-cp39-win_amd64.whl", hash = "sha256:d1e36e820fd2cae4e26935a4b82ccaa2eef8d347709d1c27e3b7608744f72f83"}, + {file = "frozendict-2.3.6.tar.gz", hash = "sha256:14e376fb0c8fe0b11b77872a84356dfa191b24a240be66bf5ec66d9b0d203bb8"}, ] [[package]] @@ -1202,14 +1224,14 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.5.21" +version = "2.5.22" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"}, - {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"}, + {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"}, + {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"}, ] [package.extras] @@ -2267,19 +2289,19 @@ six = "*" [[package]] name = "platformdirs" -version = "3.1.1" +version = "3.2.0" 
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, - {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] [package.extras] docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -3043,14 +3065,14 @@ yaml = ["PyYaml (>=5.2)"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -3531,14 +3553,14 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "67.6.0" +version = "67.6.1" description = "Easily 
download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, - {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] [package.extras] @@ -3912,14 +3934,14 @@ files = [ [[package]] name = "tomlkit" -version = "0.11.6" +version = "0.11.7" description = "Style preserving TOML library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, + {file = "tomlkit-0.11.7-py3-none-any.whl", hash = "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c"}, + {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"}, ] [[package]] @@ -3985,26 +4007,26 @@ testing = ["coverage", "mock", "nose"] [[package]] name = "types-python-dateutil" -version = "2.8.19.10" +version = "2.8.19.11" description = "Typing stubs for python-dateutil" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-python-dateutil-2.8.19.10.tar.gz", hash = "sha256:c640f2eb71b4b94a9d3bfda4c04250d29a24e51b8bad6e12fddec0cf6e96f7a3"}, - {file = "types_python_dateutil-2.8.19.10-py3-none-any.whl", hash = "sha256:fbecd02c19cac383bf4a16248d45ffcff17c93a04c0794be5f95d42c6aa5de39"}, + {file = 
"types-python-dateutil-2.8.19.11.tar.gz", hash = "sha256:de66222c54318c2e05ceb4956976d16696240a45fc2c98e54bfe9a56ce5e1eff"}, + {file = "types_python_dateutil-2.8.19.11-py3-none-any.whl", hash = "sha256:357553f8056cfbb8ce8ea0ca4a6a3480268596748360df73a94c2b8c113a5b06"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.8" +version = "6.0.12.9" description = "Typing stubs for PyYAML" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.8.tar.gz", hash = "sha256:19304869a89d49af00be681e7b267414df213f4eb89634c4495fa62e8f942b9f"}, - {file = "types_PyYAML-6.0.12.8-py3-none-any.whl", hash = "sha256:5314a4b2580999b2ea06b2e5f9a7763d860d6e09cdf21c0e9561daa9cbd60178"}, + {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, + {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, ] [[package]] @@ -4045,14 +4067,14 @@ files = [ [[package]] name = "types-toml" -version = "0.10.8.5" +version = "0.10.8.6" description = "Typing stubs for toml" category = "dev" optional = false python-versions = "*" files = [ - {file = "types-toml-0.10.8.5.tar.gz", hash = "sha256:bf80fce7d2d74be91148f47b88d9ae5adeb1024abef22aa2fdbabc036d6b8b3c"}, - {file = "types_toml-0.10.8.5-py3-none-any.whl", hash = "sha256:2432017febe43174af0f3c65f03116e3d3cf43e7e1406b8200e106da8cf98992"}, + {file = "types-toml-0.10.8.6.tar.gz", hash = "sha256:6d3ac79e36c9ee593c5d4fb33a50cca0e3adceb6ef5cff8b8e5aef67b4c4aaf2"}, + {file = "types_toml-0.10.8.6-py3-none-any.whl", hash = "sha256:de7b2bb1831d6f7a4b554671ffe5875e729753496961b3e9b202745e4955dafa"}, ] [[package]] @@ -4554,48 +4576,42 @@ test = ["BTrees[test]", "ZEO[test]", "ZODB[test]", "persistent[test]"] [[package]] name = "zodbpickle" -version = "2.6" -description = "Fork of Python 2 and 3 pickle module." +version = "3.0.1" +description = "Fork of Python 3 pickle module." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "zodbpickle-2.6-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:2e15fc076026aa30d91de7eaae6378663d6aa89bb59f4235c20dc3e10594126d"}, - {file = "zodbpickle-2.6-cp27-cp27m-win_amd64.whl", hash = "sha256:9c7b172e284a38901fdff9d6f44a2f902de74c361296c41b6c5d18f1709a24bb"}, - {file = "zodbpickle-2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:79e99fa54b64df572273b760e7af03e426e0cbe40da9dc836b78d2090d3d9091"}, - {file = "zodbpickle-2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d74caa8eb5fec689b0f15a9de4ef1e4d4da5a6d7893e6b68f18d37016301a982"}, - {file = "zodbpickle-2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c8a45dbf9a25ec2f00b2b4171ed86eb499a263c8123a3b5913b29a2a2c75aa"}, - {file = "zodbpickle-2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bfc422785b2184160b097692d85c1934e54168a2faaa42621dfc2b5e41b878ed"}, - {file = "zodbpickle-2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:009ed8b1c2adffac290d4547069e91d4bc3ef7cc5103bf7ae2a405c55d6190ea"}, - {file = "zodbpickle-2.6-cp310-cp310-win_amd64.whl", hash = "sha256:54ee31a9cba9985acd7a448baba72832b9b345fc01a275acab415f98544f3fb0"}, - {file = "zodbpickle-2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a4540342d3ed0f2f56dc5b20cbddf1b8e7c4a2a067afb7bbddc7514f2640ee7"}, - {file = "zodbpickle-2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9b11d31e4a9d2bb12d1e3db5bf17f501b083397bdd2ee497baeb41099feaea90"}, - {file = "zodbpickle-2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17626ddbfed5f8520e6197ae930b74acce0979cfc4c1eb1a08c923b7e3bae02e"}, - {file = "zodbpickle-2.6-cp311-cp311-win_amd64.whl", hash = "sha256:02cb17253f128384ca748fad36422db26e29d6e06d866794de34c896ebad19c8"}, - {file = 
"zodbpickle-2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:6825944371f4bb3524c04f89e5585da353bd4f0b280b9e267117bf478a3cf086"}, - {file = "zodbpickle-2.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:d44cfcbbd501a96088207bc9ac77a7339a022bd73ce85bf49b3b38510c270818"}, - {file = "zodbpickle-2.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda6fbd4413f770fff745f6a8400c56043cb2da18406e225266ec6171d052f47"}, - {file = "zodbpickle-2.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3451417b040052e5b3b341dd369bdac9613ebd2ace9aafb59895d199bf611d98"}, - {file = "zodbpickle-2.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ead2f539cf8fac96b46693e0fd5119fac2ce3f46d584550344745297cafa8209"}, - {file = "zodbpickle-2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e0e2c8de1380b16aeba5919641b060cfa7fa42846b4b8e2ff84ccc24e069a200"}, - {file = "zodbpickle-2.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:47bad4c1b1bf7c31b0fdf5e1077126bad6d43710a77d86644d5dab6e8e97577d"}, - {file = "zodbpickle-2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15f9edc0a48808a6c0a00df9a919e64c559053f92436571dfd2c1b989111efc5"}, - {file = "zodbpickle-2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1154439ef9f8808369890657aa331df87278f24e2372798d69ebc3921e4d30c3"}, - {file = "zodbpickle-2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8dc0c35fbf4c3d0d32a22ba6c7bc08667939c5db17fdb4430c4cd456e0dd53be"}, - {file = "zodbpickle-2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:5f98f75e71de0179637e57372fd01f585ffab9893f8b4d6fa071c0bb51170ce2"}, - {file = "zodbpickle-2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efa94e9a13bf7e9809633da9406c132b804c9f35f321377b74c5614bf2c00020"}, - {file 
= "zodbpickle-2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c895678ca30dcbd59d2e5e559bbc40ffca67df29e44dfc51f97ab7036b840dfa"}, - {file = "zodbpickle-2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbceb2e0e3de75ea12f678c2a22606003c10d6ab16c94125b7f9c4be6fc2db0"}, - {file = "zodbpickle-2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f02de35fe28080f26cfc6da87c9c3e28c854ed34614fb7c7cdc882da32119a9f"}, - {file = "zodbpickle-2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9ca90e91d2b240ae6ee7d5678acc40900fdddfb4d8453f400212203a5d601fb8"}, - {file = "zodbpickle-2.6-cp38-cp38-win_amd64.whl", hash = "sha256:f3779a2e1a67d82be12d5955b081882bf303f77777abc55519497d78e997cc6d"}, - {file = "zodbpickle-2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ba9935ab6717cf9aec51d9e4972d9a3e828ff14917e164dd07c94acbae29ec6a"}, - {file = "zodbpickle-2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0edc7f20beefd2aac1dc74a7e36ee62a854ee28b7b0f6d4ce4c9c93e3a36245"}, - {file = "zodbpickle-2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27b79c3a34b5d0c404feee8ec7c0367fc94e690834b1f6bf119494700b018f6"}, - {file = "zodbpickle-2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5898ac17106399adca3a57d1f6a04fe7065c5d11e68a8fae06eb270e24780a10"}, - {file = "zodbpickle-2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb751a78bc13639ed2b088eecaba6c1056d43b99c59cdb3210eab1a1a6e180c1"}, - {file = "zodbpickle-2.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f763c943e5116587b6ffd070c2e4cb7a16a4fef29f3eb8dac743895b95bc500"}, - {file = "zodbpickle-2.6.tar.gz", hash = "sha256:05978fc24ff93f349045aea11fa3ad1efa80eab19cab6251eac7417c6311a1d2"}, + {file = 
"zodbpickle-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6e6468bdbe53d6e8d96e345d221eaaa14bfc34e4be4cf9d434d35743dc459e6c"}, + {file = "zodbpickle-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5c5f0ef97b1f8fce78caa820fef23595de5726a3ecc400aaf79b752f1c258d8"}, + {file = "zodbpickle-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c867e2588cd15805113212292564509ff669aa7e4181385bb0619686e168f207"}, + {file = "zodbpickle-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:963873ed3ec611b8b0e678f0dd013f83fb8284a184b41f36c8a0a83c5e29baef"}, + {file = "zodbpickle-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43ee315ab9ad2355ab1f33225cf84a098c4f7cf4e9b2e8760a8f8be1cf0c4535"}, + {file = "zodbpickle-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:8089ff8c01c982b4d7316a052ad71e63b007c88c1963625c999a28ae4e524ea0"}, + {file = "zodbpickle-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:023d75d7f323555b9fd7588d6be321c10244adfc2a24c8be645b807d371efec6"}, + {file = "zodbpickle-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9266dc8112133e45bf42a1267b72b07c483a4d47f23bed8f63c9c46c7f42c169"}, + {file = "zodbpickle-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c93486da9f638d93d24e8658c05fdb64964172c8b1a73e553a15cabf2da8d37"}, + {file = "zodbpickle-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:189ba7a64e47353851407628b0599ab8acc8c16cf7adc3ec3132ba2b5ad2aafd"}, + {file = "zodbpickle-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:156371b488536dd07257e19f11a3f04b70bb5331df3a3b1864f207c62a8a06ab"}, + {file = "zodbpickle-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fcbdab1b4f6eb596d537f8237c34ea401b0a12c54c6d2507ab16570247b52f04"}, + {file = 
"zodbpickle-3.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d84d9de119efba08c959b9d3d64c20f7f3e692373cc21552dd7ec47c4cac6fe9"}, + {file = "zodbpickle-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bc212dfc90f702c9f0095c76b77708b30d41be36b599641de8a73c7be73b2b6"}, + {file = "zodbpickle-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b96ad4dc089c445a8fdb03205c4f96dab93de786962a1aeceabf24612736dc54"}, + {file = "zodbpickle-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acc095c8cc5364266b447600adb1a80155bd8c31cff46ee94f81730c6a9fbcd0"}, + {file = "zodbpickle-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:49bd0631baaf07ccf48129a635896a95b0f3fceba88207f1411be9fc66c6258f"}, + {file = "zodbpickle-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:507672605448d15d551cc3d3d7a07a8e020b72cea72174fec2b004c2ff08194b"}, + {file = "zodbpickle-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:53744793e8d1cb78a5266834551ded320bc57d3f0f3b15173400bff07d6dea5b"}, + {file = "zodbpickle-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91f52f80d59bc5d005f0788e2797ac6d25e993f38730c684a2eec5c8f5f928d4"}, + {file = "zodbpickle-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4923dcfd48dd629b9a9352b1a0d13ac129f502f8674118f5d6ae4b9406d6bd0f"}, + {file = "zodbpickle-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:770e833290894621da69bdb90317070a26ca64f6d266d5bfe5fc9d59524c2694"}, + {file = "zodbpickle-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c3827131253837014ffaaf9c3eedf8807f6ad3af47b26cd2a7f75d0f03eb466f"}, + {file = "zodbpickle-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2165721b2475c3a95950a960ea863a2851eb5389705e45c02aba838d669f6b64"}, + {file = 
"zodbpickle-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a13a791615b0b230e599a0abc961d8d26345c9ab0a43ca9d8a72e2a606a6ecd5"}, + {file = "zodbpickle-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6b61c594ef921c901e421b48f1857442b2c962d8dedda166100a76a9154fafd"}, + {file = "zodbpickle-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e5cc7c56085c415b5d33325ca94ffe7ac19ec505265db93b2a2fa70cbf0063a"}, + {file = "zodbpickle-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3138cfad7d00493b2ed0626128cef6b6e7e84d5011eb3ab33d612d8589eb742f"}, + {file = "zodbpickle-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d1d0ad8ed32b03a38f8673712ac8b3c5c8a8c8d3bd72a80696db7d0fb674d91"}, + {file = "zodbpickle-3.0.1.tar.gz", hash = "sha256:0f0975bdd4a7615320e74af2b0b8be4763c73cf8bf7c4212ab72044289e77498"}, ] [package.dependencies] @@ -4732,9 +4748,9 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\ cffi = ["cffi (>=1.11)"] [extras] -service = ["apispec", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "rq-scheduler", "sentry-sdk", "walrus"] +service = ["apispec", "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", "gunicorn", "marshmallow", "marshmallow-oneofschema", "pillow", "python-dotenv", "redis", "rq", "rq-scheduler", "sentry-sdk", "walrus"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "36cd7f93223525c4ba573a3a6e57c56380055c1ffed30aa5e0f699588360a3ab" +content-hash = "c561f327891ce41c5140e2b3d5535b0e1b5ed6aa03efd9ea9e40301b228c1858" diff --git a/pyproject.toml b/pyproject.toml index 97d4c4af29..8a6d60540b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,7 +105,8 @@ zodb = "==5.6.0" zstandard = ">=0.16.0,<0.18.0" # service dependencies: -apispec = { version = 
">=4.0.0,<5.3.0", optional = true } +apispec = { version = ">=6.3.0,<6.4.0", optional = true } +apispec-oneofschema = { version = ">=3.0.0,<4.0.0", optional = true} apispec-webframeworks = { version = "<0.6,>=0.5.2", optional = true } circus = { version = "==0.17.1", optional = true } flask = { version = "==2.1.1", optional = true } @@ -170,6 +171,7 @@ sphinxcontrib-spelling = "7.*" [tool.poetry.extras] service = [ "apispec", + "apispec-oneofschema", "apispec-webframeworks", "circus", "flask", @@ -278,6 +280,7 @@ check_untyped_defs = true [[tool.mypy.overrides]] module = [ "apispec.*", + "apispec_oneofschema.*", "apispec_webframeworks.*", "appdirs", "BTrees.*", diff --git a/renku/ui/cli/service.py b/renku/ui/cli/service.py index b2d08d2340..0c19c9973c 100644 --- a/renku/ui/cli/service.py +++ b/renku/ui/cli/service.py @@ -179,6 +179,9 @@ def service(ctx, env): import rq # noqa: F401 from dotenv import load_dotenv + if ctx.invoked_subcommand in ["apispec", "logs", "api"]: + return # Redis not needed + try: from renku.ui.service.cache.base import BaseCache diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index 4ab41d2a1d..d38c8d3d73 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -410,7 +410,10 @@ class ProjectMigrationCheckResponse(Schema): DockerfileStatusResponse, metadata={"description": "Fields detailing the status of the Dockerfile in the project."}, ) - template_status = fields.Nested(TemplateStatusResponse) + template_status = fields.Nested( + TemplateStatusResponse, + metadata={"description": "Fields detailing the status of the project template used by this project."}, + ) class ProjectMigrationCheckResponseRPC(JsonRPCResponse): diff --git a/renku/ui/service/views/apispec.py b/renku/ui/service/views/apispec.py index 25e7d930ee..36a08f84e8 100644 --- a/renku/ui/service/views/apispec.py +++ b/renku/ui/service/views/apispec.py @@ -16,7 +16,7 @@ # limitations under the 
License. """Renku service apispec views.""" from apispec import APISpec, yaml_utils -from apispec.ext.marshmallow import MarshmallowPlugin +from apispec_oneofschema import MarshmallowPlugin from apispec_webframeworks.flask import FlaskPlugin from flask import Blueprint, current_app, jsonify from flask.views import MethodView From 26018757292e6d7cf0812e27b65de54279252ec2 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Mon, 8 May 2023 18:10:54 +0200 Subject: [PATCH 08/10] feat(service): add support for doctor check in cache migration endpoint. (#3384) * feat(service): add support for doctor check in cache migration endpoint. --- poetry.lock | 24 +++++----- renku/command/checks/activities.py | 10 ++--- renku/command/checks/datasets.py | 22 ++++----- renku/command/checks/githooks.py | 8 ++-- renku/command/checks/migration.py | 4 +- renku/command/checks/project.py | 18 +++++--- renku/command/checks/storage.py | 8 ++-- renku/command/checks/validate_shacl.py | 17 ++----- renku/command/checks/workflow.py | 16 +++---- renku/command/doctor.py | 7 ++- renku/ui/cli/doctor.py | 6 ++- .../controllers/cache_migrations_check.py | 17 +++---- .../service/gateways/gitlab_api_provider.py | 45 +++++++++++++++---- .../ui/service/interfaces/git_api_provider.py | 3 +- renku/ui/service/serializers/cache.py | 6 +++ .../controllers/utils/test_remote_project.py | 8 +--- .../views/v1_0/test_cache_views_1_0.py | 2 +- 17 files changed, 126 insertions(+), 95 deletions(-) diff --git a/poetry.lock b/poetry.lock index f5c9c04c2f..652b43306e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "addict" @@ -378,14 +378,14 @@ docs = ["Jinja2 (>=3.0.0,<3.1.0)", "sphinx (>=3.0.3,<4.0.0)", "sphinx-rtd-theme [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -3303,18 +3303,18 @@ tests = ["berkeleydb", "html5lib", "networkx", "pytest", "pytest-cov", "pytest-s [[package]] name = "redis" -version = "4.5.4" +version = "4.5.5" description = "Python client for Redis database and key-value store" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "redis-4.5.4-py3-none-any.whl", hash = "sha256:2c19e6767c474f2e85167909061d525ed65bea9301c0770bb151e041b7ac89a2"}, - {file = "redis-4.5.4.tar.gz", hash = "sha256:73ec35da4da267d6847e47f68730fdd5f62e2ca69e3ef5885c6a78a9374c3893"}, + {file = "redis-4.5.5-py3-none-any.whl", hash = "sha256:77929bc7f5dab9adf3acba2d3bb7d7658f1e0c2f1cafe7eb36434e751c471119"}, + {file = "redis-4.5.5.tar.gz", hash = "sha256:dc87a0bdef6c8bfe1ef1e1c40be7034390c2ae02d92dcd0c7ca1729443899880"}, ] [package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_version <= \"3.11.2\""} +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} [package.extras] hiredis = ["hiredis (>=1.0.0)"] @@ -4503,14 +4503,14 @@ test = ["ZODB", "zc.relationship (>=2)"] [[package]] name = "zconfig" -version = 
"3.6.1" +version = "4.0" description = "Structured Configuration Library" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "ZConfig-3.6.1-py2.py3-none-any.whl", hash = "sha256:1459a11f22e1268ab1f9a368bd2fe2c2018ceeb01c653eef3a5f7d8f5608f041"}, - {file = "ZConfig-3.6.1.tar.gz", hash = "sha256:4422c7d663af762cd7795775366bc6a67ab440a1ab796eb0f7425b0e9034f076"}, + {file = "ZConfig-4.0-py3-none-any.whl", hash = "sha256:1c131c1a52d3de9bc1feaa6abc8d895961e1ebef7e1f50041e21a328c6878c73"}, + {file = "ZConfig-4.0.tar.gz", hash = "sha256:f8d642fba6ba98d08631be2c1f71ad1957c051fef4aa3d3fb9f1e08dc61d0156"}, ] [package.extras] diff --git a/renku/command/checks/activities.py b/renku/command/checks/activities.py index 4cadd48c70..37f2cda48e 100644 --- a/renku/command/checks/activities.py +++ b/renku/command/checks/activities.py @@ -58,7 +58,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_): wrong_activities = [] if not wrong_activities: - return True, None + return True, False, None problems = ( WARNING @@ -68,7 +68,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_): + "\n" ) - return False, problems + return False, True, problems @inject.autoparams("activity_gateway") @@ -95,7 +95,7 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): invalid_activities.append(activity) if not invalid_activities: - return True, None + return True, False, None if not fix: ids = [a.id for a in invalid_activities] message = ( @@ -104,13 +104,13 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): + "\n\t" + "\n\t".join(ids) ) - return False, message + return False, True, message fix_activity_dates(activities=invalid_activities) project_context.database.commit() communication.info("Activity dates were fixed") - return True, None + return True, False, None def fix_activity_dates(activities): diff --git a/renku/command/checks/datasets.py 
b/renku/command/checks/datasets.py index e0fc35bc3a..52cc04a197 100644 --- a/renku/command/checks/datasets.py +++ b/renku/command/checks/datasets.py @@ -43,7 +43,7 @@ def check_dataset_old_metadata_location(**_): old_metadata = get_pre_0_3_4_datasets_metadata() if not old_metadata: - return True, None + return True, False, None problems = ( WARNING + "There are metadata files in the old location." @@ -52,7 +52,7 @@ def check_dataset_old_metadata_location(**_): + "\n" ) - return False, problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -79,7 +79,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): missing[dataset.name].append(file_.entity.path) if not missing: - return True, None + return True, False, None problems = WARNING + "There are missing files in datasets." @@ -91,7 +91,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): + "\n\t ".join(click.style(path, fg="red") for path in files) ) - return False, problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -130,7 +130,7 @@ def fix_or_report(dataset): break if not invalid_datasets: - return True, None + return True, False, None problems = ( WARNING @@ -140,7 +140,7 @@ def fix_or_report(dataset): + "\n" ) - return False, problems + return False, True, problems @inject.autoparams("dataset_gateway") @@ -193,9 +193,9 @@ def check_dataset_files_outside_datadir(fix, dataset_gateway: IDatasetGateway, * + "\n\t".join(click.style(file.entity.path, fg="yellow") for file in invalid_files) + "\n" ) - return False, problems + return False, True, problems - return True, None + return True, False, None @inject.autoparams("dataset_gateway") @@ -222,7 +222,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): datasets[dataset.name].append(file) if not external_files: - return True, None + return True, False, None external_files_str = "\n\t".join(sorted(external_files)) @@ -232,7 +232,7 @@ def check_external_files(fix, 
dataset_gateway: IDatasetGateway, **_): "Use 'renku dataset rm' or rerun 'renku doctor' with '--fix' flag to remove them:\n\t" f"{external_files_str}\n" ) - return False, problems + return False, True, problems communication.info( "The following external files were deleted from the project. You need to add them later manually using a " @@ -242,4 +242,4 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): for name, files in datasets.items(): file_unlink(name=name, yes=True, dataset_files=files) - return True, None + return True, False, None diff --git a/renku/command/checks/githooks.py b/renku/command/checks/githooks.py index 524ba59ce7..c630261eef 100644 --- a/renku/command/checks/githooks.py +++ b/renku/command/checks/githooks.py @@ -41,7 +41,7 @@ def check_git_hooks_installed(**_): hook_path = get_hook_path(name=hook, path=project_context.path) if not hook_path.exists(): message = WARNING + "Git hooks are not installed. " 'Use "renku githooks install" to install them. 
\n' - return False, message + return False, False, message with hook_path.open() as file_: actual_hook = _extract_renku_hook(file_) @@ -50,16 +50,16 @@ def check_git_hooks_installed(**_): if not expected_hook: message = WARNING + "Cannot check for existence of Git hooks.\n" - return False, message + return False, False, message if actual_hook != expected_hook: message = ( WARNING + "Git hooks are outdated or not installed.\n" ' (use "renku githooks install --force" to update them) \n' ) - return False, message + return False, False, message - return True, None + return True, False, None def _extract_renku_hook(file): diff --git a/renku/command/checks/migration.py b/renku/command/checks/migration.py index 9e81af9e31..13b81969f7 100644 --- a/renku/command/checks/migration.py +++ b/renku/command/checks/migration.py @@ -35,6 +35,6 @@ def check_migration(**_): ERROR + "Project version is not supported by your version of Renku.\n" + " (upgrade your Renku version)\n" ) else: - return True, None + return True, False, None - return False, problems + return False, False, problems diff --git a/renku/command/checks/project.py b/renku/command/checks/project.py index eb79d5f0bd..8e333c5ef9 100644 --- a/renku/command/checks/project.py +++ b/renku/command/checks/project.py @@ -42,21 +42,25 @@ def check_project_id_group(fix, project_gateway: IProjectGateway, **_): ) if namespace is None or name is None: - return True, None + return True, False, None generated_id = Project.generate_id(namespace=namespace, name=name) if generated_id == current_project.id: - return True, None + return True, False, None if fix: communication.info(f"Fixing project id '{current_project.id}' -> '{generated_id}'") current_project.id = generated_id project_gateway.update_project(current_project) - return True, None + return True, False, None - return True, ( - WARNING - + "Project id doesn't match id created based on the current Git remote (use 'renku doctor --fix' to fix it):" - 
f"\n\t'{current_project.id}' -> '{generated_id}'" + return ( + False, + True, + ( + WARNING + + "Project id doesn't match id based on the current Git remote (use 'renku doctor --fix' to fix it):" + f"\n\t'{current_project.id}' -> '{generated_id}'" + ), ) diff --git a/renku/command/checks/storage.py b/renku/command/checks/storage.py index 709507db2d..e7a85cbb0d 100644 --- a/renku/command/checks/storage.py +++ b/renku/command/checks/storage.py @@ -29,19 +29,19 @@ def check_lfs_info(**_): Tuple of whether project structure is valid and string of found problems. """ if not check_external_storage(): - return True, None + return True, False, None files = check_lfs_migrate_info() if not files: - return True, None + return True, False, None message = ( WARNING + "Git history contains large files - consider moving them " - + "to external storage like git LFS\n\t" + + "to external storage like git LFS using 'renku storage migrate'\n\t" + "\n\t".join(files) + "\n" ) - return False, message + return False, False, message diff --git a/renku/command/checks/validate_shacl.py b/renku/command/checks/validate_shacl.py index e08c1900da..6db02275f2 100644 --- a/renku/command/checks/validate_shacl.py +++ b/renku/command/checks/validate_shacl.py @@ -16,7 +16,6 @@ """Check KG structure using SHACL.""" import pyld -import yaml from renku.command.command_builder import inject from renku.command.schema.dataset import dump_dataset_as_jsonld @@ -24,7 +23,6 @@ from renku.command.util import WARNING from renku.core.interface.dataset_gateway import IDatasetGateway from renku.core.util.shacl import validate_graph -from renku.core.util.yaml import NoDatesSafeLoader from renku.domain_model.project_context import project_context @@ -78,11 +76,11 @@ def check_project_structure(**_): conform, graph, t = _check_shacl_structure(data) if conform: - return True, None + return True, False, None problems = f"{WARNING}Invalid structure of project metadata\n\t{_shacl_graph_to_string(graph)}" - return False, 
problems + return False, False, problems @inject.autoparams("dataset_gateway") @@ -116,16 +114,9 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): problems.append(f"{dataset.name}\n\t{_shacl_graph_to_string(graph)}\n") if ok: - return True, None + return True, False, None - return False, "\n".join(problems) - - -def _check_shacl_structure_for_path(path): - with path.open(mode="r") as fp: - data = yaml.load(fp, Loader=NoDatesSafeLoader) or {} - - return _check_shacl_structure(data) + return False, False, "\n".join(problems) def _check_shacl_structure(data): diff --git a/renku/command/checks/workflow.py b/renku/command/checks/workflow.py index b4cf5407c0..a02647c213 100644 --- a/renku/command/checks/workflow.py +++ b/renku/command/checks/workflow.py @@ -26,7 +26,7 @@ from renku.infrastructure.gateway.activity_gateway import reindex_catalog -def check_activity_catalog(fix, force, **_) -> Tuple[bool, Optional[str]]: +def check_activity_catalog(fix, force, **_) -> Tuple[bool, bool, Optional[str]]: """Check if the activity-catalog needs to be rebuilt. Args: @@ -44,25 +44,25 @@ def check_activity_catalog(fix, force, **_) -> Tuple[bool, Optional[str]]: # NOTE: If len(activity_catalog) > 0 then either the project is fixed or it used a fixed Renku version but still has # broken metadata. ``force`` allows to rebuild the metadata in the latter case. 
if (len(relations) == 0 or len(activity_catalog) > 0) and not (force and fix): - return True, None + return True, False, None if not fix: problems = ( WARNING + "The project's workflow metadata needs to be rebuilt (use 'renku doctor --fix' to rebuild it).\n" ) - return False, problems + return False, True, problems with communication.busy("Rebuilding workflow metadata ..."): reindex_catalog(database=database) communication.info("Workflow metadata was rebuilt") - return True, None + return True, False, None @inject.autoparams("plan_gateway") -def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[bool, Optional[str]]: +def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[bool, bool, Optional[str]]: """Check if all plans have modification date set for them. Args: @@ -81,7 +81,7 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ to_be_processed.append(plan) if not to_be_processed: - return True, None + return True, False, None if not fix: ids = [plan.id for plan in to_be_processed] message = ( @@ -89,13 +89,13 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ + "The following workflows have incorrect modification date (use 'renku doctor --fix' to fix them):\n\t" + "\n\t".join(ids) ) - return False, message + return False, True, message fix_plan_dates(plans=to_be_processed, plan_gateway=plan_gateway) project_context.database.commit() communication.info("Workflow modification dates were fixed") - return True, None + return True, False, None def fix_plan_dates(plans: List[AbstractPlan], plan_gateway): diff --git a/renku/command/doctor.py b/renku/command/doctor.py index cb211edf48..9150352934 100644 --- a/renku/command/doctor.py +++ b/renku/command/doctor.py @@ -44,22 +44,25 @@ def _doctor_check(fix: bool, force: bool): from renku.command import checks is_ok = True + fixes_available = False problems = [] for check in checks.__all__: try: - ok, problems_ = 
getattr(checks, check)(fix=fix, force=force) + ok, has_fix, problems_ = getattr(checks, check)(fix=fix, force=force) except Exception: ok = False + has_fix = False tb = "\n\t".join(traceback.format_exc().split("\n")) problems_ = f"{ERROR}Exception raised when running {check}\n\t{tb}" is_ok &= ok + fixes_available |= has_fix if problems_: problems.append(problems_) - return is_ok, "\n".join(problems) + return is_ok, fixes_available, "\n".join(problems) def doctor_check_command(with_fix): diff --git a/renku/ui/cli/doctor.py b/renku/ui/cli/doctor.py index d5f4b90a90..e522ddbb56 100644 --- a/renku/ui/cli/doctor.py +++ b/renku/ui/cli/doctor.py @@ -58,11 +58,15 @@ def doctor(ctx, fix, force): command = doctor_check_command(with_fix=fix) if fix: command = command.with_communicator(communicator) - is_ok, problems = command.build().execute(fix=fix, force=force).output + is_ok, fixes_available, problems = command.build().execute(fix=fix, force=force).output if is_ok: click.secho("Everything seems to be ok.", fg=color.GREEN) ctx.exit(0) click.echo(problems) + + if fixes_available: + click.echo("Run with '--fix' flag to try and fix these issues.") + ctx.exit(1) diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index c6c1a0b4da..744763cf26 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -20,6 +20,7 @@ from dataclasses import asdict from pathlib import Path +from renku.command.doctor import doctor_check_command from renku.command.migrate import MigrationCheckResult, migrations_check from renku.core.errors import AuthenticationError, MinimumVersionError, ProjectNotFound, RenkuException from renku.core.util.contexts import renku_project_context @@ -54,16 +55,12 @@ def _fast_op_without_cache(self): with tempfile.TemporaryDirectory() as tempdir: tempdir_path = Path(tempdir) - self.git_api_provider.download_files_from_api( - [ 
- ".renku/metadata/root", - ".renku/metadata/project", - ".renku/metadata.yml", - ".renku/renku.ini", + files=[ "Dockerfile", ], - tempdir_path, + folders=[".renku"], + target_folder=tempdir_path, remote=self.ctx["git_url"], ref=self.request_data.get("ref", None), token=self.user_data.get("token", None), @@ -74,7 +71,11 @@ def _fast_op_without_cache(self): def renku_op(self): """Renku operation for the controller.""" try: - return migrations_check().build().execute().output + migrations_check_result = migrations_check().build().execute().output + doctor_result = doctor_check_command(with_fix=False).build().execute(fix=False, force=False).output + migrations_check_result.core_compatibility_status.fixes_available = doctor_result[1] + migrations_check_result.core_compatibility_status.issues_found = doctor_result[2] + return migrations_check_result except MinimumVersionError as e: return MigrationCheckResult.from_minimum_version_error(e) diff --git a/renku/ui/service/gateways/gitlab_api_provider.py b/renku/ui/service/gateways/gitlab_api_provider.py index 8d14f1b00c..84499c4308 100644 --- a/renku/ui/service/gateways/gitlab_api_provider.py +++ b/renku/ui/service/gateways/gitlab_api_provider.py @@ -16,6 +16,8 @@ # limitations under the License. """Git APi provider interface.""" +import tarfile +import tempfile from pathlib import Path from typing import List, Optional, Union @@ -43,13 +45,23 @@ class GitlabAPIProvider(IGitAPIProvider): def download_files_from_api( self, - paths: List[Union[Path, str]], + files: List[Union[Path, str]], + folders: List[Union[Path, str]], target_folder: Union[Path, str], remote: str, token: str, ref: Optional[str] = None, ): - """Download files through a remote Git API.""" + """Download files through a remote Git API. + + Args: + files(List[Union[Path, str]]): Files to download. + folders(List[Union[Path, str]]): Folders to download. + target_folder(Union[Path, str]): Destination to save downloads to. + remote(str): Git remote URL. 
+ token(str): Gitlab API token. + ref(Optional[str]): Git reference (Default value = None). + """ if not ref: ref = "HEAD" @@ -73,18 +85,33 @@ def download_files_from_api( else: raise - result_paths = [] - - for path in paths: - full_path = target_folder / path + for file in files: + full_path = target_folder / file full_path.parent.mkdir(parents=True, exist_ok=True) try: with open(full_path, "wb") as f: - project.files.raw(file_path=str(path), ref=ref, streamed=True, action=f.write) - - result_paths.append(full_path) + project.files.raw(file_path=str(file), ref=ref, streamed=True, action=f.write) except gitlab.GitlabGetError: delete_dataset_file(full_path) continue + + for folder in folders: + with tempfile.NamedTemporaryFile() as f: + project.repository_archive(path=str(folder), sha=ref, streamed=True, action=f.write, format="tar.gz") + f.seek(0) + with tarfile.open(fileobj=f) as archive: + archive.extractall( + path=target_folder, members=self._set_tarfile_members_path_relative_to_base(archive) + ) + + def _set_tarfile_members_path_relative_to_base(self, archive: tarfile.TarFile): + """Changes member paths in a tar file from `folder/*` to `./*`.""" + if not archive.getmembers(): + return + base_path = archive.getmembers()[0].path.split("/")[0] + path_len = len(base_path) + for member in archive.getmembers(): + if member.path.startswith(base_path): + member.path = "." 
+ member.path[path_len:] diff --git a/renku/ui/service/interfaces/git_api_provider.py b/renku/ui/service/interfaces/git_api_provider.py index e0e4ce9d46..85ab378691 100644 --- a/renku/ui/service/interfaces/git_api_provider.py +++ b/renku/ui/service/interfaces/git_api_provider.py @@ -26,7 +26,8 @@ class IGitAPIProvider(ABC): def download_files_from_api( self, - paths: List[Union[Path, str]], + files: List[Union[Path, str]], + folders: List[Union[Path, str]], target_folder: Union[Path, str], remote: str, token: str, diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index d38c8d3d73..e75530b5ce 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -288,6 +288,12 @@ class ProjectCompatibilityResponseDetail(Schema): migration_required = fields.Boolean( metadata={"description": "Whether or not a metadata migration is required to be compatible with this service."} ) + fixes_available = fields.Boolean( + metadata={ + "description": "Whether automated fixes of metadata (beyond those done during migration) are available." 
+ } + ) + issues_found = fields.List(fields.Str, metadata={"description": "Metadata issues found on project."}) class ProjectCompatibilityResponse(OneOfSchema): diff --git a/tests/service/controllers/utils/test_remote_project.py b/tests/service/controllers/utils/test_remote_project.py index 49e93ac96c..da8721960a 100644 --- a/tests/service/controllers/utils/test_remote_project.py +++ b/tests/service/controllers/utils/test_remote_project.py @@ -101,12 +101,6 @@ def test_remote_project_context(): assert result.core_renku_version == renku.__version__ assert result.project_renku_version == "pre-0.11.0" assert result.core_compatibility_status.migration_required is True - assert result.template_status.newer_template_available is False - assert result.template_status.project_template_version is None - assert result.template_status.latest_template_version is None - assert result.template_status.template_source is None - assert result.template_status.template_ref is None - assert result.template_status.template_id is None - assert result.template_status.automated_template_update is True + assert isinstance(result.template_status, ValueError) assert result.dockerfile_renku_status.automated_dockerfile_update is False assert result.project_supported is True diff --git a/tests/service/views/v1_0/test_cache_views_1_0.py b/tests/service/views/v1_0/test_cache_views_1_0.py index 5186910963..c9d84d7380 100644 --- a/tests/service/views/v1_0/test_cache_views_1_0.py +++ b/tests/service/views/v1_0/test_cache_views_1_0.py @@ -53,7 +53,7 @@ def test_check_migrations_local_1_0(svc_client_setup): response = svc_client.get("/1.0/cache.migrations_check", query_string=dict(project_id=project_id), headers=headers) assert 200 == response.status_code - assert response.json["result"]["core_compatibility_status"]["migration_required"] + assert not response.json["result"]["core_compatibility_status"]["migration_required"] assert not 
response.json["result"]["template_status"]["newer_template_available"] assert not response.json["result"]["dockerfile_renku_status"]["automated_dockerfile_update"] assert response.json["result"]["project_supported"] From fca39f51f6c3aa62caeaebab369769e4d3c9c09a Mon Sep 17 00:00:00 2001 From: Lorenzo Cavazzi <43481553+lorenzo-cavazzi@users.noreply.github.com> Date: Fri, 26 May 2023 17:29:43 +0200 Subject: [PATCH 09/10] chore: add documentation reference to UpdateProject error (#3485) --- renku/ui/service/errors.py | 1 + 1 file changed, 1 insertion(+) diff --git a/renku/ui/service/errors.py b/renku/ui/service/errors.py index a8f3e58e20..434a0f9b7a 100644 --- a/renku/ui/service/errors.py +++ b/renku/ui/service/errors.py @@ -527,6 +527,7 @@ class ProgramUpdateProjectError(ServiceError): code = SVC_ERROR_USER + 140 userMessage = "Our servers could not update the project succesfully. You could try doing it manually in a session." devMessage = "Updating the target project failed. Check the Sentry exception for further details." 
+ userReference = "https://renku.readthedocs.io/en/stable/how-to-guides/general/upgrading-renku.html" def __init__(self, exception=None): super().__init__(exception=exception) From 634427732db74be1d14bba364f4427639fc05ab3 Mon Sep 17 00:00:00 2001 From: Ralf Grubenmann Date: Wed, 31 May 2023 10:09:22 +0200 Subject: [PATCH 10/10] address comments --- renku/command/checks/activities.py | 3 ++- renku/command/checks/datasets.py | 9 +++++---- renku/command/checks/migration.py | 2 +- renku/command/checks/project.py | 2 +- renku/command/checks/storage.py | 2 +- renku/command/checks/validate_shacl.py | 5 +++-- renku/command/checks/workflow.py | 6 ++++-- renku/ui/service/gateways/gitlab_api_provider.py | 14 +------------- 8 files changed, 18 insertions(+), 25 deletions(-) diff --git a/renku/command/checks/activities.py b/renku/command/checks/activities.py index 37f2cda48e..b220ececf7 100644 --- a/renku/command/checks/activities.py +++ b/renku/command/checks/activities.py @@ -81,7 +81,8 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_): _: keyword arguments. Returns: - Tuple[bool, Optional[str]]: Tuple of whether there are activities with invalid dates a string of the problem. + Tuple[bool, Optional[str]]: Tuple of whether there are activities with invalid dates, if they can be + automatically fixed and a string of the problem. """ invalid_activities = [] diff --git a/renku/command/checks/datasets.py b/renku/command/checks/datasets.py index 52cc04a197..511bf9405a 100644 --- a/renku/command/checks/datasets.py +++ b/renku/command/checks/datasets.py @@ -38,7 +38,8 @@ def check_dataset_old_metadata_location(**_): _: keyword arguments. Returns: - Tuple of whether dataset metadata location is valid and string of found problems. + Tuple of whether dataset metadata location is valid, if an automated fix is available and string of + found problems. 
""" old_metadata = get_pre_0_3_4_datasets_metadata() @@ -64,7 +65,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple of whether all dataset files are there and string of found problems. + Tuple of whether all dataset files are there, if an automated fix is available and string of found problems. """ missing = defaultdict(list) @@ -104,7 +105,7 @@ def check_invalid_datasets_derivation(fix, dataset_gateway: IDatasetGateway, **_ _: keyword arguments. Returns: - Tuple of whether dataset derivations are valid and string of found problems. + Tuple of whether dataset derivations are valid, if an automated fix is available and string of found problems. """ invalid_datasets = [] @@ -208,7 +209,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple of whether no external files are found and string of found problems. + Tuple of whether no external files are found, if an automated fix is available and string of found problems. """ from renku.core.dataset.dataset import file_unlink diff --git a/renku/command/checks/migration.py b/renku/command/checks/migration.py index 13b81969f7..5ac8d692e3 100644 --- a/renku/command/checks/migration.py +++ b/renku/command/checks/migration.py @@ -26,7 +26,7 @@ def check_migration(**_): _: keyword arguments. Returns: - Tuple of whether project metadata is up to date and string of found problems. + Tuple of whether project metadata is up to date, if an automated fix is available and string of found problems. 
""" if is_migration_required(): problems = WARNING + "Project requires migration.\n" + ' (use "renku migrate" to fix this issue)\n' diff --git a/renku/command/checks/project.py b/renku/command/checks/project.py index 8e333c5ef9..85485c01ee 100644 --- a/renku/command/checks/project.py +++ b/renku/command/checks/project.py @@ -33,7 +33,7 @@ def check_project_id_group(fix, project_gateway: IProjectGateway, **_): _: keyword arguments. Returns: - Tuple of whether project id is valid. + Tuple of whether project id is valid, if an automated fix is available and string of found problems. """ current_project = project_gateway.get_project() diff --git a/renku/command/checks/storage.py b/renku/command/checks/storage.py index e7a85cbb0d..7deb79d548 100644 --- a/renku/command/checks/storage.py +++ b/renku/command/checks/storage.py @@ -26,7 +26,7 @@ def check_lfs_info(**_): _: keyword arguments. Returns: - Tuple of whether project structure is valid and string of found problems. + Tuple of whether project structure is valid, if an automated fix is available and string of found problems. """ if not check_external_storage(): return True, False, None diff --git a/renku/command/checks/validate_shacl.py b/renku/command/checks/validate_shacl.py index 6db02275f2..69c031a024 100644 --- a/renku/command/checks/validate_shacl.py +++ b/renku/command/checks/validate_shacl.py @@ -69,7 +69,7 @@ def check_project_structure(**_): _: keyword arguments. Returns: - Tuple of whether project structure is valid and string of found problems. + Tuple of whether project structure is valid, if an automated fix is available and string of found problems. """ data = ProjectSchema().dump(project_context.project) @@ -92,7 +92,8 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): _: keyword arguments. Returns: - Tuple[bool, str]: Tuple of whether structure is valid and of problems that might have been found. 
+ Tuple[bool, str]: Tuple of whether structure is valid, if an automated fix is available and of problems + that might have been found. """ ok = True diff --git a/renku/command/checks/workflow.py b/renku/command/checks/workflow.py index a02647c213..8a8b4b968f 100644 --- a/renku/command/checks/workflow.py +++ b/renku/command/checks/workflow.py @@ -35,7 +35,8 @@ def check_activity_catalog(fix, force, **_) -> Tuple[bool, bool, Optional[str]]: _: keyword arguments. Returns: - Tuple of whether the activity-catalog needs to be rebuilt and a string of found problems. + Tuple of whether the activity-catalog needs to be rebuilt, if an automated fix is available and a string of + found problems. """ database = project_context.database activity_catalog = database["activity-catalog"] @@ -71,7 +72,8 @@ def check_plan_modification_date(fix, plan_gateway: IPlanGateway, **_) -> Tuple[ _: keyword arguments. Returns: - Tuple[bool, Optional[str]]: Tuple of whether there are plans without modification date and a string of their IDs + Tuple[bool, Optional[str]]: Tuple of whether there are plans without modification date, if an automated fix is + available and a string of their IDs """ plans: List[AbstractPlan] = plan_gateway.get_all_plans() diff --git a/renku/ui/service/gateways/gitlab_api_provider.py b/renku/ui/service/gateways/gitlab_api_provider.py index 63b59b3392..5cbb7f311d 100644 --- a/renku/ui/service/gateways/gitlab_api_provider.py +++ b/renku/ui/service/gateways/gitlab_api_provider.py @@ -102,16 +102,4 @@ def download_files_from_api( project.repository_archive(path=str(folder), sha=branch, streamed=True, action=f.write, format="tar.gz") f.seek(0) with tarfile.open(fileobj=f) as archive: - archive.extractall( - path=target_folder, members=self._set_tarfile_members_path_relative_to_base(archive) - ) - - def _set_tarfile_members_path_relative_to_base(self, archive: tarfile.TarFile): - """Changes member paths in a tar file from `folder/*` to `./*`.""" - if not 
archive.getmembers():
-            return
-        base_path = archive.getmembers()[0].path.split("/")[0]
-        path_len = len(base_path)
-        for member in archive.getmembers():
-            if member.path.startswith(base_path):
-                member.path = "." + member.path[path_len:]
+                    archive.extractall(path=target_folder)  # NOTE(review): archive comes from a remote GitLab API; consider filter="data" (PEP 706, Python >=3.11.4) to guard against member path traversal