feat(service): add support for doctor check in cache migration endpoint. #3384

Merged
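Throughout the check modules in this diff, every doctor check's return value grows from a two-element tuple `(passed, problems)` to a three-element tuple; judging by the pattern of returns below, the new middle boolean appears to indicate whether the reported problem can be fixed automatically (for example by rerunning `renku doctor` with `--fix`). A minimal sketch of the new convention, with illustrative names that are not part of this PR:

```python
from typing import List, Optional, Tuple


def find_problems() -> List[str]:
    """Stand-in for the real metadata inspection a check would perform."""
    return []


def check_example(fix: bool, **_) -> Tuple[bool, bool, Optional[str]]:
    """Illustrative doctor check following the three-tuple convention in this diff.

    Returns:
        Tuple of (check passed, an automated fix is available, problems message or None).
    """
    problems = find_problems()

    if not problems:
        return True, False, None

    if fix:
        # A fixable check repairs the issue itself and then reports success,
        # mirroring check_activity_dates below.
        return True, False, None

    return False, True, "Use 'renku doctor --fix' to fix these:\n\t" + "\n\t".join(problems)
```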
24 changes: 12 additions & 12 deletions poetry.lock

Some generated files are not rendered by default.

10 changes: 5 additions & 5 deletions renku/command/checks/activities.py
@@ -58,7 +58,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_):
wrong_activities = []

if not wrong_activities:
return True, None
return True, False, None

problems = (
WARNING
@@ -68,7 +68,7 @@ def check_migrated_activity_ids(fix, activity_gateway: IActivityGateway, **_):
+ "\n"
)

return False, problems
return False, True, problems


@inject.autoparams("activity_gateway")
@@ -95,7 +95,7 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_):
invalid_activities.append(activity)

if not invalid_activities:
return True, None
return True, False, None
if not fix:
ids = [a.id for a in invalid_activities]
message = (
@@ -104,13 +104,13 @@ def check_activity_dates(fix, activity_gateway: IActivityGateway, **_):
+ "\n\t"
+ "\n\t".join(ids)
)
return False, message
return False, True, message

fix_activity_dates(activities=invalid_activities)
project_context.database.commit()
communication.info("Activity dates were fixed")

return True, None
return True, False, None


def fix_activity_dates(activities):
22 changes: 11 additions & 11 deletions renku/command/checks/datasets.py
@@ -43,7 +43,7 @@ def check_dataset_old_metadata_location(**_):
old_metadata = get_pre_0_3_4_datasets_metadata()

if not old_metadata:
return True, None
return True, False, None

problems = (
WARNING + "There are metadata files in the old location."
@@ -52,7 +52,7 @@ def check_dataset_old_metadata_location(**_):
+ "\n"
)

return False, problems
return False, False, problems


@inject.autoparams("dataset_gateway")
@@ -79,7 +79,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_):
missing[dataset.name].append(file_.entity.path)

if not missing:
return True, None
return True, False, None

problems = WARNING + "There are missing files in datasets."

@@ -91,7 +91,7 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_):
+ "\n\t ".join(click.style(path, fg="red") for path in files)
)

return False, problems
return False, False, problems


@inject.autoparams("dataset_gateway")
@@ -130,7 +130,7 @@ def fix_or_report(dataset):
break

if not invalid_datasets:
return True, None
return True, False, None

problems = (
WARNING
@@ -140,7 +140,7 @@ def fix_or_report(dataset):
+ "\n"
)

return False, problems
return False, True, problems


@inject.autoparams("dataset_gateway")
@@ -193,9 +193,9 @@ def check_dataset_files_outside_datadir(fix, dataset_gateway: IDatasetGateway, *
+ "\n\t".join(click.style(file.entity.path, fg="yellow") for file in invalid_files)
+ "\n"
)
return False, problems
return False, True, problems

return True, None
return True, False, None


@inject.autoparams("dataset_gateway")
@@ -222,7 +222,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_):
datasets[dataset.name].append(file)

if not external_files:
return True, None
return True, False, None

external_files_str = "\n\t".join(sorted(external_files))

@@ -232,7 +232,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_):
"Use 'renku dataset rm' or rerun 'renku doctor' with '--fix' flag to remove them:\n\t"
f"{external_files_str}\n"
)
return False, problems
return False, True, problems

communication.info(
"The following external files were deleted from the project. You need to add them later manually using a "
@@ -242,4 +242,4 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_):
for name, files in datasets.items():
file_unlink(name=name, yes=True, dataset_files=files)

return True, None
return True, False, None
8 changes: 4 additions & 4 deletions renku/command/checks/githooks.py
@@ -41,7 +41,7 @@ def check_git_hooks_installed(**_):
hook_path = get_hook_path(name=hook, path=project_context.path)
if not hook_path.exists():
message = WARNING + "Git hooks are not installed. " 'Use "renku githooks install" to install them. \n'
return False, message
return False, False, message

with hook_path.open() as file_:
actual_hook = _extract_renku_hook(file_)
@@ -50,16 +50,16 @@ def check_git_hooks_installed(**_):

if not expected_hook:
message = WARNING + "Cannot check for existence of Git hooks.\n"
return False, message
return False, False, message

if actual_hook != expected_hook:
message = (
WARNING + "Git hooks are outdated or not installed.\n"
' (use "renku githooks install --force" to update them) \n'
)
return False, message
return False, False, message

return True, None
return True, False, None


def _extract_renku_hook(file):
4 changes: 2 additions & 2 deletions renku/command/checks/migration.py
@@ -35,6 +35,6 @@ def check_migration(**_):
ERROR + "Project version is not supported by your version of Renku.\n" + " (upgrade your Renku version)\n"
)
else:
return True, None
return True, False, None

return False, problems
return False, False, problems
18 changes: 11 additions & 7 deletions renku/command/checks/project.py
@@ -42,21 +42,25 @@ def check_project_id_group(fix, project_gateway: IProjectGateway, **_):
)

if namespace is None or name is None:
return True, None
return True, False, None

generated_id = Project.generate_id(namespace=namespace, name=name)

if generated_id == current_project.id:
return True, None
return True, False, None

if fix:
communication.info(f"Fixing project id '{current_project.id}' -> '{generated_id}'")
current_project.id = generated_id
project_gateway.update_project(current_project)
return True, None
return True, False, None

return True, (
WARNING
+ "Project id doesn't match id created based on the current Git remote (use 'renku doctor --fix' to fix it):"
f"\n\t'{current_project.id}' -> '{generated_id}'"
return (
False,
True,
(
WARNING
+ "Project id doesn't match id based on the current Git remote (use 'renku doctor --fix' to fix it):"
f"\n\t'{current_project.id}' -> '{generated_id}'"
),
)
8 changes: 4 additions & 4 deletions renku/command/checks/storage.py
@@ -29,19 +29,19 @@ def check_lfs_info(**_):
Tuple of whether project structure is valid and string of found problems.
"""
if not check_external_storage():
return True, None
return True, False, None

files = check_lfs_migrate_info()

if not files:
return True, None
return True, False, None

message = (
WARNING
+ "Git history contains large files - consider moving them "
+ "to external storage like git LFS\n\t"
+ "to external storage like git LFS using 'renku storage migrate'\n\t"
+ "\n\t".join(files)
+ "\n"
)

return False, message
return False, False, message
17 changes: 4 additions & 13 deletions renku/command/checks/validate_shacl.py
@@ -16,15 +16,13 @@
"""Check KG structure using SHACL."""

import pyld
import yaml

from renku.command.command_builder import inject
from renku.command.schema.dataset import dump_dataset_as_jsonld
from renku.command.schema.project import ProjectSchema
from renku.command.util import WARNING
from renku.core.interface.dataset_gateway import IDatasetGateway
from renku.core.util.shacl import validate_graph
from renku.core.util.yaml import NoDatesSafeLoader
from renku.domain_model.project_context import project_context


@@ -78,11 +76,11 @@ def check_project_structure(**_):
conform, graph, t = _check_shacl_structure(data)

if conform:
return True, None
return True, False, None

problems = f"{WARNING}Invalid structure of project metadata\n\t{_shacl_graph_to_string(graph)}"

return False, problems
return False, False, problems


@inject.autoparams("dataset_gateway")
@@ -116,16 +114,9 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_):
problems.append(f"{dataset.name}\n\t{_shacl_graph_to_string(graph)}\n")

if ok:
return True, None
return True, False, None

return False, "\n".join(problems)


def _check_shacl_structure_for_path(path):
with path.open(mode="r") as fp:
data = yaml.load(fp, Loader=NoDatesSafeLoader) or {}

return _check_shacl_structure(data)
return False, False, "\n".join(problems)


def _check_shacl_structure(data):
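The caller side is not part of this diff, but a consumer of the new tuples (for instance the doctor command or the service's cache migration endpoint named in the title) would presumably aggregate them along these lines. This is a hypothetical sketch, not code from the PR:

```python
from typing import Callable, List, Optional, Tuple

# A doctor check, per the convention introduced in this PR:
# (passed, automated fix available, problems message or None).
Check = Callable[..., Tuple[bool, bool, Optional[str]]]


def run_checks(checks: List[Check], fix: bool = False) -> Tuple[bool, bool, str]:
    """Hypothetical aggregation of check results; the real caller is outside this diff."""
    all_passed = True
    fix_available = False
    messages: List[str] = []

    for check in checks:
        passed, fixable, problems = check(fix=fix)
        all_passed = all_passed and passed
        fix_available = fix_available or fixable
        if problems:
            messages.append(problems)

    return all_passed, fix_available, "\n".join(messages)
```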