feat(service): adds endpoints for unlinking files from a dataset (#1314)
jsam committed Jun 15, 2020
1 parent 4ddc1c2 commit 1b78b16
Showing 6 changed files with 190 additions and 3 deletions.
26 changes: 26 additions & 0 deletions conftest.py
@@ -40,6 +40,7 @@
from _pytest.monkeypatch import MonkeyPatch
from click.testing import CliRunner
from git import Repo
from tests.utils import make_dataset_add_payload
from walrus import Database

IT_PROTECTED_REMOTE_REPO_URL = os.getenv(
@@ -1031,6 +1032,31 @@ def service_job(svc_client, mock_redis):
os.environ.update(old_environ)


@pytest.fixture
def unlink_file_setup(svc_client_with_repo):
    """Setup for testing unlinking of a file."""
    svc_client, headers, project_id, _ = svc_client_with_repo

    payload = make_dataset_add_payload(
        project_id,
        [('file_path', 'README.md')],
    )
    response = svc_client.post(
        '/datasets.add',
        data=json.dumps(payload),
        headers=headers,
    )
    assert 200 == response.status_code

    unlink_payload = {
        'project_id': project_id,
        'short_name': response.json['result']['short_name'],
        'include_filters': [response.json['result']['files'][0]['file_path']]
    }

    yield svc_client, headers, unlink_payload


@pytest.fixture
def dummy_run_plugin_hook():
    """A dummy hook to be used with the renku run plugin."""
2 changes: 2 additions & 0 deletions renku/core/commands/dataset.py
@@ -397,6 +397,8 @@ def file_unlink(

    dataset.to_yaml()

    return records


@pass_local_client(
    clean=False,
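
The only change to the command layer is that file_unlink now returns the records it removed, so callers can report exactly what was unlinked. A minimal sketch of consuming the return value, assuming it runs inside a Renku project and that each returned record exposes a path attribute, as the service view below relies on; the dataset name and filter are placeholders:

from renku.core.commands.dataset import file_unlink

# Run from within a Renku project; 'my-dataset' and 'README.md' are placeholders.
records = file_unlink(
    short_name='my-dataset',
    include=['README.md'],
    yes=True,
    interactive=False,
    commit_message='unlink README.md',
)
unlinked_paths = [record.path for record in records]
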
4 changes: 3 additions & 1 deletion renku/service/entrypoint.py
@@ -38,7 +38,7 @@
from renku.service.views.datasets import DATASET_BLUEPRINT_TAG, \
    add_file_to_dataset_view, create_dataset_view, dataset_blueprint, \
    edit_dataset_view, import_dataset_view, list_dataset_files_view, \
    list_datasets_view
    list_datasets_view, unlink_file_view
from renku.service.views.jobs import JOBS_BLUEPRINT_TAG, jobs_blueprint, \
    list_jobs
from renku.service.views.templates import TEMPLATES_BLUEPRINT_TAG, \
@@ -108,8 +108,10 @@ def build_routes(app):
    docs.register(create_dataset_view, blueprint=DATASET_BLUEPRINT_TAG)
    docs.register(import_dataset_view, blueprint=DATASET_BLUEPRINT_TAG)
    docs.register(edit_dataset_view, blueprint=DATASET_BLUEPRINT_TAG)
    docs.register(unlink_file_view, blueprint=DATASET_BLUEPRINT_TAG)

    docs.register(list_jobs, blueprint=JOBS_BLUEPRINT_TAG)

    docs.register(
        read_manifest_from_template, blueprint=TEMPLATES_BLUEPRINT_TAG
    )
37 changes: 37 additions & 0 deletions renku/service/serializers/datasets.py
@@ -225,3 +225,40 @@ class DatasetEditResponseRPC(JsonRPCResponse):
"""RPC schema for a dataset import."""

result = fields.Nested(DatasetEditResponse)


class DatasetUnlinkRequest(Schema):
    """Dataset unlink file request."""

    project_id = fields.String(required=True)
    short_name = fields.String(required=True)

    include_filters = fields.List(fields.String())
    exclude_filters = fields.List(fields.String())

    commit_message = fields.String()

    @post_load()
    def check_filters(self, data, **kwargs):
        """Check that at least one filter is specified."""
        include_filter = data.get('include_filters')
        exclude_filter = data.get('exclude_filters')

        if not include_filter and not exclude_filter:
            raise marshmallow.ValidationError(
                'one of the filters must be specified'
            )

        return data


class DatasetUnlinkResponse(Schema):
    """Dataset unlink files response."""

    unlinked = fields.List(fields.String())


class DatasetUnlinkResponseRPC(JsonRPCResponse):
    """Dataset unlink files RPC response."""

    result = fields.Nested(DatasetUnlinkResponse)
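
For reference, a minimal sketch of how the new request schema behaves, assuming it is importable from renku.service.serializers.datasets as added above; the project id and dataset name below are placeholders:

import marshmallow

from renku.service.serializers.datasets import DatasetUnlinkRequest

schema = DatasetUnlinkRequest()

# A payload with at least one filter loads cleanly.
data = schema.load({
    'project_id': 'cached-project-id',  # placeholder cache identifier
    'short_name': 'my-dataset',         # placeholder dataset short name
    'include_filters': ['README.md'],
})

# Omitting both filters trips the post_load check.
try:
    schema.load({'project_id': 'cached-project-id', 'short_name': 'my-dataset'})
except marshmallow.ValidationError as err:
    print(err.messages)  # {'_schema': ['one of the filters must be specified']}
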
62 changes: 60 additions & 2 deletions renku/service/views/datasets.py
@@ -25,7 +25,7 @@
from git import GitCommandError, Repo

from renku.core.commands.dataset import add_file, create_dataset, \
    edit_dataset, list_datasets, list_files
    edit_dataset, file_unlink, list_datasets, list_files
from renku.core.commands.save import repo_sync
from renku.core.models import json
from renku.core.utils.contexts import chdir
@@ -39,7 +39,8 @@
    DatasetAddResponseRPC, DatasetCreateRequest, DatasetCreateResponseRPC, \
    DatasetEditRequest, DatasetEditResponseRPC, DatasetFilesListRequest, \
    DatasetFilesListResponseRPC, DatasetImportRequest, \
    DatasetImportResponseRPC, DatasetListRequest, DatasetListResponseRPC
    DatasetImportResponseRPC, DatasetListRequest, DatasetListResponseRPC, \
    DatasetUnlinkRequest, DatasetUnlinkResponseRPC
from renku.service.views import error_response, result_response
from renku.service.views.decorators import accepts_json, handle_base_except, \
    handle_git_except, handle_renku_except, handle_validation_except, \
@@ -263,7 +264,10 @@ def create_dataset_view(user, cache):
    provide_automatic_options=False,
)
@handle_base_except
@handle_git_except
@handle_renku_except
@handle_validation_except
@accepts_json
@requires_cache
@requires_identity
def import_dataset_view(user_data, cache):
@@ -347,3 +351,57 @@ def edit_dataset_view(user_data, cache):
            'warnings': warnings
        }
    )


@use_kwargs(DatasetUnlinkRequest)
@marshal_with(DatasetUnlinkResponseRPC)
@header_doc('Unlink a file from a dataset', tags=(DATASET_BLUEPRINT_TAG, ))
@dataset_blueprint.route(
    '/datasets.unlink',
    methods=['POST'],
    provide_automatic_options=False,
)
@handle_base_except
@handle_git_except
@handle_renku_except
@handle_validation_except
@accepts_json
@requires_cache
@requires_identity
def unlink_file_view(user_data, cache):
    """Unlink a file from a dataset."""
    ctx = DatasetUnlinkRequest().load(request.json)

    include = ctx.get('include_filters')
    exclude = ctx.get('exclude_filters')

    user = cache.ensure_user(user_data)
    project = cache.get_project(user, ctx['project_id'])

    if ctx.get('commit_message') is None:
        if include and exclude:
            filters = '-I {0} -X {1}'.format(include, exclude)
        elif not include and exclude:
            filters = '-X {0}'.format(exclude)
        else:
            filters = '-I {0}'.format(include)

        ctx['commit_message'] = (
            'service: unlink dataset {0} {1}'.format(
                ctx['short_name'], filters
            )
        )

    with chdir(project.abs_path):
        records = file_unlink(
            short_name=ctx['short_name'],
            include=include,
            exclude=exclude,
            yes=True,
            interactive=False,
            commit_message=ctx['commit_message']
        )

    unlinked = [record.path for record in records]

    return result_response(DatasetUnlinkResponseRPC(), {'unlinked': unlinked})
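
Once the blueprint is registered, the route can be exercised over HTTP. A minimal sketch using the requests library against a running renku service; the base URL and identifiers are placeholders, and real calls also need the identity headers expected by @requires_identity:

import json

import requests

SERVICE_URL = 'http://localhost:8080/api/renku'  # placeholder deployment URL
headers = {'Content-Type': 'application/json'}   # plus the service identity/auth headers

payload = {
    'project_id': 'cached-project-id',  # id of the project in the service cache
    'short_name': 'my-dataset',
    'include_filters': ['README.md'],
}

response = requests.post(
    SERVICE_URL + '/datasets.unlink',
    data=json.dumps(payload),
    headers=headers,
)
print(response.json())  # e.g. {'result': {'unlinked': ['README.md']}}
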
62 changes: 62 additions & 0 deletions tests/service/views/test_dataset_views.py
@@ -1115,3 +1115,65 @@ def test_protected_branch(svc_protected_repo):

    assert {'result'} == set(response.json.keys())
    assert 'master' != response.json['result']['remote_branch']


@pytest.mark.integration
@pytest.mark.service
@flaky(max_runs=10, min_passes=1)
def test_unlink_file(unlink_file_setup):
    """Check unlinking of a file from a dataset."""
    svc_client, headers, unlink_payload = unlink_file_setup

    response = svc_client.post(
        '/datasets.unlink',
        data=json.dumps(unlink_payload),
        headers=headers,
    )

    assert {'result': {'unlinked': ['README.md']}} == response.json


@pytest.mark.integration
@pytest.mark.service
@flaky(max_runs=10, min_passes=1)
def test_unlink_file_no_filter_error(unlink_file_setup):
    """Check that an error is returned when no filters are specified."""
    svc_client, headers, unlink_payload = unlink_file_setup
    unlink_payload.pop('include_filters')

    response = svc_client.post(
        '/datasets.unlink',
        data=json.dumps(unlink_payload),
        headers=headers,
    )

    assert {
        'error': {
            'code': -32602,
            'reason': {
                '_schema': ['one of the filters must be specified']
            }
        }
    } == response.json


@pytest.mark.integration
@pytest.mark.service
@flaky(max_runs=10, min_passes=1)
def test_unlink_file_exclude(unlink_file_setup):
    """Check unlinking a file from a dataset with an exclude filter."""
    svc_client, headers, unlink_payload = unlink_file_setup
    unlink_payload['exclude_filters'] = unlink_payload.pop('include_filters')

    response = svc_client.post(
        '/datasets.unlink',
        data=json.dumps(unlink_payload),
        headers=headers,
    )

    assert {
        'error': {
            'code': -32100,
            'reason': 'Invalid parameter value - No records found.'
        }
    } == response.json
