Jkmarx/move node related to data set manager #2911

Merged: 4 commits, merged on Jul 27, 2018
4 changes: 2 additions & 2 deletions refinery/config/urls.py
@@ -11,15 +11,15 @@
from config.utils import RouterCombiner
from core.api import (AnalysisResource, DataSetResource, ExtendedGroupResource,
GroupManagementResource, InvitationResource,
NodeResource, ProjectResource, StatisticsResource,
ProjectResource, StatisticsResource,
UserAuthenticationResource, UserProfileResource,
WorkflowResource)
from core.forms import RegistrationFormWithCustomFields
from core.models import AuthenticationFormUsernameOrEmail
from core.urls import core_router
from core.views import CustomRegistrationView
from data_set_manager.api import (AssayResource, AttributeResource,
InvestigationResource,
InvestigationResource, NodeResource,
ProtocolReferenceParameterResource,
ProtocolReferenceResource, ProtocolResource,
PublicationResource, StudyResource)
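For orientation: the tastypie resources imported in config/urls.py are registered against an Api instance elsewhere in that file (outside this hunk). Below is a minimal sketch of what the wiring looks like after the move, assuming the standard tastypie registration pattern; the v1_api name is hypothetical.

from tastypie.api import Api

from data_set_manager.api import NodeResource  # NodeResource now lives in data_set_manager

v1_api = Api(api_name='v1')      # hypothetical instance name, not shown in this hunk
v1_api.register(NodeResource())  # exposes node/ endpoints once v1_api.urls is included in urlpatterns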
67 changes: 1 addition & 66 deletions refinery/core/api.py
@@ -40,7 +40,7 @@

from data_set_manager.api import (AssayResource, InvestigationResource,
StudyResource)
from data_set_manager.models import Attribute, Node, Study
from data_set_manager.models import Study
from file_store.models import FileStoreItem
from .models import (Analysis, DataSet, ExtendedGroup, GroupManagement,
Invitation, Project, ResourceStatistics, Tutorials,
@@ -1056,71 +1056,6 @@ def alter_list_data_to_serialize(self, request, data):
return data


class NodeResource(ModelResource):
parents = fields.ToManyField('core.api.NodeResource', 'parents')
children = fields.ToManyField('core.api.NodeResource', 'children')
study = fields.ToOneField('data_set_manager.api.StudyResource', 'study')
assay = fields.ToOneField('data_set_manager.api.AssayResource', 'assay',
null=True)
attributes = fields.ToManyField(
'data_set_manager.api.AttributeResource',
attribute=lambda bundle: (
Attribute.objects
.exclude(value__isnull=True)
.exclude(value__exact='')
.filter(node=bundle.obj, subtype='organism')
), use_in='all', full=True, null=True
)

class Meta:
queryset = Node.objects.all()
resource_name = 'node'
detail_uri_name = 'uuid' # for using UUIDs instead of pk in URIs
# required for public data set access by anonymous users
authentication = Authentication()
authorization = Authorization()
allowed_methods = ['get']
fields = ['analysis_uuid', 'assay', 'attributes', 'children',
'file_url', 'file_uuid', 'name', 'parents', 'study',
'subanalysis', 'type', 'uuid']
filtering = {
'uuid': ALL,
'study': ALL_WITH_RELATIONS,
'assay': ALL_WITH_RELATIONS,
'file_uuid': ALL,
'type': ALL
}
limit = 0
max_limit = 0

def prepend_urls(self):
return [
url((r"^(?P<resource_name>%s)/(?P<uuid>" + UUID_RE + r")/$") %
self._meta.resource_name,
self.wrap_view('dispatch_detail'),
name="api_dispatch_detail"),
]

def dehydrate(self, bundle):
# return download URL of file if a file is associated with the node
try:
file_item = FileStoreItem.objects.get(uuid=bundle.obj.file_uuid)
except AttributeError:
logger.warning("No UUID provided")
bundle.data['file_url'] = None
bundle.data['file_import_status'] = None
except FileStoreItem.DoesNotExist:
logger.warning(
"Unable to find file store item with UUID '%s'",
bundle.obj.file_uuid)
bundle.data['file_url'] = None
bundle.data['file_import_status'] = None
else:
bundle.data['file_url'] = file_item.get_datafile_url()
bundle.data['file_import_status'] = file_item.get_import_status()
return bundle


class StatisticsResource(Resource):
user = fields.IntegerField(attribute='user')
group = fields.IntegerField(attribute='group')
12 changes: 0 additions & 12 deletions refinery/core/serializers.py
@@ -4,8 +4,6 @@
from rest_framework import serializers
from rest_framework.validators import UniqueValidator

from data_set_manager.models import Node

from .models import DataSet, Event, User, UserProfile, Workflow

logger = logging.getLogger(__name__)
@@ -115,16 +113,6 @@ class Meta:
model = Workflow


class NodeSerializer(serializers.ModelSerializer):
file_uuid = serializers.CharField(max_length=36,
required=False,
allow_null=True)

class Meta:
model = Node
fields = ('uuid', 'file_uuid')


class EventSerializer(serializers.ModelSerializer):
data_set = DataSetSerializer()
user = UserSerializer()
4 changes: 3 additions & 1 deletion refinery/core/test_api.py
@@ -11,8 +11,10 @@
from tastypie.exceptions import NotFound
from tastypie.test import ResourceTestCase

from data_set_manager.api import NodeResource
from data_set_manager.models import Investigation, Study
from .api import AnalysisResource, DataSetResource, NodeResource

from .api import AnalysisResource, DataSetResource
from .models import (Analysis, Node, Project, UserProfile, Workflow,
WorkflowEngine)

5 changes: 1 addition & 4 deletions refinery/core/urls.py
@@ -10,8 +10,7 @@
from rest_framework.routers import DefaultRouter

from .views import (AnalysesViewSet, DataSetsViewSet, EventViewSet,
NodeViewSet, OpenIDToken, UserProfileViewSet,
WorkflowViewSet)
OpenIDToken, UserProfileViewSet, WorkflowViewSet)

urlpatterns = patterns(
'core.views',
@@ -84,8 +83,6 @@
DataSetsViewSet.as_view()),
url(r'^analyses/(?P<uuid>' + UUID_RE + r')/$',
AnalysesViewSet.as_view()),
url(r'^nodes/(?P<uuid>' + UUID_RE + r')/$',
NodeViewSet.as_view()),
url(r'^openid_token/$',
OpenIDToken.as_view(), name="openid-token")
])
86 changes: 1 addition & 85 deletions refinery/core/views.py
@@ -39,14 +39,11 @@
from rest_framework.views import APIView
import xmltodict

from data_set_manager.models import Node
from file_store.models import FileStoreItem

from .forms import ProjectForm, UserForm, UserProfileForm, WorkflowForm
from .models import (Analysis, CustomRegistrationProfile, DataSet, Event,
ExtendedGroup, Invitation, Ontology, Project,
UserProfile, Workflow, WorkflowEngine)
from .serializers import (DataSetSerializer, EventSerializer, NodeSerializer,
from .serializers import (DataSetSerializer, EventSerializer,
UserProfileSerializer, WorkflowSerializer)
from .utils import (api_error_response, get_data_sets_annotations,
get_resources_for_user)
@@ -695,87 +692,6 @@ def graph(self, request, *args, **kwargs):
)


class NodeViewSet(APIView):
"""API endpoint that allows Nodes to be viewed".
---
#YAML

PATCH:
parameters_strategy:
form: replace
query: merge

parameters:
- name: uuid
description: Node uuid used as an identifier
type: string
paramType: path
required: true
- name: file_uuid
description: uuid for file store item
type: string
paramType: form
required: false
...
"""
http_method_names = ['get', 'patch']

def get_object(self, uuid):
try:
return Node.objects.get(uuid=uuid)
except Node.DoesNotExist as e:
logger.error(e)
raise Http404
except Node.MultipleObjectsReturned as e:
logger.error(e)
raise APIException("Multiple objects returned.")

def get(self, request, uuid):
node = self.get_object(uuid)
data_set = node.study.get_dataset()
public_group = ExtendedGroup.objects.public_group()

if request.user.has_perm('core.read_dataset', data_set) or \
'read_dataset' in get_perms(public_group, data_set):
serializer = NodeSerializer(node)
return Response(serializer.data)

return Response(uuid, status=status.HTTP_401_UNAUTHORIZED)

def patch(self, request, uuid):
node = self.get_object(uuid)
new_file_uuid = request.data.get('file_uuid')
data_set = node.study.get_dataset()

if not data_set.is_clean():
return Response(
'Files cannot be removed once an analysis or visualization '
'has run on a data set',
status=status.HTTP_400_BAD_REQUEST
)

if data_set.get_owner() == request.user:
# to remove the data file, we need to delete it and update index,
# the file store item uuid should remain
if new_file_uuid is None:
try:
file_store_item = FileStoreItem.objects.get(
uuid=node.file_uuid
)
except (FileStoreItem.DoesNotExist,
FileStoreItem.MultipleObjectsReturned) as e:
logger.error(e)
else:
file_store_item.delete_datafile()

node.update_solr_index()
return Response(
NodeSerializer(node).data, status=status.HTTP_200_OK
)

return Response(uuid, status=status.HTTP_401_UNAUTHORIZED)


class EventViewSet(viewsets.ModelViewSet):
"""API endpoint that allows Events to be viewed"""
queryset = Event.objects.all()
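The GET handler of the removed NodeViewSet (which now lives in data_set_manager/views.py, as the data_set_manager/urls.py import further down shows) grants read access either through an explicit permission or through the public group. A standalone sketch of that check, using django-guardian's get_perms as in the removed code; the helper name is hypothetical.

from guardian.shortcuts import get_perms


def can_read_data_set(user, data_set, public_group):
    # True if the user holds core.read_dataset on the data set directly,
    # or the data set has been shared with the public group
    return (user.has_perm('core.read_dataset', data_set) or
            'read_dataset' in get_perms(public_group, data_set))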
75 changes: 75 additions & 0 deletions refinery/data_set_manager/api.py
@@ -3,15 +3,25 @@

@author: nils
'''
import logging

from django.conf.urls import url

from constants import UUID_RE
from tastypie import fields
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.resources import ModelResource

from file_store.models import FileStoreItem

from .models import (Assay, Attribute, Investigation, Node, Protocol,
ProtocolReference, ProtocolReferenceParameter,
Publication, Study)

logger = logging.getLogger(__name__)


class AttributeResource(ModelResource):
node = fields.ForeignKey('core.api.NodeResource', 'node', use_in='all')
@@ -37,6 +47,71 @@ class Meta:
]


class NodeResource(ModelResource):
parents = fields.ToManyField('core.api.NodeResource', 'parents')
children = fields.ToManyField('core.api.NodeResource', 'children')
study = fields.ToOneField('data_set_manager.api.StudyResource', 'study')
assay = fields.ToOneField('data_set_manager.api.AssayResource', 'assay',
null=True)
attributes = fields.ToManyField(
'data_set_manager.api.AttributeResource',
attribute=lambda bundle: (
Attribute.objects
.exclude(value__isnull=True)
.exclude(value__exact='')
.filter(node=bundle.obj, subtype='organism')
), use_in='all', full=True, null=True
)

class Meta:
queryset = Node.objects.all()
resource_name = 'node'
detail_uri_name = 'uuid' # for using UUIDs instead of pk in URIs
# required for public data set access by anonymous users
authentication = Authentication()
authorization = Authorization()
allowed_methods = ['get']
fields = ['analysis_uuid', 'assay', 'attributes', 'children',
'file_url', 'file_uuid', 'name', 'parents', 'study',
'subanalysis', 'type', 'uuid']
filtering = {
'uuid': ALL,
'study': ALL_WITH_RELATIONS,
'assay': ALL_WITH_RELATIONS,
'file_uuid': ALL,
'type': ALL
}
limit = 0
max_limit = 0

def prepend_urls(self):
return [
url((r"^(?P<resource_name>%s)/(?P<uuid>" + UUID_RE + r")/$") %
self._meta.resource_name,
self.wrap_view('dispatch_detail'),
name="api_dispatch_detail"),
]

def dehydrate(self, bundle):
# return download URL of file if a file is associated with the node
try:
file_item = FileStoreItem.objects.get(uuid=bundle.obj.file_uuid)
except AttributeError:
logger.warning("No UUID provided")
bundle.data['file_url'] = None
bundle.data['file_import_status'] = None
except FileStoreItem.DoesNotExist:
logger.warning(
"Unable to find file store item with UUID '%s'",
bundle.obj.file_uuid)
bundle.data['file_url'] = None
bundle.data['file_import_status'] = None
else:
bundle.data['file_url'] = file_item.get_datafile_url()
bundle.data['file_import_status'] = file_item.get_import_status()
return bundle


class InvestigationResource(ModelResource):
class Meta:
queryset = Investigation.objects.all()
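NodeResource.dehydrate above attaches file_url and file_import_status to every node representation whenever a matching FileStoreItem exists. An illustrative, made-up shape of a v1 detail response is shown below; the /api/v1/ prefix assumes the resource is registered under api_name='v1', and all values are placeholders.

# Hypothetical GET /api/v1/node/<uuid>/?format=json response (placeholder values)
example_node = {
    'uuid': '<node uuid>',
    'name': 'sample.fastq',                # placeholder node name
    'type': 'Raw Data File',
    'file_uuid': '<file store item uuid>',
    'file_url': '/media/file_store/...',   # from FileStoreItem.get_datafile_url()
    'file_import_status': 'SUCCESS',       # from FileStoreItem.get_import_status()
    'study': '/api/v1/study/<study uuid>/',
    'assay': '/api/v1/assay/<assay uuid>/',
}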
12 changes: 11 additions & 1 deletion refinery/data_set_manager/serializers.py
@@ -1,6 +1,6 @@
from rest_framework import serializers

from .models import Assay, AttributeOrder
from .models import Assay, AttributeOrder, Node


class AssaySerializer(serializers.ModelSerializer):
@@ -35,3 +35,13 @@ def update(self, instance, validated_data):
instance.is_active)
instance.save()
return instance


class NodeSerializer(serializers.ModelSerializer):
file_uuid = serializers.CharField(max_length=36,
required=False,
allow_null=True)

class Meta:
model = Node
fields = ('uuid', 'file_uuid')
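A brief usage sketch of the relocated NodeSerializer; the node lookup and payload are illustrative only.

from data_set_manager.models import Node
from data_set_manager.serializers import NodeSerializer

node = Node.objects.get(uuid='<some node uuid>')   # placeholder lookup
print(NodeSerializer(node).data)                   # e.g. {'uuid': ..., 'file_uuid': ...}

# A PATCH-style payload clearing the data file reference validates because
# file_uuid is declared with required=False and allow_null=True
serializer = NodeSerializer(node, data={'file_uuid': None}, partial=True)
serializer.is_valid(raise_exception=True)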
7 changes: 5 additions & 2 deletions refinery/data_set_manager/urls.py
@@ -14,8 +14,9 @@
from .views import (AddFileToNodeView, Assays, AssaysAttributes,
AssaysFiles, CheckDataFilesView,
ChunkedFileUploadCompleteView, ChunkedFileUploadView,
DataSetImportView, ImportISATabView, ProcessISATabView,
ProcessMetadataTableView, TakeOwnershipOfPublicDatasetView)
DataSetImportView, ImportISATabView, NodeViewSet,
ProcessISATabView, ProcessMetadataTableView,
TakeOwnershipOfPublicDatasetView)

urlpatterns = patterns(
'data_set_manager.views',
@@ -56,4 +57,6 @@
AssaysAttributes.as_view()),
url(r'^data_set_manager/add-file/$',
AddFileToNodeView.as_view()),
url(r'^nodes/(?P<uuid>' + UUID_RE + r')/$',
NodeViewSet.as_view()),
])
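With the URL pattern relocated here, node detail requests are handled by data_set_manager's NodeViewSet. A hedged example of exercising the endpoint with the requests library; the /api/v2 prefix is an assumption about where data_set_manager.urls is mounted, the UUID is a placeholder, and authentication is omitted for brevity.

import requests

BASE = 'http://localhost:8000/api/v2'                 # assumed mount point
NODE_UUID = '00000000-0000-0000-0000-000000000000'    # placeholder UUID

# GET is allowed for users with read access or when the data set is public
response = requests.get('{}/nodes/{}/'.format(BASE, NODE_UUID))
print(response.status_code, response.json())

# PATCH with a null file_uuid removes the node's data file (owner only);
# the view returns 400 if an analysis or visualization has already run
response = requests.patch('{}/nodes/{}/'.format(BASE, NODE_UUID),
                          json={'file_uuid': None})
print(response.status_code)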