diff --git a/api/api.py b/api/api.py index db7a9dc70..a25dec52d 100644 --- a/api/api.py +++ b/api/api.py @@ -95,6 +95,7 @@ def prefix(path, routes): route('/login', RequestHandler, h='log_in', m=['POST']), route('/logout', RequestHandler, h='log_out', m=['POST']), + route('/lookup', ResolveHandler, h='lookup', m=['POST']), route('/resolve', ResolveHandler, h='resolve', m=['POST']), route('/schemas/', SchemaHandler, m=['GET']), route('/report/', ReportHandler, m=['GET']), diff --git a/api/config.py b/api/config.py index c2bbaf696..beb182257 100644 --- a/api/config.py +++ b/api/config.py @@ -160,17 +160,23 @@ def apply_env_variables(config): 'device.json', 'file.json', 'file-update.json', + 'gear.json', 'group-new.json', 'group-update.json', 'info_update.json', + 'job-logs.json', + 'job-new.json', 'note.json', 'packfile.json', 'permission.json', 'project.json', 'project-template.json', 'project-update.json', + 'propose-batch.json', + 'resolver.json', 'rule-new.json', 'rule-update.json', + 'search-query.json', 'session.json', 'session-update.json', 'subject.json', @@ -225,11 +231,11 @@ def initialize_db(): log.info('Initializing database, creating indexes') # TODO review all indexes db.users.create_index('api_key.key') - db.projects.create_index([('gid', 1), ('name', 1)]) - db.sessions.create_index('project') + db.projects.create_index([('group', 1), ('label', 1)]) + db.sessions.create_index([('project', 1), ('label', 1)]) db.sessions.create_index('uid') db.sessions.create_index('created') - db.acquisitions.create_index('session') + db.acquisitions.create_index([('session', 1), ('label', 1)]) db.acquisitions.create_index('uid') db.acquisitions.create_index('collections') db.analyses.create_index([('parent.type', 1), ('parent.id', 1)]) diff --git a/api/dao/basecontainerstorage.py b/api/dao/basecontainerstorage.py index 9484833f4..85190e578 100644 --- a/api/dao/basecontainerstorage.py +++ b/api/dao/basecontainerstorage.py @@ -108,6 +108,9 @@ def get_container(self, _id, projection=None, get_children=False): cont[CHILD_MAP[self.cont_name]] = children return cont + def get_child_container_name(self): + return CHILD_MAP.get(self.cont_name) + def get_children(self, _id, projection=None, uid=None): try: child_name = CHILD_MAP[self.cont_name] @@ -210,11 +213,21 @@ def get_el(self, _id, projection=None, fill_defaults=False): self._from_mongo(cont) if fill_defaults: self._fill_default_values(cont) - if cont is not None and cont.get('files', []): - cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + self.filter_deleted_files(cont) return cont - def get_all_el(self, query, user, projection, fill_defaults=False): + def get_all_el(self, query, user, projection, fill_defaults=False, **kwargs): + """ + Get all elements matching query for this container. + + Args: + query (dict): The query object, or None for all elements + user (dict): The user object, if filtering on permissions is desired, otherwise None + projection (dict): The optional projection to use for returned elements + fill_defaults (bool): Whether or not to populate the default values for returned elements. Default is False. 
+ **kwargs: Additional arguments to pass to the underlying find function + + """ if query is None: query = {} if user: @@ -238,10 +251,9 @@ def get_all_el(self, query, user, projection, fill_defaults=False): else: replace_info_with_bool = False - results = list(self.dbc.find(query, projection)) + results = list(self.dbc.find(query, projection, **kwargs)) for cont in results: - if cont.get('files', []): - cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + self.filter_deleted_files(cont) self._from_mongo(cont) if fill_defaults: self._fill_default_values(cont) @@ -304,3 +316,18 @@ def modify_info(self, _id, payload, modify_subject=False): update['$set']['modified'] = datetime.datetime.utcnow() return self.dbc.update_one(query, update) + + def filter_deleted_files(self, cont): + """ + Update container object, removing any files that are marked deleted. + """ + if cont is not None and 'files' in cont: + cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + + + def get_list_projection(self): + """ + Return a copy of the list projection to use with this container, or None. + It is safe to modify the returned copy. + """ + return None diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py index 84f67d20a..d3fda6aba 100644 --- a/api/dao/containerstorage.py +++ b/api/dao/containerstorage.py @@ -40,7 +40,6 @@ def create_el(self, payload): class ProjectStorage(ContainerStorage): - def __init__(self): super(ProjectStorage,self).__init__('projects', use_object_id=True, use_delete_tag=True) @@ -96,6 +95,9 @@ def recalc_sessions_compliance(self, project_id=None): changed_sessions.append(s['_id']) return changed_sessions + def get_list_projection(self): + return {'info': 0, 'files.info': 0} + class SessionStorage(ContainerStorage): @@ -220,6 +222,14 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non return self.get_all_el(query, user, projection) + def get_list_projection(self): + # Remove subject first/last from list view to better log access to this information + return {'info': 0, 'analyses': 0, 'subject.firstname': 0, + 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, + 'subject.race': 0, 'subject.ethnicity': 0, 'subject.info': 0, + 'files.info': 0, 'tags': 0} + + class AcquisitionStorage(ContainerStorage): @@ -287,12 +297,18 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non query['collections'] = collection_id return self.get_all_el(query, user, projection) + def get_list_projection(self): + return {'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0} + class CollectionStorage(ContainerStorage): def __init__(self): super(CollectionStorage, self).__init__('collections', use_object_id=True, use_delete_tag=True) + def get_list_projection(self): + return {'info': 0} + class AnalysisStorage(ContainerStorage): @@ -305,10 +321,13 @@ def get_parent(self, parent_type, parent_id): return parent_storage.get_container(parent_id) - def get_analyses(self, parent_type, parent_id, inflate_job_info=False): - parent_type = containerutil.singularize(parent_type) - parent_id = bson.ObjectId(parent_id) - analyses = self.get_all_el({'parent.type': parent_type, 'parent.id': parent_id}, None, None) + def get_analyses(self, query, parent_type, parent_id, inflate_job_info=False, projection=None, **kwargs): + if query is None: + query = {} + query['parent.type'] = containerutil.singularize(parent_type) + query['parent.id'] = bson.ObjectId(parent_id) + + analyses = self.get_all_el(query, None, projection, 
**kwargs) if inflate_job_info: for analysis in analyses: self.inflate_job_info(analysis) @@ -410,3 +429,6 @@ def inflate_job_info(self, analysis): analysis['job'] = job return analysis + + def get_list_projection(self): + return {'info': 0, 'files.info': 0, 'tags': 0} diff --git a/api/handlers/collectionshandler.py b/api/handlers/collectionshandler.py index 703b277cc..e6102c2d1 100644 --- a/api/handlers/collectionshandler.py +++ b/api/handlers/collectionshandler.py @@ -19,10 +19,9 @@ class CollectionsHandler(ContainerHandler): container_handler_configurations['collections'] = { 'permchecker': containerauth.collection_permissions, - 'storage': containerstorage.ContainerStorage('collections', use_object_id=True, use_delete_tag=True), + 'storage': containerstorage.CollectionStorage(), 'storage_schema_file': 'collection.json', - 'payload_schema_file': 'collection.json', - 'list_projection': {'info': 0} + 'payload_schema_file': 'collection.json' } def __init__(self, request=None, response=None): @@ -116,7 +115,7 @@ def delete(self, **kwargs): self.abort(404, 'Element not removed from container {} {}'.format(self.storage.cont_name, _id)) def get_all(self): - projection = self.container_handler_configurations['collections']['list_projection'] + projection = self.get_list_projection('collections') if self.superuser_request: permchecker = always_ok elif self.public_request: @@ -163,7 +162,7 @@ def get_sessions(self, cid): if not self.superuser_request: query['permissions._id'] = self.uid - projection = self.container_handler_configurations['sessions']['list_projection'] + projection = self.get_list_projection('sessions') sessions = list(containerstorage.SessionStorage().get_all_el(query, None, projection)) @@ -193,7 +192,7 @@ def get_acquisitions(self, cid): if not self.superuser_request: query['permissions._id'] = self.uid - projection = self.container_handler_configurations['acquisitions']['list_projection'] + projection = self.get_list_projection('acquisitions') acquisitions = list(containerstorage.AcquisitionStorage().get_all_el(query, None, projection)) @@ -202,3 +201,8 @@ def get_acquisitions(self, cid): for acquisition in acquisitions: acquisition = self.handle_origin(acquisition) return acquisitions + + def get_list_projection(self, container): + """Return the list_projection for container.""" + cfg = self.container_handler_configurations[container] + return cfg['storage'].get_list_projection() diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py index 373016383..e5fc14c51 100644 --- a/api/handlers/containerhandler.py +++ b/api/handlers/containerhandler.py @@ -42,7 +42,6 @@ class ContainerHandler(base.RequestHandler): 'sessions': True, 'acquisitions': True } - default_list_projection = ['files', 'notes', 'timestamp', 'timezone', 'public'] # This configurations are used by the ContainerHandler class to load the storage, # the permissions checker and the json schema validators used to handle a request. 
@@ -57,7 +56,6 @@ class ContainerHandler(base.RequestHandler): 'parent_storage': containerstorage.GroupStorage(), 'storage_schema_file': 'project.json', 'payload_schema_file': 'project.json', - 'list_projection': {'info': 0, 'files.info': 0}, 'propagated_properties': ['public'], 'children_cont': 'sessions' }, @@ -67,11 +65,6 @@ class ContainerHandler(base.RequestHandler): 'parent_storage': containerstorage.ProjectStorage(), 'storage_schema_file': 'session.json', 'payload_schema_file': 'session.json', - # Remove subject first/last from list view to better log access to this information - 'list_projection': {'info': 0, 'analyses': 0, 'subject.firstname': 0, - 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, - 'subject.race': 0, 'subject.ethnicity': 0, 'subject.info': 0, - 'files.info': 0, 'tags': 0}, 'children_cont': 'acquisitions' }, 'acquisitions': { @@ -79,8 +72,7 @@ class ContainerHandler(base.RequestHandler): 'permchecker': containerauth.default_container, 'parent_storage': containerstorage.SessionStorage(), 'storage_schema_file': 'acquisition.json', - 'payload_schema_file': 'acquisition.json', - 'list_projection': {'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0} + 'payload_schema_file': 'acquisition.json' } } @@ -114,7 +106,10 @@ def get(self, cont_name, **kwargs): fileinfo['path'] = util.path_from_hash(fileinfo['hash']) inflate_job_info = cont_name == 'sessions' - result['analyses'] = AnalysisStorage().get_analyses(cont_name, _id, inflate_job_info) + result['analyses'] = AnalysisStorage().get_analyses(None, cont_name, _id, inflate_job_info) + + util.add_container_type(self.request, result) + return self.handle_origin(result) def handle_origin(self, result): @@ -240,7 +235,7 @@ def get_jobs(self, cid): permchecker(noop)('GET', cid) - analyses = AnalysisStorage().get_analyses('session', cont['_id']) + analyses = AnalysisStorage().get_analyses(None, 'session', cont['_id']) acquisitions = cont.get('acquisitions', []) results = [] @@ -311,7 +306,7 @@ def get_all(self, cont_name, par_cont_name=None, par_id=None): self.config = self.container_handler_configurations[cont_name] self.storage = self.config['storage'] - projection = self.config['list_projection'].copy() + projection = self.storage.get_list_projection() if self.is_true('permissions'): if not projection: @@ -384,7 +379,7 @@ def _add_results_counts(self, results, cont_name): def get_all_for_user(self, cont_name, uid): self.config = self.container_handler_configurations[cont_name] self.storage = self.config['storage'] - projection = self.config['list_projection'] + projection = self.storage.get_list_projection() # select which permission filter will be applied to the list of results. 
if self.superuser_request or self.user_is_admin: permchecker = always_ok diff --git a/api/handlers/grouphandler.py b/api/handlers/grouphandler.py index a006b1209..c54f3430d 100644 --- a/api/handlers/grouphandler.py +++ b/api/handlers/grouphandler.py @@ -22,6 +22,7 @@ def get(self, _id): self._filter_permissions([result], self.uid) if self.is_true('join_avatars'): ContainerHandler.join_user_info([result]) + util.add_container_type(self.request, result) return result def delete(self, _id): diff --git a/api/handlers/refererhandler.py b/api/handlers/refererhandler.py index a04440187..84c6b44d9 100644 --- a/api/handlers/refererhandler.py +++ b/api/handlers/refererhandler.py @@ -150,6 +150,8 @@ def get(self, **kwargs): if self.is_true('inflate_job'): self.storage.inflate_job_info(analysis) + util.add_container_type(self.request, analysis) + self.log_user_access(AccessType.view_container, cont_name=analysis['parent']['type'], cont_id=analysis['parent']['id']) return analysis diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index 2372fda1e..1e926474b 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -1,8 +1,11 @@ """ API request handlers for the jobs module """ +from webapp2 import Request +from ..dao import containerutil from ..web import base +from ..web.errors import APINotFoundException from ..resolver import Resolver class ResolveHandler(base.RequestHandler): @@ -10,19 +13,69 @@ class ResolveHandler(base.RequestHandler): """Provide /resolve API route.""" def resolve(self): - """Resolve a path through the hierarchy.""" + """Resolve a path through the hierarchy, and include node details with children""" + return self._resolve_and_check_permissions(False) + + def lookup(self): + """Locate a node by path, and re-route to the endpoint for that node""" + result = self._resolve_and_check_permissions(True) + + # If we resolved a file, we can just return that file node + path = result.get('path', []) + + if not path: + raise APINotFoundException('No node matched that path') + + # In the event that we resolved a file, just return the file node + dest = path[-1] + if dest.get('container_type') == 'file': + return dest + + # Reroute to the actual path that will log access, resolve analyses, etc + path = self._get_node_path(dest) + + # Create new request instance using destination URI (eg. replace containers with cont_name) + destination_environ = self.request.environ + for key in 'PATH_INFO', 'REQUEST_URI': + destination_environ[key] = destination_environ[key].replace('lookup', path, 1) + # We also must update the method, and indicate that we want the container_type included + # The client will depend on container_type being set so that it can map to the correct type + destination_environ['REQUEST_METHOD'] = 'GET' + destination_environ['fw_container_type'] = dest['container_type'] + destination_request = Request(destination_environ) + # Apply SciTranRequest attrs + destination_request.id = self.request.id + destination_request.logger = self.request.logger + + # Dispatch the destination request + self.app.router.dispatch(destination_request, self.response) + + def _get_node_path(self, node): + """Get the actual resource path for node""" + try: + cname = containerutil.pluralize(node['container_type']) + except ValueError: + # Handle everything else... 
+ cname = node['container_type'] + 's' + + return '{0}/{1}'.format(cname, node['_id']) + + def _resolve_and_check_permissions(self, id_only): + """Resolve a path through the hierarchy.""" if self.public_request: self.abort(403, 'Request requires login') doc = self.request.json - result = Resolver.resolve(doc['path']) + + resolver = Resolver(id_only=id_only) + result = resolver.resolve(doc['path']) # Cancel the request if anything in the path is unauthorized; remove any children that are unauthorized. if not self.superuser_request: for x in result["path"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group']: + if x['container_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: @@ -35,7 +88,7 @@ def resolve(self): filtered_children = [] for x in result["children"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group']: + if x['container_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: @@ -50,3 +103,4 @@ def resolve(self): result["children"] = filtered_children return result + diff --git a/api/jobs/gears.py b/api/jobs/gears.py index 908e08034..411ed1314 100644 --- a/api/jobs/gears.py +++ b/api/jobs/gears.py @@ -60,7 +60,7 @@ def suggest_container(gear, cont_name, cid): """ root = ContainerStorage.factory(cont_name).get_container(cid, projection={'permissions':0}, get_children=True) - root['analyses'] = ContainerStorage.factory('analyses').get_analyses(cont_name, cid, False) + root['analyses'] = ContainerStorage.factory('analyses').get_analyses(None, cont_name, cid, False) invocation_schema = get_invocation_schema(gear) diff --git a/api/jobs/handlers.py b/api/jobs/handlers.py index ebef965d6..26e66232f 100644 --- a/api/jobs/handlers.py +++ b/api/jobs/handlers.py @@ -59,7 +59,10 @@ class GearHandler(base.RequestHandler): @require_login def get(self, _id): - return get_gear(_id) + result = get_gear(_id) + util.add_container_type(self.request, result) + return result + @require_login def get_invocation(self, _id): diff --git a/api/resolver.py b/api/resolver.py index 0b110ea1c..6e87bdea2 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -1,177 +1,351 @@ """ Resolve an ambiguous path through the data hierarchy. + +The goal of the resolver is to provide a virtual graph that can be navigated using +path notation. Below is how the graph will ultimately be represented. Currently +subjects are not formalized and are excluded from the implementation. + +Quoted strings represent literal nodes in the graph. For example, to find the gear +called dicom-mr-classifier, you would use the path: ["gears", "dicom-mr-classifier"] + +NOTE: Currently subjects and gear versions are not supported! + ++----+ +-------+ +-----+ +-------+ +|Root+---+"gears"+---+Gears+---+Version| ++-+--+ +-------+ +-----+ +-------+ + | ++-+----+ +|Groups| ++-+----+ + | ++-+------+ +|Projects+---+ ++-+------+ | + | | ++-+------+ | +----------+ +--------+ +|Subjects+---+---+"analyses"+---+Analyses| ++-+------+ | +----------+ +---+----+ + | | | ++-+------+ | | +|Sessions+---+-------+ | ++-+------+ | +---+---+ +-----+ + | +----------+"files"+---+Files| ++-+----------+ | +-------+ +-----+ +|Acquisitions+-------+ ++------------+ """ +import bson -from . 
import config -from .web.errors import APINotFoundException +from collections import deque -class Node(object): +from .dao import containerutil +from .dao.basecontainerstorage import ContainerStorage +from .jobs import gears +from .web.errors import APINotFoundException, InputValidationException - # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. - # As neither property should ever change, this sort should be consistent - sorting = [('created', 1), ('_id', 1)] +def apply_container_type(lst, container_type): + """Apply container_type to each item in the list""" + for item in lst: + item['container_type'] = container_type - # Globally disable extraneous properties of unbounded length, along with some PHI fields. - projection = { - 'files': 0, - 'info': 0, - 'tags': 0, - 'subject.sex': 0, - 'subject.age': 0, - 'subject.race': 0, - 'subject.ethnicity': 0, - 'subject.info': 0, - 'subject.firstname': 0, - 'subject.lastname': 0, - } - - # Add some more fields for debugging purposes. - # projection['roles'] = 0 - # projection['permissions'] = 0 - @staticmethod - def get_children(parent): - raise NotImplementedError() # pragma: no cover +class Node(object): + """Base class for all nodes in the resolver tree""" + def next(self, path_in, path_out, id_only): # pylint: disable=W0613 + """ + Find the next node in the hierarchy that matches the next item in path_in. + Places the found node in path_out and returns the next Node in the tree. + + Args: + path_in (deque): The remaining path elements to search in left-to-right order. + path_out (list): The currently resolved path, in left-to-right order. + id_only (bool): Whether to resolve just ids for path elements, or full nodes. + + Returns: + Node: The next node in the hierarchy, or None + """ + raise NotImplementedError() + + def get_children(self, path_out): # pylint: disable=W0613 + """ + Get all children of the last path element. + + Args: + path_out (list): The currently resolved path. + + Returns: + list: A list of child elements for the last path element. + """ + raise NotImplementedError() + + def get_parent(self, path_out): + """Return the last element in path_out or None""" + if path_out: + return path_out[-1] + return None + + def parse_criterion(self, path_in): + """ + Parse criterion, returning true if we got an id. + + Args: + path_in (deque): The path in, must not be empty. + + Returns: + bool, str: A boolean value indicating whether or not we parsed an id, and the parsed value. + """ + value = path_in.popleft() + use_id = False - @staticmethod - def filter(children, criterion, _id=False): - raise NotImplementedError() # pragma: no cover + # Check for <id:xyz> syntax + if value.startswith('<id:') and value.endswith('>'): + value = value[4:len(value)-1] + use_id = True -def _get_files(table, match): - """ - Return a consistently-ordered set of files for a given container query.
- """ + return use_id, value - pipeline = [ - {'$match': match }, - {'$unwind': '$files'}, - {'$sort': {'files.name': 1}}, - {'$group': {'_id':'$_id', 'files': {'$push':'$files'}}} - ] - result = config.mongo_pipeline(table, pipeline) - if len(result) == 0: - return [] +class FilesNode(Node): + """Node that manages filename resolution""" + def next(self, path_in, path_out, id_only): + filename = path_in.popleft() + parent = self.get_parent(path_out) - files = result[0]['files'] - for x in files: - x.update({'node_type': 'file'}) - return files + # Find the matching file + for f in FilesNode.pop_files(parent): + if str(f.get('name', '')) == filename: + path_out.append(f) + return None -def _get_docs(table, label, match): - match_nondeleted = match.copy() - match_nondeleted['deleted'] = {'$exists': False} - results = list(config.db[table].find(match, Node.projection, sort=Node.sorting)) - for y in results: - y.update({'node_type': label}) - return results + raise APINotFoundException('No ' + filename + ' file found.') + def get_children(self, path_out): + parent = self.get_parent(path_out) + return FilesNode.pop_files(parent) -class FileNode(Node): @staticmethod - def get_children(parent): - return [] + def pop_files(container): + """ + Return a consistently-ordered set of files for a given container. + This will remove the 'files' attribute from the container. - @staticmethod - def filter(children, criterion, _id=False): - raise APINotFoundException("Files have no children") + Args: + container (dict): The container, or None if there is no parent. -class AcquisitionNode(Node): - @staticmethod - def get_children(parent): - files = _get_files('acquisitions', {'_id' : parent['_id'] }) + Returns: + list: The list of files, or an empty list + """ + if not container: + return [] - return files + files = container.pop('files', []) - @staticmethod - def filter(children, criterion, _id=False): - for x in children: - if x['node_type'] == "file" and x.get('name') == criterion: - return x, FileNode - raise APINotFoundException('No ' + criterion + ' file found.') + files.sort(key=lambda f: f.get('name', '')) + apply_container_type(files, 'file') -class SessionNode(Node): + return files - @staticmethod - def get_children(parent): - acqs = _get_docs('acquisitions', 'acquisition', {'session' : parent['_id']}) - files = _get_files('sessions', {'_id' : parent['_id'] }) - return list(acqs) + files +class ContainerNode(Node): + # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. 
+ # As neither property should ever change, this sort should be consistent + sorting = [('created', 1), ('_id', 1)] - @staticmethod - def filter(children, criterion, _id=False): - if _id: - selectAcq = '_id' - selectFil = '_id' + def __init__(self, cont_name, files=True, use_id=False, analyses=True): + self.cont_name = cont_name + self.storage = ContainerStorage.factory(cont_name) + # container_type is also the parent id field name + self.container_type = containerutil.singularize(cont_name) + self.files = files + self.use_id = use_id + self.analyses = analyses + self.child_name = self.storage.get_child_container_name() + + def next(self, path_in, path_out, id_only): + use_id, criterion = self.parse_criterion(path_in) + parent = self.get_parent(path_out) + # Peek to see if we need files for the next path element + fetch_files = (not path_in or path_in[0] == 'files') + + # Setup criterion match + query = {} + if use_id or self.use_id: + if self.storage.use_object_id: + try: + query['_id'] = bson.ObjectId(criterion) + except bson.errors.InvalidId as e: + raise InputValidationException(e.message) + else: + query['_id'] = criterion else: - selectAcq = 'label' - selectFil = 'name' + query['label'] = criterion - for x in children: - if x['node_type'] == "acquisition" and str(x.get(selectAcq)) == criterion: - return x, AcquisitionNode - if x['node_type'] == "file" and str(x.get(selectFil)) == criterion: - return x, FileNode - raise APINotFoundException('No ' + criterion + ' acquisition or file found.') + # Setup projection + if id_only: + proj = ContainerNode.get_id_only_projection() + if fetch_files: + proj['files'] = 1 + else: + proj = self.storage.get_list_projection() + if proj and not fetch_files: + proj['files'] = 0 + + results = self.find(query, parent, proj) + if not results: + raise APINotFoundException('No {0} {1} found.'.format(criterion, self.container_type)) + + child = results[0] + + self.storage.filter_deleted_files(child) + child['container_type'] = self.container_type + path_out.append(child) + + # Get the next node + if path_in: + # Files + if fetch_files: + path_in.popleft() + return FilesNode() + + # Check for analyses + if path_in[0] == 'analyses' and self.analyses: + path_in.popleft() + return AnalysesNode() + + # Otherwise, the next node is our child container + if self.child_name: + return ContainerNode(self.child_name) + + return None + + def get_children(self, path_out): + parent = self.get_parent(path_out) + + # Get container chilren + if self.child_name: + query = {} + if parent: + query[parent['container_type']] = parent['_id'] + + children = ContainerNode.get_container_children(self.child_name, query) + else: + children = [] -class ProjectNode(Node): + # Add analyses + if self.analyses: + analyses_node = AnalysesNode() - @staticmethod - def get_children(parent): - sessions = _get_docs('sessions', 'session', {'project' : parent['_id']}) - files = _get_files('projects', {'_id' : parent['_id'] }) + proj = analyses_node.storage.get_list_projection() + if proj: + proj['files'] = 0 - return list(sessions) + files + analyses = analyses_node.list_analyses(parent, proj=proj) + apply_container_type(analyses, analyses_node.container_type) + children = children + analyses - @staticmethod - def filter(children, criterion, _id=False): - if _id: - selectSes = '_id' - selectFil = '_id' - else: - selectSes = 'label' - selectFil = 'name' + # Add files + return children + FilesNode.pop_files(parent) - for x in children: - if x['node_type'] == "session" and str(x.get(selectSes)) == 
criterion: - return x, SessionNode - if x['node_type'] == "file" and str(x.get(selectFil)) == criterion: - return x, FileNode - raise APINotFoundException('No ' + criterion + ' session or file found.') + def find(self, query, parent, proj): + """ Find the one child of this container that matches query """ + # Add parent to query + if parent: + query[parent['container_type']] = parent['_id'] -class GroupNode(Node): + # We don't use the user field here because we want to return a 403 if + # they try to resolve something they don't have access to + return self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) @staticmethod - def get_children(parent): - projects = _get_docs('projects', 'project', {'group' : parent['_id']}) - return projects + def get_id_only_projection(): + """Return a projection that will return the minimal values required for id-only resolution.""" + return { + 'label': 1, + 'permissions': 1, + 'files': 1, + } @staticmethod - def filter(children, criterion, _id=False): - if _id: - select = '_id' + def get_container_children(cont_name, query=None): + """Get all children of container named cont_name, using query""" + storage = ContainerStorage.factory(cont_name) + + proj = storage.get_list_projection() + if proj: + proj['files'] = 0 + + children = storage.get_all_el(query, None, proj, sort=ContainerNode.sorting) + apply_container_type(children, containerutil.singularize(cont_name)) + + return children + + +class GearsNode(Node): + """The top level "gears" node""" + def next(self, path_in, path_out, id_only): + use_id, criterion = self.parse_criterion(path_in) + if use_id: + gear = gears.get_gear(criterion) else: - select = 'label' + gear = gears.get_gear_by_name(criterion) + + if not gear: + raise APINotFoundException('No gear {0} found.'.format(criterion)) + + gear['container_type'] = 'gear' + path_out.append(gear) + + return None + + def get_children(self, path_out): + # No children for gears yet + if path_out: + return [] + + results = gears.get_gears() + + for gear in results: + gear['container_type'] = 'gear' + + return list(results) + + +class AnalysesNode(ContainerNode): + def __init__(self): + super(AnalysesNode, self).__init__('analyses', files=True, use_id=False, analyses=False) + + def find(self, query, parent, proj): + return self.list_analyses(parent, query, proj, limit=1) + + def get_children(self, path_out): + parent = self.get_parent(path_out) + + # Only children of an analyses is files + if parent.get('container_type') == 'analysis': + return FilesNode.pop_files(parent) + + results = self.list_analyses(parent) + apply_container_type(results, self.container_type) + return results + + def list_analyses(self, parent, query=None, proj=None, **kwargs): + """Get a list of all analyses that match query, using the given projection""" + return self.storage.get_analyses(query, parent['container_type'], parent['_id'], projection=proj, sort=ContainerNode.sorting, **kwargs) - for x in children: - if str(x.get(select)) == criterion: - return x, ProjectNode - raise APINotFoundException('No ' + criterion + ' project found.') class RootNode(Node): + """The root node of the resolver tree""" + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy""" + if path_in[0] == 'gears': + path_in.popleft() + return GearsNode() - @staticmethod - def get_children(parent): - groups = _get_docs('groups', 'group', {}) - return groups + return ContainerNode('groups', files=False, use_id=True, analyses=False) - @staticmethod - def 
filter(children, criterion, _id=False): - for x in children: - if x.get('_id') == criterion: - return x, GroupNode - raise APINotFoundException('No ' + criterion + ' group found.') + def get_children(self, path_out): + """Get the children of the current node in the hierarchy""" + return ContainerNode.get_container_children('groups') class Resolver(object): @@ -180,47 +354,39 @@ class Resolver(object): Does not tolerate ambiguity at any level of the path except the final node. """ + def __init__(self, id_only=False): + self.id_only = id_only - @staticmethod - def resolve(path): - + def resolve(self, path): if not isinstance(path, list): - raise Exception("Path must be an array of strings") + raise InputValidationException("Path must be an array of strings") - node, resolved, last = Resolver._resolve(path, RootNode) - children = node.get_children(last) + path = deque(path) + node = None + next_node = RootNode() - return { - 'path': resolved, - 'children': children - } + resolved_path = [] + resolved_children = [] - @staticmethod - def _resolve(path, node, parents=None): - - if parents is None: - parents = [] + # Walk down the tree, building path until we get to a leaf node + # Keeping in mind that path may be empty + while next_node: + node = next_node - last = None - if len(parents) > 0: - last = parents[len(parents) - 1] + # Don't attempt to resolve the next node if path is empty + if not path: + break - if len(path) == 0: - return node, parents, last + next_node = node.next(path, resolved_path, self.id_only) - current = path[0] - current_id = False - - # Check for syntax - if current.startswith(''): - current = current[4:len(current)-1] - current_id = True - print current + # If we haven't consumed path, then we didn't find what we were looking for + if len(path) > 0: + raise APINotFoundException('Could not resolve node for: ' + '/'.join(path)) - children = node.get_children(last) - selected, next_ = node.filter(children, current, current_id) + if hasattr(node, 'get_children'): + resolved_children = node.get_children(resolved_path) - path = path[1:] - parents.append(selected) - - return Resolver._resolve(path, next_, parents) + return { + 'path': resolved_path, + 'children': resolved_children + } diff --git a/api/util.py b/api/util.py index 64d2f85d1..657b3b976 100644 --- a/api/util.py +++ b/api/util.py @@ -340,3 +340,9 @@ def parse_range_header(range_header_val, valid_units=('bytes',)): ranges.append((first, last)) return ranges + +def add_container_type(request, result): + """Adds a 'container_type' property to result if fw_container_type is set in the request environment.""" + if 'fw_container_type' in request.environ and isinstance(result, dict): + result['container_type'] = request.environ['fw_container_type'] + diff --git a/swagger/Gruntfile.js b/swagger/Gruntfile.js index df8131907..24efc1fd2 100644 --- a/swagger/Gruntfile.js +++ b/swagger/Gruntfile.js @@ -85,6 +85,16 @@ module.exports = function(grunt) { } }, + /** + * Simplify swagger for codegen + */ + simplifySwagger: { + core: { + src: 'build/swagger-ui.json', + dst: 'build/swagger-codegen.json' + } + }, + /** * Validate swagger */ @@ -152,7 +162,8 @@ module.exports = function(grunt) { 'createBuildDir', 'flattenSwagger', 'schemasToDefs', - 'validateSwagger' + 'validateSwagger', + 'simplifySwagger' ]); /** diff --git a/swagger/examples/gear_full.json b/swagger/examples/gear_full.json deleted file mode 100755 index 908f0ad58..000000000 --- a/swagger/examples/gear_full.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": 
"test-case-gear", - "manifest": { - "name": "test-case-gear", - "label": "Test Case Gear", - "version": "0", - - "author": "Nathaniel Kofalt", - "description": "A gear built to drive test cases", - - "url": "http://none.example", - "source": "http://none.example", - "license": "MIT", - - "config": { - "two-digit multiple of ten": { - "exclusiveMaximum": true, - "type": "number", - "multipleOf": 10, - "maximum": 100 - } - }, - - "inputs": { - "any text file <= 100 KB": { - "base": "file", - "name": { - "pattern": "^.*.txt$" - }, - "size": { - "maximum": 100000 - } - } - } - }, - "input": {} -} diff --git a/swagger/examples/gears_list_just_name.json b/swagger/examples/gears_list_just_name.json deleted file mode 100755 index e56e4078e..000000000 --- a/swagger/examples/gears_list_just_name.json +++ /dev/null @@ -1,11 +0,0 @@ -[ - { - "name": "dicom_mr_classifier" - }, - { - "name": "dcm_convert" - }, - { - "name": "qa-report-fmri" - } -] \ No newline at end of file diff --git a/swagger/examples/input/job-new.json b/swagger/examples/input/job-new.json deleted file mode 100755 index 934e2e763..000000000 --- a/swagger/examples/input/job-new.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "gear_id": "aex", - "inputs": { - "dicom": { - "type": "acquisition", - "id": "573c9e6a844eac7fc01747cd", - "name" : "1_1_dicom.zip" - } - }, - "config": { - "two-digit multiple of ten": 20 - }, - "destination": { - "type": "acquisition", - "id": "573c9e6a844eac7fc01747cd" - }, - "tags": [ - "ad-hoc" - ] -} diff --git a/swagger/examples/output/analysis.json b/swagger/examples/output/analysis.json index bb0c4caae..999bdd6a2 100644 --- a/swagger/examples/output/analysis.json +++ b/swagger/examples/output/analysis.json @@ -44,7 +44,6 @@ } }, "attempt": 1, - "name": "cortex-demo", "tags": ["ad-hoc", "cortex-demo", "analysis"], "destination": { "type": "analysis", @@ -82,7 +81,8 @@ "created": "2016-10-18T17:45:11.816000+00:00", "state": "complete", "config": {}, - "id": "58065fa7e5dc5b001457a882" + "id": "58065fa7e5dc5b001457a882", + "gear_id": "58065fa7e5dc5b001457a882" }, "user": "canakgun@flywheel.io", "_id": "58065fa7e5dc5b001457a881" diff --git a/swagger/examples/output/gear-list.json b/swagger/examples/output/gear-list.json new file mode 100644 index 000000000..490934c94 --- /dev/null +++ b/swagger/examples/output/gear-list.json @@ -0,0 +1,35 @@ +[{ + "category": "converter", + "gear": { + "inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" 
+ } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "created": "2017-12-20T00:09:50.381000+00:00", + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + }, + "modified": "2017-12-20T00:09:50.381000+00:00", + "_id": "5a39aa4e07a393001b663910" +}] \ No newline at end of file diff --git a/swagger/examples/output/gear.json b/swagger/examples/output/gear.json new file mode 100644 index 000000000..841f2b536 --- /dev/null +++ b/swagger/examples/output/gear.json @@ -0,0 +1,35 @@ +{ + "category": "converter", + "gear": { + "inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" + } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "created": "2017-12-20T00:09:50.381000+00:00", + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + }, + "modified": "2017-12-20T00:09:50.381000+00:00", + "_id": "5a39aa4e07a393001b663910" +} \ No newline at end of file diff --git a/swagger/examples/output/rule.json b/swagger/examples/output/rule.json new file mode 100644 index 000000000..da502290d --- /dev/null +++ b/swagger/examples/output/rule.json @@ -0,0 +1,17 @@ +{ + "_id": "5a12f2923306be0016179f47", + "name": "dcm2niix", + "alg": "dcm2niix", + "any": [], + "all": [ + { + "regex": true, + "type": "file.measurements", + "value": "^(?!non-image).+$" + }, + { + "type": "file.type", + "value": "nifti" + } + ] +} diff --git a/swagger/index.yaml b/swagger/index.yaml index b9ed8a6e4..c0c2d0a1f 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -12,36 +12,42 @@ consumes: - 'application/json' tags: - - name: files - description: File upload/download operations - - name: devices - description: Device operations - name: users description: User operations - - name: gears - description: Gear operations - name: groups description: Group operations - - name: jobs - description: Job operations - - name: collections - description: Collection operations + - name: projects + description: Project operations - name: sessions description: Session operations - name: acquisitions description: Acquisition operations - - name: projects - description: Project operations + - name: analyses + description: Analysis operations + - name: 
collections + description: Collection operations + - name: files + description: File upload/download operations + - name: devices + description: Device operations + - name: gears + description: Gear operations + - name: rules + description: Gear rule configuration + - name: jobs + description: Job operations - name: reports description: Site-wide reports - name: batch description: Batch job operations + - name: default paths: $include: - paths/login.yaml - paths/download.yaml - paths/upload-by-label.yaml + - paths/upload-by-reaper.yaml - paths/upload-by-uid.yaml - paths/upload-match-uid.yaml - paths/clean-packfiles.yaml @@ -60,6 +66,10 @@ paths: - paths/projects.yaml - paths/report.yaml - paths/batch.yaml + - paths/analyses.yaml + - paths/site-rules.yaml + - paths/dataexplorer.yaml + - paths/resolver.yaml securityDefinitions: diff --git a/swagger/package.json b/swagger/package.json index 73a30727e..3700b1869 100644 --- a/swagger/package.json +++ b/swagger/package.json @@ -8,7 +8,7 @@ "lint": "node_modules/.bin/grunt lintSchemas", "test": "node_modules/.bin/jasmine --config=support/jasmine.json", "watch": "node_modules/.bin/grunt live", - "coverage": "node_modules/.bin/grunt coverage" + "coverage": "node_modules/.bin/grunt coverage" }, "author": "Justin Ehlert ", "license": "MIT", diff --git a/swagger/paths/acquisitions.yaml b/swagger/paths/acquisitions.yaml index 325ad7499..f24fd3953 100644 --- a/swagger/paths/acquisitions.yaml +++ b/swagger/paths/acquisitions.yaml @@ -15,25 +15,22 @@ $template_arguments: update-input-schema: schemas/input/acquisition-update.json get-output-schema: schemas/output/acquisition.json +/acquisitions/{AcquisitionId}/info: + $template: templates/container-item-info.yaml + # ===== Tags ===== /acquisitions/{AcquisitionId}/tags: $template: templates/tags.yaml /acquisitions/{AcquisitionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/acquisitions/{AcquisitionId}/packfile-start: - $template: templates/packfile-start.yaml -/acquisitions/{AcquisitionId}/packfile: - $template: templates/packfile.yaml -/acquisitions/{AcquisitionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /acquisitions/{AcquisitionId}/files: $template: templates/file-list-upload.yaml /acquisitions/{AcquisitionId}/files/{FileName}: $template: templates/file-item.yaml +/acquisitions/{AcquisitionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /acquisitions/{AcquisitionId}/notes: @@ -44,8 +41,12 @@ $template_arguments: # ===== Analyses ===== /acquisitions/{AcquisitionId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /acquisitions/{AcquisitionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/analyses.yaml b/swagger/paths/analyses.yaml new file mode 100644 index 000000000..a297dd39b --- /dev/null +++ b/swagger/paths/analyses.yaml @@ -0,0 +1,62 @@ +$template_arguments: + tag: 'analyses' + +/analyses/{AnalysisId}: + $template: templates/analysis-item.yaml + arguments: + supportsDelete: false + +/analyses/{AnalysisId}/files: + $template: templates/analysis-files.yaml + +/analyses/{AnalysisId}/files/{Filename}: + $template: templates/analysis-files-create-ticket-filename.yaml + 
+/{ContainerName}/{ContainerId}/{SubcontainerName}/analyses: + parameters: + - name: ContainerName + in: path + type: string + required: true + enum: + - groups + - projects + - sessions + - acquisitions + - collections + description: The parent container type + - name: ContainerId + in: path + type: string + required: true + description: The parent container id + - name: SubcontainerName + in: path + type: string + required: true + enum: + - all + - projects + - sessions + - acquisitions + description: The sub container type + get: + summary: Get nested analyses for a container + description: > + Returns analyses that belong to containers of the specified type that belong + to ContainerId. + + For example: `projects/{ProjectId}/acquisitions/analyses` will return any analyses + that have an acquisition that is under that project as a parent. + + The `all` keyword is also supported, for example: projects/{ProjectId}/all/analyses + will return any analyses that have any session or acquisition or the project itself as a parent. + operationId: get_analyses + tags: + - analyses + responses: + '200': + description: The list of analyses + schema: + $ref: schemas/output/analyses-list.json + diff --git a/swagger/paths/batch.yaml b/swagger/paths/batch.yaml index b407f82c3..900d45550 100644 --- a/swagger/paths/batch.yaml +++ b/swagger/paths/batch.yaml @@ -2,34 +2,31 @@ get: summary: Get a list of batch jobs the user has created. description: Requires login. - operationId: get_all_batch_jobs + operationId: get_all_batches tags: - batch responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch-list.json + description: 'Returns a list of all known batch jobs for the user' + schema: + $ref: schemas/output/batch-list.json post: summary: Create a batch job proposal and insert it as 'pending'. - operationId: create_batch_job + operationId: propose_batch tags: - batch parameters: - name: body in: body - description: '' + required: true + description: The batch proposal schema: - type: object - # Schema file does not exist - # $ref: schemas/input/batch-insert.json + $ref: schemas/input/propose-batch.json responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch-insert.json + description: The batch proposal object that was created + schema: + $ref: schemas/output/batch-proposal.json /batch/{BatchId}: parameters: @@ -40,19 +37,18 @@ get: summary: Get batch job details. parameters: - - in: query + - name: jobs + in: query type: boolean - name: jobs description: If true, return job objects instead of job ids - operationId: get_batch_job + operationId: get_batch tags: - batch responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch.json + description: 'The batch object' + schema: + $ref: schemas/output/batch.json '404': $ref: '#/responses/404:resource-not-found' @@ -67,7 +63,7 @@ description: | Creates jobs from proposed inputs, returns jobs enqueued. Moves 'pending' batch job to 'running'. - operationId: run_batch_job + operationId: start_batch tags: - batch responses: @@ -87,12 +83,11 @@ description: | Cancels jobs that are still pending, returns number of jobs cancelled. Moves a 'running' batch job to 'cancelled'. 
- operationId: cancel_batch_job + operationId: cancel_batch tags: - batch responses: '200': - description: '' - examples: - response: - canceled_jobs: 4 + description: 'The number of jobs canceled' + schema: + $ref: schemas/output/batch-cancel.json diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 486090979..3ad9cb967 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -20,12 +20,13 @@ $template_arguments: $ref: examples/output/collection-list.json post: summary: Create a collection - operationId: create_collection + operationId: add_collection tags: - collections parameters: - - in: body - name: body + - name: body + in: body + required: true schema: $ref: schemas/input/collection.json responses: @@ -53,6 +54,7 @@ $template_arguments: examples: response: $ref: examples/output/collection-curators-list.json + /collections/{CollectionId}: parameters: - required: true @@ -78,8 +80,9 @@ $template_arguments: tags: - collections parameters: - - in: body - name: body + - name: body + in: body + required: true schema: $ref: schemas/input/collection-update.json responses: @@ -96,6 +99,9 @@ $template_arguments: '200': description: Collection was deleted +/collections/{CollectionId}/info: + $template: templates/container-item-info.yaml + /collections/{CollectionId}/sessions: parameters: - required: true @@ -118,10 +124,14 @@ $template_arguments: /collections/{CollectionId}/acquisitions: parameters: - - required: true - type: string + - name: CollectionId in: path - name: CollectionId + type: string + required: true + - name: session + in: query + type: string + description: The id of a session, to which the acquisitions returned will be restricted get: summary: List acquisitions in a collection operationId: get_collection_acquisitions @@ -142,14 +152,6 @@ $template_arguments: /collections/{CollectionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/collections/{CollectionId}/packfile-start: - $template: templates/packfile-start.yaml -/collections/{CollectionId}/packfile: - $template: templates/packfile.yaml -/collections/{CollectionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /collections/{CollectionId}/files: $template: templates/file-list-upload.yaml @@ -161,6 +163,8 @@ $template_arguments: $template: templates/permissions.yaml /collections/{CollectionId}/permissions/{UserId}: $template: templates/permissions-user.yaml +/collections/{CollectionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /collections/{CollectionId}/notes: @@ -171,8 +175,12 @@ $template_arguments: # ===== Analyses ===== /collections/{CollectionId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /collections/{CollectionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /collections/{CollectionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /collections/{CollectionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/dataexplorer.yaml b/swagger/paths/dataexplorer.yaml new file mode 100644 index 000000000..1b5b63727 --- /dev/null +++ b/swagger/paths/dataexplorer.yaml @@ -0,0 +1,23 @@ +/dataexplorer/search: + post: + summary: Perform a search query + operationId: search + parameters: + - name: simple + in: query + type: boolean + x-sdk-default: 'true' + - name: limit + in: query + type: integer + x-sdk-default: 100 + - name: body + 
in: body + required: true + schema: + $ref: schemas/input/search-query.json + responses: + '200': + description: A list of results of the search query + schema: + $ref: schemas/output/search-response-list.json diff --git a/swagger/paths/gears.yaml b/swagger/paths/gears.yaml index b34f53681..6f9ccac10 100644 --- a/swagger/paths/gears.yaml +++ b/swagger/paths/gears.yaml @@ -5,12 +5,36 @@ tags: - gears responses: - default: - description: '' + '200': + description: 'Returns a list of gears installed on the system' + schema: + $ref: schemas/output/gear-list.json + examples: + response: + $ref: examples/output/gear-list.json # TODO: Can we make the parameter here consistent, or split # this into two separate APIs? /gears/{GearIdOrName}: + get: + summary: Retrieve details about a specific gear + operationId: get_gear + tags: + - gears + parameters: + - name: GearIdOrName + in: path + type: string + required: true + description: Id of the gear to interact with + responses: + '200': + description: 'Details about a single gear' + schema: + $ref: schemas/output/gear.json + examples: + response: + $ref: examples/output/gear.json post: summary: Create or update a gear. description: | @@ -18,44 +42,54 @@ Otherwise, the specified gear will be updated operationId: add_gear parameters: - - required: true - description: Name of the gear to interact with - type: string + - name: GearIdOrName in: path - name: GearIdOrName - tags: - - gears - responses: - default: - description: '' - get: - summary: Retrieve details about a specific gear - operationId: get_gear + type: string + required: true + description: Name of the gear to interact with + - name: body + in: body + required: true + schema: + $ref: schemas/input/gear.json tags: - gears - parameters: - - required: true - description: Id of the gear to interact with - type: string - in: path - name: GearIdOrName responses: '200': - description: '' + description: 'The gear was created or updated successfully' schema: - example: - $ref: examples/gear_full.json + $ref: schemas/output/collection-new.json delete: summary: Delete a gear (not recommended) operationId: delete_gear tags: - gears parameters: - - required: true - description: Id of the gear to interact with - type: string + - name: GearIdOrName in: path - name: GearIdOrName + type: string + required: true + description: Id of the gear to interact with responses: '200': description: Gear was deleted + +/gears/{GearId}/invocation: + parameters: + - name: GearId + in: path + type: string + required: true + description: Id of the gear to interact with + get: + summary: Get a schema for invoking a gear. + operationId: get_gear_invocation + tags: + - gears + responses: + '200': + description: The gear invocation schema. 
+ schema: + type: object + + diff --git a/swagger/paths/groups.yaml b/swagger/paths/groups.yaml index 5576cb6a7..f9817a81b 100644 --- a/swagger/paths/groups.yaml +++ b/swagger/paths/groups.yaml @@ -25,6 +25,7 @@ $template_arguments: parameters: - name: body in: body + required: true schema: $ref: schemas/input/group-new.json responses: @@ -34,6 +35,7 @@ $template_arguments: $ref: schemas/output/group-new.json '400': $ref: '#/responses/400:invalid-body-json' + /groups/{GroupId}: parameters: - required: true @@ -61,6 +63,7 @@ $template_arguments: parameters: - in: body name: body + required: true schema: $ref: schemas/input/group-update.json responses: diff --git a/swagger/paths/jobs.yaml b/swagger/paths/jobs.yaml index 04c328ef3..38e26dc69 100644 --- a/swagger/paths/jobs.yaml +++ b/swagger/paths/jobs.yaml @@ -7,9 +7,9 @@ parameters: - name: body in: body + required: true schema: - example: - $ref: examples/input/job-new.json + $ref: schemas/input/job-new.json responses: '200': description: '' @@ -25,6 +25,13 @@ operationId: get_next_job tags: - jobs + parameters: + - name: tags + in: query + type: array + items: + type: string + collectionFormat: multi responses: '200': description: '' @@ -89,7 +96,7 @@ 'running' state. Accepts the same body as /api/jobs/add , except all fields are optional. - operationId: update_job + operationId: modify_job tags: - jobs responses: @@ -98,6 +105,7 @@ parameters: - name: body in: body + required: true schema: example: $ref: examples/input/job-update.json @@ -142,3 +150,76 @@ schema: example: $ref: examples/output/job-config.json +/jobs/{JobId}/logs: + parameters: + - required: true + type: string + in: path + name: JobId + get: + summary: Get job logs + operationId: get_job_logs + tags: + - jobs + responses: + '200': + description: The current job log + schema: + $ref: schemas/output/job-log.json + post: + summary: Add logs to a job. + operationId: add_job_logs + tags: + - jobs + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/job-logs.json + responses: + '200': + description: Logs were added. No value is returned. +/jobs/{JobId}/accept-failed-output: + parameters: + - required: true + type: string + in: path + name: JobId + post: + summary: Accept failed job output. + description: > + Remove the 'from_failed_job' flag from the files. + + Create any automatic jobs for the accepted files. + operationId: accept_failed_output + tags: + - jobs + responses: + '200': + description: Failed output is accepted and created any automatic jobs. + '400': + description: Can only accept failed output of a job that failed. +/jobs/{JobId}/prepare-complete: + parameters: + - required: true + type: string + in: path + name: JobId + post: + summary: Create a ticket with the job id and its status. 
+ operationId: prepare_compete + tags: + - jobs + parameters: + - name: body + in: body + schema: + example: + success: True + responses: + '200': + description: 'Returns a ticket' + schema: + example: + ticket: 579e97738120be2ada087feb \ No newline at end of file diff --git a/swagger/paths/login.yaml b/swagger/paths/login.yaml index 85ec8f795..cc1955d89 100644 --- a/swagger/paths/login.yaml +++ b/swagger/paths/login.yaml @@ -7,8 +7,7 @@ '200': description: '' schema: - example: - success: true + $ref: schemas/output/login-output.json /logout: post: summary: Log Out @@ -18,5 +17,4 @@ '200': description: '' schema: - example: - auth_tokens_removed: 2 \ No newline at end of file + $ref: schemas/output/logout-output.json diff --git a/swagger/paths/projects.yaml b/swagger/paths/projects.yaml index 3c039f56c..f0248aa24 100644 --- a/swagger/paths/projects.yaml +++ b/swagger/paths/projects.yaml @@ -15,12 +15,15 @@ $template_arguments: update-input-schema: schemas/input/project-update.json get-output-schema: schemas/output/project.json +/projects/{ProjectId}/info: + $template: templates/container-item-info.yaml + /projects/groups: get: summary: List all groups which have a project in them operationId: get_all_projects_groups tags: - - 'projects' + - projects responses: '200': description: '' @@ -37,7 +40,7 @@ $template_arguments: summary: List all sessions for the given project. operationId: get_project_sessions tags: - - 'projects' + - projects responses: '200': description: '' @@ -54,23 +57,25 @@ $template_arguments: summary: List all acquisitions for the given project. operationId: get_project_acquisitions tags: - - 'projects' + - projects responses: '200': description: '' schema: $ref: schemas/output/acquisition-list.json -'/projects/{ProjectId}/rules': +/projects/{ProjectId}/rules: parameters: - in: path type: string name: ProjectId required: true get: + summary: List all rules for a project. operationId: get_project_rules tags: - - 'projects' + - projects + - rules responses: '200': description: '' @@ -80,7 +85,8 @@ $template_arguments: summary: Create a new rule for a project. operationId: add_project_rule tags: - - 'projects' + - projects + - rules responses: default: description: '' @@ -90,7 +96,7 @@ $template_arguments: schema: $ref: schemas/input/rule-new.json -'/projects/{ProjectId}/rules/{RuleId}': +/projects/{ProjectId}/rules/{RuleId}: parameters: - in: path type: string @@ -100,11 +106,23 @@ $template_arguments: type: string name: RuleId required: true + get: + summary: Get a project rule. + operationId: get_project_rule + tags: + - projects + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule.json put: summary: Update a rule on a project. operationId: modify_project_rule tags: - - 'projects' + - projects + - rules responses: default: description: '' @@ -113,9 +131,18 @@ $template_arguments: name: body schema: $ref: schemas/input/rule-update.json + delete: + summary: Remove a project rule. + operationId: remove_project_rule + tags: + - projects + - rules + responses: + '200': + $ref: '#/responses/200:deleted-with-count' -'/projects/{ProjectId}/template': +/projects/{ProjectId}/template: parameters: - in: path type: string @@ -125,7 +152,7 @@ $template_arguments: summary: Set the session template for a project. operationId: set_project_template tags: - - 'projects' + - projects parameters: - in: body name: body @@ -140,7 +167,7 @@ $template_arguments: summary: Remove the session template for a project. 
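The prepare-complete and accept-failed-output routes above can be exercised as in the sketch below, which reuses the example values from this spec; the base URL, auth header, and job id are placeholders.

```python
import requests

BASE = "https://example.flywheel.local/api"            # hypothetical base URL
HEADERS = {"Authorization": "scitran-user <API_KEY>"}  # auth header format is an assumption
job_id = "579e97738120be2ada087fea"                    # placeholder job id

# POST /jobs/{JobId}/prepare-complete with a success flag; the response carries a ticket id
resp = requests.post(BASE + "/jobs/%s/prepare-complete" % job_id,
                     json={"success": True}, headers=HEADERS)
ticket = resp.json()["ticket"]

# POST /jobs/{JobId}/accept-failed-output removes the 'from_failed_job' flag from the job's
# files and creates any automatic jobs; the endpoint returns 400 if the job did not fail.
requests.post(BASE + "/jobs/%s/accept-failed-output" % job_id, headers=HEADERS)
```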
operationId: remove_project_template tags: - - 'projects' + - projects responses: '200': $ref: '#/responses/200:deleted-with-count' @@ -158,7 +185,7 @@ $template_arguments: description: Returns list of modified session ids. operationId: recalc_project tags: - - 'projects' + - projects responses: '200': description: | @@ -178,7 +205,7 @@ $template_arguments: Returns list of modified session ids. operationId: recalc_all_projects tags: - - 'projects' + - projects responses: '200': description: | @@ -207,6 +234,8 @@ $template_arguments: $template: templates/file-list-upload.yaml /projects/{ProjectId}/files/{FileName}: $template: templates/file-item.yaml +/projects/{ProjectId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Permissions ===== /projects/{ProjectId}/permissions: @@ -223,8 +252,12 @@ $template_arguments: # ===== Analyses ===== /projects/{ProjectId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /projects/{ProjectId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /projects/{ProjectId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /projects/{ProjectId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/resolver.yaml b/swagger/paths/resolver.yaml new file mode 100644 index 000000000..3ad449456 --- /dev/null +++ b/swagger/paths/resolver.yaml @@ -0,0 +1,50 @@ +/resolve: + post: + summary: Perform path based lookup of nodes in the Flywheel hierarchy + description: | + This will perform a deep lookup of a node (i.e. group/project/session/acquisition) and its children, + including any files. The query path is an array of strings in the following order (by default): + + * group id + * project label + * session label + * acquisition label + + Additionally, analyses for project/session/acquisition nodes can be resolved by inserting the literal + string `"analyses"`. e.g. `['scitran', 'MyProject', 'analyses']`. + + Files for projects, sessions, acquisitions and analyses can be resolved by inserting the literal string + `"files"`. e.g. `['scitran', 'MyProject', 'files']`. + + An ID can be used instead of a label by formatting the string as ``. The full path + to the node, and the node's children will be included in the response. + operationId: resolve_path + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/resolver.json + responses: + '200': + description: '' + schema: + $ref: schemas/output/resolver.json + +/lookup: + post: + summary: Perform path based lookup of a single node in the Flywheel hierarchy + description: | + This will perform a deep lookup of a node. See /resolve for more details. 
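A short usage sketch for the resolver endpoints defined in this diff: both /resolve and /lookup take the resolver-input body (`{"path": [...]}`), /resolve returns the resolved path plus its children, and /lookup returns only the single matched node. The base URL and auth header are placeholders; the example path comes from schemas/input/resolver.json.

```python
import requests

BASE = "https://example.flywheel.local/api"            # hypothetical base URL
HEADERS = {"Authorization": "scitran-user <API_KEY>"}  # auth header format is an assumption

# POST /resolve with schemas/input/resolver.json: path is [group id, project label, ...]
body = {"path": ["scitran", "Neuroscience"]}
result = requests.post(BASE + "/resolve", json=body, headers=HEADERS).json()

# resolver-output: "path" is the chain of resolved nodes, "children" is the next level down;
# each node carries a "container_type" discriminator (group/project/session/acquisition/file/analysis)
for node in result.get("children", []):
    print(node["container_type"], node.get("label", node.get("name")))

# POST /lookup takes the same input but returns only the resolved node itself
node = requests.post(BASE + "/lookup", json=body, headers=HEADERS).json()
```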
+ operationId: lookup_path + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/resolver.json + responses: + '200': + description: '' + schema: + $ref: schemas/output/lookup.json diff --git a/swagger/paths/sessions.yaml b/swagger/paths/sessions.yaml index 2b79ba2fa..520512832 100644 --- a/swagger/paths/sessions.yaml +++ b/swagger/paths/sessions.yaml @@ -15,6 +15,9 @@ $template_arguments: update-input-schema: schemas/input/session.json get-output-schema: schemas/output/session.json +/sessions/{SessionId}/info: + $template: templates/container-item-info.yaml + '/sessions/{SessionId}/jobs': parameters: - in: path @@ -47,19 +50,13 @@ $template_arguments: /sessions/{SessionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/sessions/{SessionId}/packfile-start: - $template: templates/packfile-start.yaml -/sessions/{SessionId}/packfile: - $template: templates/packfile.yaml -/sessions/{SessionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /sessions/{SessionId}/files: $template: templates/file-list-upload.yaml /sessions/{SessionId}/files/{FileName}: $template: templates/file-item.yaml +/sessions/{SessionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /sessions/{SessionId}/notes: @@ -87,34 +84,13 @@ $template_arguments: # ===== Analyses ===== /sessions/{SessionId}/analyses: - parameters: - - in: path - type: string - required: true - name: SessionId - post: - summary: Create an analysis and upload files. - description: | - When query param "job" is "true", send JSON to create - an analysis and job. Otherwise, multipart/form-data - to upload files and create an analysis. - operationId: add_session_analysis - tags: - - 'sessions' - parameters: - - in: body - name: body - schema: - $ref: schemas/input/analysis-job.json - - in: query - type: boolean - name: job - responses: - '200': - description: '' - + $template: templates/analyses-list.yaml + arguments: + allowCreate: true /sessions/{SessionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /sessions/{SessionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /sessions/{SessionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/site-rules.yaml b/swagger/paths/site-rules.yaml new file mode 100644 index 000000000..73b23e709 --- /dev/null +++ b/swagger/paths/site-rules.yaml @@ -0,0 +1,62 @@ +/site/rules: + get: + summary: List all site rules. + operationId: get_site_rules + tags: + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule-list.json + post: + summary: Create a new site rule. + operationId: add_site_rule + tags: + - rules + responses: + default: + description: '' + parameters: + - in: body + name: body + schema: + $ref: schemas/input/rule-new.json + +/site/rules/{RuleId}: + parameters: + - name: RuleId + type: string + in: path + required: true + get: + summary: Get a site rule. + operationId: get_site_rule + tags: + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule.json + put: + summary: Update a site rule. + operationId: modify_site_rule + tags: + - rules + responses: + default: + description: '' + parameters: + - in: body + name: body + schema: + $ref: schemas/input/rule-update.json + delete: + summary: Remove a site rule. 
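A small sketch of the site-rule routes above. Only the `disabled` field is shown in the update body because it is the only rule field visible in definitions/rule.json within this change; the base URL, auth header, and rule id are placeholders.

```python
import requests

BASE = "https://example.flywheel.local/api"            # hypothetical base URL
HEADERS = {"Authorization": "scitran-user <API_KEY>"}  # auth header format is an assumption

# GET /site/rules -> schemas/output/rule-list.json
rules = requests.get(BASE + "/site/rules", headers=HEADERS).json()

# GET /site/rules/{RuleId} -> schemas/output/rule.json
rule_id = "<RuleId>"  # placeholder; take an id from the list above
rule = requests.get(BASE + "/site/rules/" + rule_id, headers=HEADERS).json()

# PUT /site/rules/{RuleId} with schemas/input/rule-update.json
requests.put(BASE + "/site/rules/" + rule_id, json={"disabled": True}, headers=HEADERS)
```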
+ operationId: remove_site_rule + tags: + - rules + responses: + '200': + $ref: '#/responses/200:deleted-with-count' diff --git a/swagger/paths/upload-by-reaper.yaml b/swagger/paths/upload-by-reaper.yaml new file mode 100644 index 000000000..ea28acc75 --- /dev/null +++ b/swagger/paths/upload-by-reaper.yaml @@ -0,0 +1,37 @@ +/upload/reaper: + post: + summary: Bottom-up UID matching of Multipart form upload with N file fields, each with their desired filename. + description: | + Upload data, allowing users to move sessions during scans without causing new data to be + created in referenced project/group. + + + ### Evaluation Order: + + * If a matching acquisition UID is found anywhere on the system, the related files will be placed under that acquisition. + * **OR** If a matching session UID is found, a new acquistion is created with the specified UID under that Session UID. + * **OR** If a matching group ID and project label are found, a new session and acquisition will be created within that project + * **OR** If a matching group ID is found, a new project and session and acquisition will be created within that group. + * **OR** A new session and acquisition will be created within a special "Unknown" group and project, which is only visible to system administrators. + + operationId: upload_by_reaper + tags: + - files + responses: + '200': + description: 'Files uploaded successfully' + schema: + $ref: schemas/output/file-list.json + examples: + application/json: + $ref: examples/file_info_list.json + '402': + description: Uploads must be from an authorized drone + consumes: + - multipart/form-data + parameters: + # TODO: Need to add ref to json input schema. Proper way not yet defined for Multipart form uploads. + # See api/schemas/input/uidupload.json for the format of this metadata. + - in: formData + name: formData + type: string \ No newline at end of file diff --git a/swagger/paths/users.yaml b/swagger/paths/users.yaml index f2af9b8fa..5b14a3e73 100644 --- a/swagger/paths/users.yaml +++ b/swagger/paths/users.yaml @@ -17,6 +17,7 @@ parameters: - name: body in: body + required: true schema: $ref: schemas/input/user-new.json responses: @@ -79,6 +80,7 @@ parameters: - name: body in: body + required: true schema: $ref: schemas/input/user-update.json description: > diff --git a/swagger/responses/index.yaml b/swagger/responses/index.yaml index 03986aaa2..1b4e4c486 100644 --- a/swagger/responses/index.yaml +++ b/swagger/responses/index.yaml @@ -22,6 +22,22 @@ example: modified: 1 +'200:modified-with-count-and-jobs': + description: The number of records modified and number of jobs started. + schema: + type: object + properties: + modified: + type: integer + jobs_triggered: + type: integer + required: + - modified + - jobs_triggered + example: + modified: 1 + jobs_triggered: 0 + '400:invalid-body-json': description: | JSON did not validate against schema for this endpoint. 
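A hedged sketch of the reaper upload described above: a multipart/form-data POST with one field per file, each named for its desired filename. The accompanying metadata field is left out because its format is still marked TODO in this spec (see api/schemas/input/uidupload.json), and the drone authentication mechanism is not part of this diff; without it the endpoint responds 402.

```python
import requests

BASE = "https://example.flywheel.local/api"  # hypothetical base URL
# NOTE: requests from anything other than an authorized drone are rejected with 402;
# how the drone authenticates is outside the scope of this spec.

# POST /upload/reaper as multipart/form-data, one field per file with its desired filename
files = {
    "1_1_dicom.zip": open("1_1_dicom.zip", "rb"),
}
resp = requests.post(BASE + "/upload/reaper", files=files)
resp.raise_for_status()

# On success the body follows schemas/output/file-list.json and describes the saved files
saved = resp.json()
```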
diff --git a/swagger/schemas/definitions/acquisition.json b/swagger/schemas/definitions/acquisition.json index 181bfb6da..d0d794369 100644 --- a/swagger/schemas/definitions/acquisition.json +++ b/swagger/schemas/definitions/acquisition.json @@ -2,17 +2,18 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ "acquisition-input":{ - "type": "object", - "properties": { - "public": {"$ref":"container.json#/definitions/public"}, - "label": {"$ref":"common.json#/definitions/label"}, - "info": {"$ref":"container.json#/definitions/info"}, - "session": {"$ref":"common.json#/definitions/objectid"}, - "uid": {"$ref":"container.json#/definitions/uid"}, - "timestamp": {"$ref":"container.json#/definitions/timestamp"}, - "timezone": {"$ref":"container.json#/definitions/timezone"} - }, - "additionalProperties":false + "type": "object", + "properties": { + "public": {"$ref":"container.json#/definitions/public"}, + "label": {"$ref":"common.json#/definitions/label"}, + "info": {"$ref":"container.json#/definitions/info"}, + "session": {"$ref":"common.json#/definitions/objectid"}, + "uid": {"$ref":"container.json#/definitions/uid"}, + "timestamp": {"$ref":"container.json#/definitions/timestamp"}, + "timezone": {"$ref":"container.json#/definitions/timezone"} + }, + "additionalProperties":false, + "x-sdk-model": "acquisition" }, "acquisition-metadata-input": { "type": "object", @@ -61,13 +62,14 @@ "items":{"$ref":"permission.json#/definitions/permission-output-default-required"} }, "notes":{"allOf":[{"$ref":"note.json#/definitions/notes-list-output"}]}, - "tags":{"allOf":[{"$ref":"tag.json#/definitions/tag-output-list"}]}, + "tags":{"allOf":[{"$ref":"tag.json#/definitions/tag-list"}]}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "acquisition" } } } diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index fa1e2c89b..49944a9e5 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -3,11 +3,11 @@ "definitions": { "inputs": { "type": ["array", "null"], - "items": {"$ref":"file.json#/definitions/file"} + "items": {"$ref":"file.json#/definitions/file-entry"} }, "outputs": { "type": ["array", "null"], - "items": {"$ref":"file.json#/definitions/file"} + "items": {"$ref":"file.json#/definitions/file-entry"} }, "analysis-input":{ "type":"object", @@ -35,13 +35,16 @@ "_id":{"$ref":"common.json#/definitions/objectid"}, "files":{ "type":"array", - "items":{"$ref":"file.json#/definitions/file"} + "items":{"$ref":"file.json#/definitions/file-entry"} }, "job":{ "oneOf":[ {"$ref":"common.json#/definitions/objectid"}, - {"type":"object"} - ] + {"$ref": "job.json#/definitions/job-output"} + ], + "x-sdk-schema": { + "$ref": "job.json#/definitions/job-output" + } }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, "description": {"$ref":"common.json#/definitions/description"}, @@ -52,6 +55,24 @@ }, "required":["_id", "files", "label", "user", "created", "modified"] }, + "analysis-list-entry":{ + "type":"object", + "properties":{ + "_id":{"$ref":"common.json#/definitions/objectid"}, + "files":{ + "type":"array", + "items":{"$ref":"file.json#/definitions/file-entry"} + }, + "job": {"$ref":"common.json#/definitions/objectid"}, + "notes": {"$ref":"note.json#/definitions/notes-list-output"}, + "description": {"$ref":"common.json#/definitions/description"}, + "label": 
{"$ref":"common.json#/definitions/label"}, + "user": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "modified": {"$ref":"created-modified.json#/definitions/modified"} + }, + "required":["_id", "files", "label", "user", "created", "modified"] + }, "analysis-job": { "type": "object", "properties":{ diff --git a/swagger/schemas/definitions/auth.json b/swagger/schemas/definitions/auth.json new file mode 100644 index 000000000..e5f6ee0f3 --- /dev/null +++ b/swagger/schemas/definitions/auth.json @@ -0,0 +1,19 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "login-output": { + "type": "object", + "properties": { + "token": {"type": "string"} + }, + "required": ["token"] + }, + "logout-output": { + "type": "object", + "properties": { + "tokens_removed": {"type": "integer"} + }, + "required": ["tokens_removed"] + } + } +} \ No newline at end of file diff --git a/swagger/schemas/definitions/batch.json b/swagger/schemas/definitions/batch.json new file mode 100644 index 000000000..e3803402d --- /dev/null +++ b/swagger/schemas/definitions/batch.json @@ -0,0 +1,81 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "matched-container-list": { + "type": "array", + "items": {"$ref":"container.json#/definitions/container-output-with-files"} + }, + "batch-proposal-detail": { + "type": "object", + "properties": { + "analysis": {"$ref": "analysis.json#/definitions/analysis-input"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"} + }, + "additionalProperties": false + }, + "batch": { + "type": "object", + "properties": { + "_id": {"$ref":"common.json#/definitions/objectid"}, + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "state": {"$ref":"job.json#/definitions/state"}, + "origin": {"$ref":"job.json#/definitions/job-origin"}, + "config": {"$ref":"job.json#/definitions/config"}, + "jobs": { + "type": "array", + "items": { "$ref": "common.json#/definitions/objectid" } + }, + "created":{"$ref":"created-modified.json#/definitions/created"}, + "modified":{"$ref":"created-modified.json#/definitions/modified"} + } + }, + "batch-proposal": { + "type": "object", + "properties": { + "_id": {"$ref":"common.json#/definitions/objectid"}, + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "state": {"$ref":"job.json#/definitions/state"}, + "config": {"$ref":"job.json#/definitions/config"}, + "origin": {"$ref":"job.json#/definitions/job-origin"}, + + "proposal": { "$ref": "#/definitions/batch-proposal-detail" }, + + "ambiguous": { "$ref": "#/definitions/matched-container-list" }, + "matched": { "$ref": "#/definitions/matched-container-list" }, + "not_matched": { "$ref": "#/definitions/matched-container-list" }, + + "improper_permissions": { + "type": "array", + "items": {"$ref":"container.json#/definitions/_id"} + }, + + "created":{"$ref":"created-modified.json#/definitions/created"}, + "modified":{"$ref":"created-modified.json#/definitions/modified"} + }, + "additionalProperties": false + }, + "batch-proposal-input": { + "type": "object", + "properties": { + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "config": {"$ref":"job.json#/definitions/config"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, + "analysis": {"$ref": "analysis.json#/definitions/analysis-input"}, + "targets": { + "type": "array", + "items": {"$ref":"container.json#/definitions/container-reference"} + } + }, + "additionalProperties": false + }, + "batch-cancel-output": { + "type": "object", 
+ "properties": { + "number_cancelled": {"type":"integer"} + }, + "additionalProperties": false, + "required": ["number_cancelled"], + "x-sdk-return": "number_cancelled" + } + } +} diff --git a/swagger/schemas/definitions/collection.json b/swagger/schemas/definitions/collection.json index 67caaed72..b853885de 100644 --- a/swagger/schemas/definitions/collection.json +++ b/swagger/schemas/definitions/collection.json @@ -1,6 +1,31 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ + "collection-node": { + "type": "object", + "properties": { + "level": { + "type": "string", + "enum": ["project", "session", "acquisition"] + }, + "_id": { "$ref": "common.json#/definitions/objectid" } + }, + "additionalProperties": false + }, + "collection-operation": { + "type": "object", + "properties": { + "operation": { + "type": "string", + "enum": ["add", "remove"] + }, + "nodes": { + "type": "array", + "items": { "$ref": "#/definitions/collection-node" } + } + }, + "additionalProperties": false + }, "collection-input":{ "type": "object", "properties": { @@ -9,6 +34,7 @@ "info": {"$ref": "container.json#/definitions/info"}, "description": {"$ref": "common.json#/definitions/description"} }, + "x-sdk-model": "collection", "additionalProperties": false }, "collection-input-with-contents":{ @@ -18,8 +44,9 @@ "label": {"$ref": "common.json#/definitions/label"}, "info": {"$ref": "container.json#/definitions/info"}, "description": {"$ref": "common.json#/definitions/description"}, - "contents": { "type": "object" } + "contents": {"$ref": "#/definitions/collection-operation"} }, + "x-sdk-model": "collection", "additionalProperties": false }, "collection-new-output": { @@ -27,7 +54,8 @@ "properties": { "_id": {"$ref":"common.json#/definitions/objectid"} }, - "required": ["_id"] + "required": ["_id"], + "x-sdk-return": "_id" }, "collection-output":{ "type": "object", @@ -50,12 +78,13 @@ "items":{"$ref":"file.json#/definitions/file-output"} }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, + "x-sdk-model": "collection", "additionalProperties":false } } diff --git a/swagger/schemas/definitions/common.json b/swagger/schemas/definitions/common.json index 1a52e3dee..7a67f68cb 100644 --- a/swagger/schemas/definitions/common.json +++ b/swagger/schemas/definitions/common.json @@ -52,7 +52,8 @@ "_id": { "type": "string" } - } + }, + "x-sdk-return": "_id" } } } \ No newline at end of file diff --git a/swagger/schemas/definitions/container.json b/swagger/schemas/definitions/container.json index ed3161013..77d27a36d 100644 --- a/swagger/schemas/definitions/container.json +++ b/swagger/schemas/definitions/container.json @@ -8,13 +8,43 @@ "uid": {"type": "string"}, "timestamp": {"type": ["string", "null"], "format": "date-time"}, "timezone": {"type": "string"}, + "container-type": { + "type": "string", + "enum": ["group", "project", "session", "acquisition", "collection", "analysis"], + "description": "The type of container (e.g. 
session)" + }, "container-new-output": { "type": "object", "properties": { "_id": {"$ref":"#/definitions/_id"} }, - "required": ["_id"] - } + "required": ["_id"], + "x-sdk-return": "_id" + }, + "container-reference": { + "type": "object", + "properties": { + "type": {"$ref":"#/definitions/container-type"}, + "id": {"$ref":"#/definitions/_id"} + }, + "required": [ "type", "id" ], + "additionalProperties":false, + "description": "A reference to an individual container, by type and id" + }, + "container-output-with-files": { + "type": "object", + "properties": { + "_id": {"$ref":"#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "files":{ + "type":"array", + "items":{"$ref":"file.json#/definitions/file-output"} + }, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "modified": {"$ref":"created-modified.json#/definitions/modified"} + }, + "description": "Generic container output with files" + } } } diff --git a/swagger/schemas/definitions/created-modified.json b/swagger/schemas/definitions/created-modified.json index 1d33cac5d..715171615 100644 --- a/swagger/schemas/definitions/created-modified.json +++ b/swagger/schemas/definitions/created-modified.json @@ -2,10 +2,12 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ "created": { - "type": "string" + "type": "string", + "format": "date-time" }, "modified": { - "type": "string" + "type": "string", + "format": "date-time" } } } diff --git a/swagger/schemas/definitions/device.json b/swagger/schemas/definitions/device.json index ae5abed6e..85ac61528 100644 --- a/swagger/schemas/definitions/device.json +++ b/swagger/schemas/definitions/device.json @@ -12,6 +12,16 @@ "type": "string", "enum": ["ok", "missing", "error", "unknown"] }, + "device-status-entry": { + "type": "object", + "properties":{ + "errors": {"$ref":"#/definitions/errors"}, + "last_seen": {"$ref":"common.json#/definitions/timestamp"}, + "status": {"$ref":"#/definitions/status-value"} + }, + "additionalProperties":false, + "required": ["last_seen", "status"] + }, "device": { "type": "object", "properties": { @@ -23,33 +33,30 @@ "interval": {"$ref":"#/definitions/interval"}, "last_seen": {"$ref":"common.json#/definitions/timestamp"} }, + "x-sdk-model": "device", "additionalProperties": false }, "device-input":{ - "type": "object", - "properties": { - "interval": {"$ref":"#/definitions/interval"}, - "errors": {"$ref":"#/definitions/errors"}, - "info": {"$ref":"common.json#/definitions/info"} - }, - "additionalProperties": false + "type": "object", + "properties": { + "interval": {"$ref":"#/definitions/interval"}, + "errors": {"$ref":"#/definitions/errors"}, + "info": {"$ref":"common.json#/definitions/info"} + }, + "x-sdk-model": "device", + "additionalProperties": false }, "device-output": { "type": "object", "allOf": [{"$ref":"#/definitions/device"}], - "required": ["_id", "name", "method", "last_seen"] + "required": ["_id", "name", "method", "last_seen"], + "x-sdk-model": "device" }, "device-status": { "type":"object", "patternProperties": { "^[0-9a-z.@_-]*$":{ - "properties":{ - "errors": {"$ref":"#/definitions/errors"}, - "last_seen": {"$ref":"common.json#/definitions/timestamp"}, - "status": {"$ref":"#/definitions/status-value"} - }, - "additionalProperties":false, - "required": ["last_seen", "status"] + "$ref": "#/definitions/device-status-entry" } } } diff --git a/swagger/schemas/definitions/download.json b/swagger/schemas/definitions/download.json index 8e7c00314..8dac67445 100644 --- 
a/swagger/schemas/definitions/download.json +++ b/swagger/schemas/definitions/download.json @@ -9,6 +9,7 @@ "-": {"$ref": "#/definitions/filter-items"}, "minus": {"$ref": "#/definitions/filter-items"} }, + "x-sdk-ignore-properties": ["+", "-"], "additionalProperties": false }, "filter-items": { diff --git a/swagger/schemas/definitions/file.json b/swagger/schemas/definitions/file.json index f6cc81590..8f1ce5a06 100644 --- a/swagger/schemas/definitions/file.json +++ b/swagger/schemas/definitions/file.json @@ -41,7 +41,7 @@ "maxLength":106 }, "size":{"type":"integer"}, - "file": { + "file-entry": { "type": "object", "properties": { "name": {"$ref":"#/definitions/name"}, @@ -65,7 +65,8 @@ "input": {"type":"boolean"}, "output": {"type":"boolean"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-input":{ "type": "object", @@ -78,7 +79,8 @@ "tags": {"$ref":"#/definitions/tags"}, "info": {"$ref":"common.json#/definitions/info"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-update":{ "type": "object", @@ -87,12 +89,25 @@ "modality": {"$ref":"#/definitions/modality"}, "measurements": {"$ref":"#/definitions/measurements"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-output":{ "type": "object", - "allOf": [{"$ref":"#/definitions/file"}], - "required":["modified", "size"] - } + "allOf": [{"$ref":"#/definitions/file-entry"}], + "required":["modified", "size"], + "x-sdk-model": "file-entry" + }, + "file-reference": { + "type": "object", + "properties": { + "type": {"$ref":"container.json#/definitions/container-type"}, + "id": {"$ref":"container.json#/definitions/_id"}, + "name": {"$ref":"#/definitions/name"} + }, + "required": [ "type", "id", "name" ], + "additionalProperties":false, + "description": "A reference to an individual file in a container, by type, id and name" + } } } diff --git a/swagger/schemas/definitions/gear.json b/swagger/schemas/definitions/gear.json new file mode 100644 index 000000000..5c9c9acb2 --- /dev/null +++ b/swagger/schemas/definitions/gear.json @@ -0,0 +1,182 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$comment": "This is based on https://github.com/flywheel-io/gears/blob/master/spec/manifest.schema.json. It is NOT used for validation, just for doc/code gen.", + "definitions": { + "gear-directive": { + "type": "object", + "description": "A schema directive." + }, + "gear-author": { + "type": "string", + "description": "The author of this gear." + }, + "gear-maintainer": { + "type": "string", + "description": "(optional) The maintainer of this gear. Can be used to distinguish the algorithm author from the gear maintainer." + }, + "gear-cite": { + "type": "string", + "description": "(optional) Any citations relevant to the algorithm(s) or work present in the gear." + }, + "gear-config": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/gear-directive" + }, + "description": "Schema snippets describing the options this gear consumes. Not currently processed." + }, + "gear-custom": { + "type": "object", + "description": "A place for gear authors to put arbitrary information." + }, + "gear-description": { + "type": "string", + "description": "A brief description of the gear's purpose. Ideally 1-4 sentences." 
+ }, + "gear-environment": { + "type": "object", + "additionalProperties": { "type": "string" }, + "description": "Environment variables that should be set for the gear." + }, + "gear-command": { + "type": "string", + "description": "If provided, the starting command for the gear, rather than /flywheel/v0/run. Will be templated according to the spec." + }, + "gear-label": { + "type": "string", + "description": "The human-friendly name of this gear." + }, + "gear-license": { + "type": "string", + "description": "Software license of the gear" + }, + "gear-name": { + "type": "string", + "pattern": "^[a-z0-9\\-]+$", + "description": "The identification of this gear." + }, + "gear-uri": { + "type": "string", + "description": "A valid URI, or empty string." + }, + "gear-input-item": { + "description": "Describes a gear input", + "properties": { + "base": { + "type": "string", + "enum": [ "file", "api-key" ], + "description": "The type of gear input." + }, + "description": { + "type": "string", + "description": "Hackaround for description not technically being a schema directive" + }, + "optional": { + "type": "boolean", + "description": "Allow the gear to mark an input file as optional." + } + }, + "required": [ "base" ], + "additionalProperties": { + "$ref": "#/definitions/gear-directive" + } + }, + "gear-inputs": { + "type": "object", + "additionalProperties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/gear-input-item" + } + }, + "description": "Schema snippets describing the inputs this gear consumes." + }, + "gear-source": { + "$ref": "#/definitions/gear-uri", + "description": "The URL where the source code of this gear can be found. Leave blank if none." + }, + "gear-url": { + "$ref": "#/definitions/gear-uri", + "description": "The URL where more information about this gear can be found. Leave blank if none." + }, + "gear-version": { + "type": "string", + "description": "A human-friendly string explaining the release version of this gear. 
Example: 3.2.1" + }, + + "gear-exchange": { + "type": "object", + "description": "Metadata object that describes the origin of a gear version", + "properties": { + "git-commit": { + "type": "string", + "description": "The SHA-1 hash referring to the git commit" + }, + "rootfs-hash": { + "type": "string", + "description": "The cryptographic hash of the root filesystem in the form of \"algorithm:\"" + }, + "rootfs-url": { + "type": "string", + "description": "The absolute URL of the gear's root file system" + } + }, + "required": [ + "git-commit", + "rootfs-hash", + "rootfs-url" + ] + }, + "gear": { + "type": "object", + "description": "Gear manifest", + "properties": { + "author": { "$ref": "#/definitions/gear-author" }, + "maintainer": { "$ref": "#/definitions/gear-maintainer" }, + "cite": { "$ref": "#/definitions/gear-cite" }, + "config": { "$ref": "#/definitions/gear-config" }, + "custom": { "$ref": "#/definitions/gear-custom" }, + "description": { "$ref": "#/definitions/gear-description" }, + "environment": { "$ref": "#/definitions/gear-environment" }, + "command": { "$ref": "#/definitions/gear-command" }, + "inputs": { "$ref": "#/definitions/gear-inputs" }, + "label": { "$ref": "#/definitions/gear-label" }, + "license": { "$ref": "#/definitions/gear-license" }, + "name": { "$ref": "#/definitions/gear-name" }, + "source": { "$ref": "#/definitions/gear-source" }, + "url": { "$ref": "#/definitions/gear-url" }, + "version": { "$ref": "#/definitions/gear-version" } + }, + "required": [ + "author", + "config", + "description", + "inputs", + "label", + "license", + "name", + "source", + "url", + "version" + ], + "x-sdk-include-empty": [ "config", "inputs" ], + "additionalProperties": false + }, + "gear-category": { + "type": "string", + "description": "The gear category" + }, + "gear-doc": { + "type": "object", + "properties": { + "_id": { "$ref": "common.json#/definitions/objectid" }, + "category": { "$ref": "#/definitions/gear-category" }, + "gear": { "$ref": "#/definitions/gear" }, + "exchange": { "$ref": "#/definitions/gear-exchange" }, + "created": { "$ref":"created-modified.json#/definitions/created"}, + "modified": { "$ref":"created-modified.json#/definitions/modified"} + }, + "description": "A full gear description, including manifest and exchange information" + } + } +} diff --git a/swagger/schemas/definitions/group.json b/swagger/schemas/definitions/group.json index e5bab92d3..9bbd1bd2f 100644 --- a/swagger/schemas/definitions/group.json +++ b/swagger/schemas/definitions/group.json @@ -15,9 +15,16 @@ "label": {"$ref": "#/definitions/label"}, "permissions": {"$ref": "permission.json#/definitions/permission-output-list"}, "created": {"$ref":"created-modified.json#/definitions/created"}, - "modified": {"$ref":"created-modified.json#/definitions/modified"} + "modified": {"$ref":"created-modified.json#/definitions/modified"}, + "tags": { + "type": "array", + "items": { + "type": "string" + } + } }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "group" }, "group-input":{ "type": "object", @@ -25,7 +32,8 @@ "_id":{"$ref":"common.json#/definitions/string-id"}, "label": {"$ref": "#/definitions/label"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "group" }, "group-metadata-input": { "type": "object", @@ -37,14 +45,16 @@ "group-output":{ "type": "object", "allOf": [{"$ref":"#/definitions/group"}], - "required": ["permissions", "created","modified","_id"] + "required": ["permissions", "created","modified","_id"], + 
"x-sdk-model": "group" }, "group-new-output": { "type": "object", "properties": { "_id": {"$ref":"common.json#/definitions/string-id"} }, - "required": ["_id"] + "required": ["_id"], + "x-sdk-return": "_id" }, "group-output-list":{ "type":"array", @@ -53,7 +63,8 @@ "project-group-output":{ "type": "object", "allOf": [{"$ref":"#/definitions/group"}], - "required": ["_id"] + "required": ["_id"], + "x-sdk-model": "group" }, "project-group-output-list":{ "type":"array", diff --git a/swagger/schemas/definitions/info.json b/swagger/schemas/definitions/info.json index fc4d65558..e2e5f9263 100644 --- a/swagger/schemas/definitions/info.json +++ b/swagger/schemas/definitions/info.json @@ -2,6 +2,7 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { "info-add-remove": { + "type": "object", "properties": { "set": {"type": "object", "minProperties": 1}, "delete": { @@ -16,6 +17,7 @@ "additionalProperties": false }, "info-replace": { + "type": "object", "properties": { "replace": {"type": "object"} }, diff --git a/swagger/schemas/definitions/job.json b/swagger/schemas/definitions/job.json index 7b27244e2..9d5a8db52 100644 --- a/swagger/schemas/definitions/job.json +++ b/swagger/schemas/definitions/job.json @@ -5,34 +5,23 @@ "gear_id": {"type":"string"}, "previous_job_id": {"type":"string"}, - "inputs-property-type":{"type":"string"}, - "inputs-property-id":{"type":"string"}, - "inputs-property-name":{"type":"string"}, - "inputs-item": { "type":"object", "properties":{ - "type":{"enum":["http", "scitran"]}, + "type":{ + "type": "string", + "enum":["http", "scitran"] + }, "uri":{"type":"string"}, "location":{"type":"string"}, "vu":{"type":"string"} }, "required":["type", "uri", "location"] }, - "inputs-entry": { - "type": "object", - "properties": { - "type": {"$ref":"#/definitions/inputs-property-type"}, - "id": {"$ref":"#/definitions/inputs-property-id"}, - "name": {"$ref":"#/definitions/inputs-property-name"} - }, - "required": [ "type", "id", "name" ], - "additionalProperties":false - }, "inputs-object": { "type": "object", "patternProperties": { - "[-_ a-zA-Z0-9]+": {"$ref":"#/definitions/inputs-entry"} + "[-_ a-zA-Z0-9]+": {"$ref":"file.json#/definitions/file-reference"} } }, "inputs-array":{ @@ -40,9 +29,9 @@ "items":{ "type":"object", "properties":{ - "type": {"$ref":"#/definitions/inputs-property-type"}, - "id": {"$ref":"#/definitions/inputs-property-id"}, - "name": {"$ref":"#/definitions/inputs-property-name"}, + "type": {"$ref":"container.json#/definitions/container-type"}, + "id": {"$ref":"container.json#/definitions/_id"}, + "name": {"$ref":"file.json#/definitions/name"}, "input":{"type":"string"} } } @@ -79,6 +68,26 @@ ], "additionalProperties":false }, + "job-log-statement": { + "type": "object", + "properties": { + "fd": { "type": "integer" }, + "msg": { "type": "string" } + }, + "required": ["fd", "msg"] + }, + "job-log": { + "type": "object", + "properties": { + "id": {"$ref":"common.json#/definitions/objectid"}, + "logs": { + "type": "array", + "items": { + "$ref": "#/definitions/job-log-statement" + } + } + } + }, "saved_files": { "type": "array", "items": {"$ref": "file.json#/definitions/name"} @@ -94,7 +103,8 @@ }, "state":{ - "type":"string" + "type":"string", + "enum": [ "pending", "running", "failed", "complete", "cancelled" ] }, "attempt":{ "type":"integer" @@ -142,7 +152,8 @@ "saved_files":{"$ref":"#/definitions/saved_files"}, "produced_metadata":{"$ref":"#/definitions/produced-metadata"} }, - "additionalProperties":false + "additionalProperties":false, + 
"x-sdk-model":"job" }, "job-input": { "type":"object", @@ -154,7 +165,8 @@ "config":{"$ref":"#/definitions/config"} }, "required": ["gear_id"], - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model":"job" }, "job-output": { "type": "object", @@ -162,7 +174,8 @@ "required": [ "id", "gear_id", "inputs", "config", "destination", "tags", "state", "attempt" - ] + ], + "x-sdk-model":"job" } } } diff --git a/swagger/schemas/definitions/note.json b/swagger/schemas/definitions/note.json index 58298f1a3..46cd5786b 100644 --- a/swagger/schemas/definitions/note.json +++ b/swagger/schemas/definitions/note.json @@ -1,13 +1,17 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "text": {"type": "string"}, + "text": { + "type": "string", + "x-sdk-positional": true + }, "note-input":{ "type":"object", "properties":{ "text":{"$ref":"#/definitions/text"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "note" }, "notes-list-input": { "type": "array", @@ -23,7 +27,8 @@ "user":{"$ref":"common.json#/definitions/user-id"} }, "additionalProperties": false, - "required":["_id", "text", "created", "modified", "user"] + "required":["_id", "text", "created", "modified", "user"], + "x-sdk-model": "note" }, "notes-list-output":{ "type":"array", diff --git a/swagger/schemas/definitions/packfile.json b/swagger/schemas/definitions/packfile.json index d40dcffd2..a9cf41aad 100644 --- a/swagger/schemas/definitions/packfile.json +++ b/swagger/schemas/definitions/packfile.json @@ -47,13 +47,15 @@ "packfile": {"$ref":"#/definitions/packfile-packfile-input"} }, "required": ["project", "session", "acquisition", "packfile"], - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "packfile" }, "packfile-start": { "type":"object", "properties":{ - "token":{"$ref":"common.json#/definitions/objectid"} - } + "token":{"type": "string"} + }, + "x-sdk-return": "token" } } } diff --git a/swagger/schemas/definitions/permission.json b/swagger/schemas/definitions/permission.json index 03a48fd55..c32996e1e 100644 --- a/swagger/schemas/definitions/permission.json +++ b/swagger/schemas/definitions/permission.json @@ -1,18 +1,23 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "access": { "enum": ["ro", "rw", "admin"] }, + "access": { + "type": "string", + "enum": ["ro", "rw", "admin"] + }, "permission":{ "type":"object", "properties":{ "_id":{"$ref":"common.json#/definitions/user-id"}, "access":{"$ref":"#/definitions/access"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "permission" }, "permission-output-default-required":{ "allOf":[{"$ref":"#/definitions/permission"}], - "required":["_id", "access"] + "required":["_id", "access"], + "x-sdk-model": "permission" }, "permission-output-list": { "type": "array", diff --git a/swagger/schemas/definitions/project.json b/swagger/schemas/definitions/project.json index 6cb7dc156..84fa920ea 100644 --- a/swagger/schemas/definitions/project.json +++ b/swagger/schemas/definitions/project.json @@ -10,7 +10,8 @@ "description": {"$ref":"common.json#/definitions/description"}, "group": {"$ref":"common.json#/definitions/string-id"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "project" }, "project-metadata-input": { "type": "object", @@ -46,13 +47,14 @@ } }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": 
{"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "project" } } } diff --git a/swagger/schemas/definitions/resolver.json b/swagger/schemas/definitions/resolver.json new file mode 100644 index 000000000..2af1470d8 --- /dev/null +++ b/swagger/schemas/definitions/resolver.json @@ -0,0 +1,89 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "resolver-input": { + "type": "object", + "properties": { + "path": { + "type": "array", + "minLength": 1, + "items": { + "type": "string" + } + } + }, + "required": ["path"] + }, + "resolver-output": { + "type": "object", + "properties": { + "path": { "$ref": "#/definitions/resolver-node-list" }, + "children": { "$ref": "#/definitions/resolver-node-list" } + }, + "required": ["path"] + }, + "resolver-node": { + "type": "object", + "properties": { + "container_type": { + "type": "string" + } + }, + "discriminator": "container_type", + "required": ["container_type"] + }, + "resolver-node-list": { + "type": "array", + "items": { "$ref": "#/definitions/resolver-node" } + }, + "group-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"group.json#/definitions/group-output"} + ], + "x-discriminator-value": "group" + }, + "project-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"project.json#/definitions/project-output"} + ], + "x-discriminator-value": "project" + }, + "session-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"session.json#/definitions/session-output"} + ], + "x-discriminator-value": "session" + }, + "acquisition-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"acquisition.json#/definitions/acquisition-output"} + ], + "x-discriminator-value": "acquisition" + }, + "file-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"file.json#/definitions/file-output"} + ], + "x-discriminator-value": "file" + }, + "analysis-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"analysis.json#/definitions/analysis-output"} + ], + "x-discriminator-value": "analysis" + }, + "gear-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"gear.json#/definitions/gear-doc"} + ], + "x-discriminator-value": "gear" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/definitions/rule.json b/swagger/schemas/definitions/rule.json index 3e2585774..d8fca6377 100644 --- a/swagger/schemas/definitions/rule.json +++ b/swagger/schemas/definitions/rule.json @@ -35,7 +35,8 @@ "all": { "$ref": "#/definitions/rule-items" }, "disabled": { "type": "boolean" } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "rule" }, "rule-output": { @@ -47,7 +48,8 @@ "any": { "$ref": "#/definitions/rule-items" }, "all": { "$ref": "#/definitions/rule-items" }, "disabled": { "type": "boolean" } - } + }, + "x-sdk-model": "rule" } } } diff --git a/swagger/schemas/definitions/search.json b/swagger/schemas/definitions/search.json new file mode 100644 index 000000000..218f47e72 --- /dev/null +++ b/swagger/schemas/definitions/search.json @@ -0,0 +1,128 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "search-type": { + "type": "string", + "enum": ["file", "acquisition", "session", "analysis", "collection"], + "description": "Sets the type of search results to return" 
+ }, + "search-query": { + "type": "object", + "properties": { + "return_type": {"$ref": "#/definitions/search-type"}, + "search_string": { + "type": "string", + "description": "Represents the plain text search query" + }, + "all_data": { + "type": "boolean", + "description": "When set will include all data that the user does not have access to read", + "default": false + }, + "filters": { + "type": "object", + "description": "See https://www.elastic.co/guide/en/elasticsearch/reference/current/term-level-queries.html" + } + }, + "required": ["return_type"] + }, + "search-project-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"} + }, + "description": "Fields for project search response" + }, + "search-group-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"} + }, + "description": "Fields for group search response" + }, + "search-session-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "timestamp": {"$ref":"common.json#/definitions/timestamp"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for session search response" + }, + "search-acquisition-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "timestamp": {"$ref":"common.json#/definitions/timestamp"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for acquisition search response" + }, + "search-subject-response": { + "type": "object", + "properties": { + "code": {"$ref":"subject.json#/definitions/code"} + }, + "description": "Fields for subject search response" + }, + "search-file-response": { + "type": "object", + "properties": { + "measurements": {"$ref":"file.json#/definitions/measurements"}, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "type": {"$ref":"file.json#/definitions/file-type"}, + "name": {"$ref":"file.json#/definitions/name"}, + "size": {"$ref":"file.json#/definitions/size"} + }, + "description": "Fields for file search response" + }, + "search-analysis-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "user": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for acquisition search response" + }, + "search-parent-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "type": {"$ref":"container.json#/definitions/container-type"} + }, + "description": "Fields for parent search response" + }, + "search-collection-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "curator": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for collection search response" + }, + "search-response": { + "type": "object", + "properties": { + "project": {"$ref":"#/definitions/search-project-response"}, + "group": 
{"$ref":"#/definitions/search-group-response"}, + "session": {"$ref":"#/definitions/search-session-response"}, + "acquisition": {"$ref":"#/definitions/search-acquisition-response"}, + "subject": {"$ref":"#/definitions/search-subject-response"}, + "file": {"$ref":"#/definitions/search-file-response"}, + "collection": {"$ref":"#/definitions/search-collection-response"}, + "analysis": {"$ref":"#/definitions/search-analysis-response"}, + "parent": {"$ref":"#/definitions/search-parent-response"}, + "permissions": {"$ref":"permission.json#/definitions/permission-output-list"} + }, + "description": "Single search response entry" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/definitions/session.json b/swagger/schemas/definitions/session.json index 9c1c9775b..668f08382 100644 --- a/swagger/schemas/definitions/session.json +++ b/swagger/schemas/definitions/session.json @@ -18,7 +18,8 @@ "timezone": {"$ref":"container.json#/definitions/timezone"}, "subject": {"$ref": "subject.json#/definitions/subject-input"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "session" }, "session-metadata-input": { "type": "object", @@ -63,13 +64,14 @@ "items":{"$ref":"file.json#/definitions/file-output"} }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "session" }, "session-jobs-output": { "type": "object", @@ -79,6 +81,7 @@ "items":{"$ref": "job.json#/definitions/job-output"} }, "containers":{ + "type": "object", "patternProperties": { "^[a-fA-F0-9]{24}$":{ "type": "object" diff --git a/swagger/schemas/definitions/subject.json b/swagger/schemas/definitions/subject.json index a5733ccdc..b049b82a6 100644 --- a/swagger/schemas/definitions/subject.json +++ b/swagger/schemas/definitions/subject.json @@ -5,10 +5,24 @@ "firstname": { "type": "string", "maxLength": 64 }, "lastname": { "type": "string", "maxLength": 64 }, "age": { "type": ["integer", "null"] }, - "sex": { "enum": ["male", "female", "other", "unknown", null] }, - "race": { "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported", null] }, - "ethnicity": { "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported", null] }, - + "sex": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["male", "female", "other", "unknown"] } + ] + }, + "race": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported"] } + ] + }, + "ethnicity": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported"] } + ] + }, "code": { "type": "string", "maxLength": 64 }, "tags": { "type": "array", "items": {"type": "string"} }, "subject-input":{ @@ -31,7 +45,8 @@ "items":{"$ref":"file.json#/definitions/file-input"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "subject" }, "subject-output":{ "type": "object", @@ -54,11 +69,13 @@ 
"items":{"$ref":"file.json#/definitions/file-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "subject" }, "subject-output-default-required":{ "allOf":[{"$ref":"#/definitions/subject-output"}], - "required":["_id"] + "required":["_id"], + "x-sdk-model": "subject" } } } diff --git a/swagger/schemas/definitions/tag.json b/swagger/schemas/definitions/tag.json index e53e657c7..88d27a439 100644 --- a/swagger/schemas/definitions/tag.json +++ b/swagger/schemas/definitions/tag.json @@ -1,18 +1,24 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "value": {"type": "string", "minLength": 1, "maxLength": 32}, + "value": { + "type": "string", + "minLength": 1, + "maxLength": 32, + "x-sdk-positional": true + }, "tag":{ + "type": "object", "properties":{ "value":{"$ref":"#/definitions/value"} }, "additionalProperties": false, "required": ["value"] }, - "tag-output-list":{ + "tag-list":{ "type":"array", "items":{ - "allOf":[{"$ref":"#/definitions/tag"}] + "allOf":[{"type":"string"}] } } } diff --git a/swagger/schemas/definitions/user.json b/swagger/schemas/definitions/user.json index 475bc5024..bc8bb0f65 100644 --- a/swagger/schemas/definitions/user.json +++ b/swagger/schemas/definitions/user.json @@ -37,7 +37,7 @@ "properties":{ "key": {"type": "string"}, "created": {"$ref":"created-modified.json#/definitions/created"}, - "last_used": {} + "last_used": {"$ref":"common.json#/definitions/timestamp"} }, "additionalProperties":false }, @@ -57,7 +57,8 @@ "firstlogin":{"$ref":"#/definitions/firstlogin"}, "lastlogin":{"$ref":"#/definitions/lastlogin"} }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "user" }, "user-output":{ "type":"object", @@ -77,7 +78,8 @@ "created":{"$ref":"created-modified.json#/definitions/created"}, "modified":{"$ref":"created-modified.json#/definitions/modified"} }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "user" }, "user-output-api-key": { "type":"object", @@ -102,7 +104,8 @@ "required":[ "_id", "firstname", "lastname", "root", "email", "created", "modified" - ] + ], + "x-sdk-model": "user" } } } diff --git a/swagger/schemas/input/gear.json b/swagger/schemas/input/gear.json new file mode 100644 index 000000000..1f700aa54 --- /dev/null +++ b/swagger/schemas/input/gear.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Analysis", + "type": "object", + "allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}], + "example": { + "category": "converter", + "gear": { + "inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" 
+ } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/input/job-logs.json b/swagger/schemas/input/job-logs.json new file mode 100644 index 000000000..43e5f9444 --- /dev/null +++ b/swagger/schemas/input/job-logs.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref": "../definitions/job.json#/definitions/job-log-statement"}, + "example": [ + { "fd": 1, "msg": "Hello World!" } + ] +} diff --git a/swagger/schemas/input/job-new.json b/swagger/schemas/input/job-new.json new file mode 100644 index 000000000..eed7eb7f7 --- /dev/null +++ b/swagger/schemas/input/job-new.json @@ -0,0 +1,25 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/job.json#/definitions/job-input"}], + "example": { + "gear_id": "aex", + "inputs": { + "dicom": { + "type": "acquisition", + "id": "573c9e6a844eac7fc01747cd", + "name" : "1_1_dicom.zip" + } + }, + "config": { + "two-digit multiple of ten": 20 + }, + "destination": { + "type": "acquisition", + "id": "573c9e6a844eac7fc01747cd" + }, + "tags": [ + "ad-hoc" + ] + } +} diff --git a/swagger/schemas/input/propose-batch.json b/swagger/schemas/input/propose-batch.json new file mode 100644 index 000000000..0ec0934ce --- /dev/null +++ b/swagger/schemas/input/propose-batch.json @@ -0,0 +1,14 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref":"../definitions/batch.json#/definitions/batch-proposal-input"}], + "example": { + "gear_id": "59b1b5b0e105c40019f50015", + "config": {}, + "tags": ["test-tag"], + "targets": [{ + "type": "session", + "id": "deb1b5b0e105c40019f500af" + }] + } +} diff --git a/swagger/schemas/input/resolver.json b/swagger/schemas/input/resolver.json new file mode 100644 index 000000000..11acc2c06 --- /dev/null +++ b/swagger/schemas/input/resolver.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/resolver.json#/definitions/resolver-input"}], + "example": { + "path": ["scitran", "Neuroscience"] + } +} diff --git a/swagger/schemas/input/search-query.json b/swagger/schemas/input/search-query.json new file mode 100644 index 000000000..5d76f2aee --- /dev/null +++ b/swagger/schemas/input/search-query.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf":[{"$ref":"../definitions/search.json#/definitions/search-query"}], + "example": { + "return_type": "session", + "search_string": "amyg" + } +} diff --git a/swagger/schemas/output/analyses-list.json b/swagger/schemas/output/analyses-list.json new file mode 100644 index 000000000..988581c68 --- /dev/null +++ 
b/swagger/schemas/output/analyses-list.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/analysis.json#/definitions/analysis-list-entry"}, + "example": [{ + "files": [{ + "origin": { + "type": "job", + "id": "58063f24e5dc5b001657a87f" + }, + "mimetype": "application/octet-stream", + "hash": "v0-sha384-12188e00a26650b2baa3f0195337dcf504f4362bb2136eef0cdbefb57159356b1355a0402fca0ab5ab081f21c305e5c2", + "name": "cortical_surface_right_hemisphere.obj", + "tags": [], + "measurements": [], + "modified": "2016-10-18T15:26:35.701000+00:00", + "modality": null, + "input": true, + "size": 21804112, + "type": "None", + "info": {} + }], + "created": "2016-10-18T17:45:11.778000+00:00", + "modified": "2016-10-18T17:45:11.778000+00:00", + "label": "cortex-demo 10/18/2016 13:45:5", + "job": "58065fa7e5dc5b001457a882", + "user": "canakgun@flywheel.io", + "_id": "58065fa7e5dc5b001457a881" + }] +} diff --git a/swagger/schemas/output/batch-cancel.json b/swagger/schemas/output/batch-cancel.json new file mode 100644 index 000000000..8ca8186a0 --- /dev/null +++ b/swagger/schemas/output/batch-cancel.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/batch.json#/definitions/batch-cancel-output"}], + "example": { + "number_cancelled": 4 + } +} diff --git a/swagger/schemas/output/batch-list.json b/swagger/schemas/output/batch-list.json new file mode 100644 index 000000000..d120f9d03 --- /dev/null +++ b/swagger/schemas/output/batch-list.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/batch.json#/definitions/batch"}, + "example": [{ + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "jobs": [ + "5a33fa6652e95c001707489c", + "5a33fa6652e95c001707489d", + "5a33fa6652e95c001707489e", + "5a33fa6652e95c001707489f" + ], + "created": "2017-12-15T16:37:55.538000+00:00", + "modified": "2017-12-15T16:38:01.107000+00:00", + "state": "complete", + "gear_id": "59b1b5b0e105c40019f50015", + "_id": "5a33fa6352e95c001707489b", + "config": {} + }] +} diff --git a/swagger/schemas/output/batch-proposal.json b/swagger/schemas/output/batch-proposal.json new file mode 100644 index 000000000..c0c61ec2c --- /dev/null +++ b/swagger/schemas/output/batch-proposal.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref":"../definitions/batch.json#/definitions/batch-proposal"}], + "example": { + "_id": "5a33fa6352e95c001707489b", + "gear_id": "59b1b5b0e105c40019f50015", + "config": {}, + "state": "pending", + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "proposal": {}, + "created": "2017-12-15T16:37:55.538000+00:00", + "modified": "2017-12-15T16:38:01.107000+00:00" + } +} diff --git a/swagger/schemas/output/batch.json b/swagger/schemas/output/batch.json new file mode 100644 index 000000000..fa73bafff --- /dev/null +++ b/swagger/schemas/output/batch.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref":"../definitions/batch.json#/definitions/batch"}], + "example": { + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "jobs": [ + "5a33fa6652e95c001707489c", + "5a33fa6652e95c001707489d", + "5a33fa6652e95c001707489e", + "5a33fa6652e95c001707489f" + ], + "created": "2017-12-15T16:37:55.538000+00:00", 
+ "modified": "2017-12-15T16:38:01.107000+00:00", + "state": "complete", + "gear_id": "59b1b5b0e105c40019f50015", + "_id": "5a33fa6352e95c001707489b", + "config": {} + } +} diff --git a/swagger/schemas/output/file-info.json b/swagger/schemas/output/file-info.json new file mode 100644 index 000000000..43f6fc036 --- /dev/null +++ b/swagger/schemas/output/file-info.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf":[{"$ref":"../definitions/file.json#/definitions/file-output"}], + "example": { + "origin": { + "type": "job", + "id": "58063f24e5dc5b001657a87f" + }, + "mimetype": "application/octet-stream", + "hash": "v0-sha384-12188e00a26650b2baa3f0195337dcf504f4362bb2136eef0cdbefb57159356b1355a0402fca0ab5ab081f21c305e5c2", + "name": "cortical_surface_right_hemisphere.obj", + "tags": [], + "measurements": [], + "modified": "2016-10-18T15:26:35.701000+00:00", + "modality": null, + "size": 21804112, + "type": "None", + "info": {} + } +} diff --git a/swagger/schemas/output/gear-list.json b/swagger/schemas/output/gear-list.json new file mode 100644 index 000000000..e022766d0 --- /dev/null +++ b/swagger/schemas/output/gear-list.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"array", + "items":{ + "allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}] + } +} diff --git a/swagger/schemas/output/gear.json b/swagger/schemas/output/gear.json new file mode 100644 index 000000000..b64b79625 --- /dev/null +++ b/swagger/schemas/output/gear.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}] +} diff --git a/swagger/schemas/output/job-log.json b/swagger/schemas/output/job-log.json new file mode 100644 index 000000000..810151388 --- /dev/null +++ b/swagger/schemas/output/job-log.json @@ -0,0 +1,11 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/job.json#/definitions/job-log"}], + "example": { + "_id": "57ac7394c700190017123fb8", + "logs": [ + { "fd": 1, "msg": "Hello World!" 
} + ] + } +} diff --git a/swagger/schemas/output/login-output.json b/swagger/schemas/output/login-output.json new file mode 100644 index 000000000..e92c297a0 --- /dev/null +++ b/swagger/schemas/output/login-output.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "allOf":[{"$ref":"../definitions/auth.json#/definitions/login-output"}], + "example": { + "token": "MjeuawZcctfRdCOmx_C6oYXK4sLHd2Dhc_oZpkXPPkxHizhNgwFWcrrKGA49BEnK" + } +} diff --git a/swagger/schemas/output/logout-output.json b/swagger/schemas/output/logout-output.json new file mode 100644 index 000000000..8749f3ded --- /dev/null +++ b/swagger/schemas/output/logout-output.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "allOf":[{"$ref":"../definitions/auth.json#/definitions/logout-output"}], + "example": { + "tokens_removed": 1 + } +} diff --git a/swagger/schemas/output/lookup.json b/swagger/schemas/output/lookup.json new file mode 100644 index 000000000..490cfb672 --- /dev/null +++ b/swagger/schemas/output/lookup.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf": [{ "$ref": "../definitions/resolver.json#/definitions/resolver-node" }], + "example": { + "container_type": "project", + "_id": "57e452791cff88b85f9f9c97", + "label": "Neuroscience", + "group": "scitran", + "created": "2016-09-22T21:51:53.151000+00:00", + "modified": "2016-09-22T21:51:53.151000+00:00", + "public": false, + "permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + } +} diff --git a/swagger/schemas/output/resolver.json b/swagger/schemas/output/resolver.json new file mode 100644 index 000000000..c52161a7d --- /dev/null +++ b/swagger/schemas/output/resolver.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf": [{ "$ref": "../definitions/resolver.json#/definitions/resolver-output" }], + "example": { + "path": [ + { + "container_type": "group", + "_id": "scitran", + "label": "Scitran", + "permissions": [ + { + "access": "admin", + "_id": "coltonlw@flywheel.io" + } + ], + "created": "2016-08-19T11:41:15.360000+00:00", + "modified": "2016-08-19T11:41:15.360000+00:00" + }, + { + "container_type": "project", + "_id": "57e452791cff88b85f9f9c97", + "label": "Neuroscience", + "group": "scitran", + "created": "2016-09-22T21:51:53.151000+00:00", + "modified": "2016-09-22T21:51:53.151000+00:00", + "public": false, + "permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + } + ], + "children": [ + { + "container_type": "session", + "_id": "57e01cccb1dc04000fb83f03", + "label": "control_1", + "group": "scitran", + "created": "2016-09-19T17:13:48.164000+00:00", + "subject": { + "code": "ex4784", + "_id": "57e01cccb1dc04000fb83f02" + }, + "modified": "2016-09-19T17:13:48.164000+00:00", + "project": "57e01cccf6b5d5edbcb4e1cf", + "public": false, + "permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + }, + { + "container_type": "file", + "origin": { + "method": "importer", + "type": "device", + "id": "importer_Admin_Import", + "name": "Admin Import" + }, + "mimetype": "application/zip", + "measurements": [], + "hash": "v0-sha384-dd3c97bfe0ad1fcba75ae6718c6e81038c59af4f447f5db194d52732efa4f955b28455db02eb64cad3e4e55f11e3679f", + "name": "4784_1_1_localizer_dicom.zip", + "tags": [], + "created": "2016-09-21T14:56:09.943000+00:00", + "modified": "2016-09-21T14:56:09.943000+00:00", + "modality": null, + "info": {}, + "type": "dicom", 
+ "size": 989933 + } + ] + } +} diff --git a/swagger/schemas/output/rule.json b/swagger/schemas/output/rule.json new file mode 100644 index 000000000..77ec4fc71 --- /dev/null +++ b/swagger/schemas/output/rule.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Rule", + "type": "object", + "allOf": [{"$ref": "../definitions/rule.json#/definitions/rule-output"}], + "example": {"$ref": "../../examples/output/rule.json"} +} diff --git a/swagger/schemas/output/search-response-list.json b/swagger/schemas/output/search-response-list.json new file mode 100644 index 000000000..6368c2947 --- /dev/null +++ b/swagger/schemas/output/search-response-list.json @@ -0,0 +1,6 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/search.json#/definitions/search-response"}, + "example": [] +} diff --git a/swagger/schemas/output/user-new.json b/swagger/schemas/output/user-new.json index b008d72b7..cac0e5f79 100644 --- a/swagger/schemas/output/user-new.json +++ b/swagger/schemas/output/user-new.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$ref": "../definitions/common.json#/definitions/object-created", + "allOf": [{"$ref": "../definitions/common.json#/definitions/object-created"}], "example": { "_id": "jane.doe@gmail.com" } diff --git a/swagger/schemas/output/user.json b/swagger/schemas/output/user.json index 3e01847c5..36226054b 100644 --- a/swagger/schemas/output/user.json +++ b/swagger/schemas/output/user.json @@ -2,12 +2,10 @@ "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "allOf":[ - {"$ref":"../definitions/user.json#/definitions/user-output"}, - { - "required":[ - "_id", "firstname", "lastname", - "root", "email", "created", "modified" - ] - } + {"$ref":"../definitions/user.json#/definitions/user-output"} + ], + "required":[ + "_id", "firstname", "lastname", + "root", "email", "created", "modified" ] } diff --git a/swagger/support/schema-transpiler.js b/swagger/support/schema-transpiler.js index 2bc4b66ec..832b046e4 100644 --- a/swagger/support/schema-transpiler.js +++ b/swagger/support/schema-transpiler.js @@ -74,11 +74,6 @@ SchemaTranspiler.prototype.draft4ToOpenApi2 = function(schema, defs, id) { schema.type = this._selectTypeFromArray(schema.type, id); } - if( schema.allOf && schema.allOf.length === 1 && !schema.required ) { - // Merge all of object with top-level object - schema = this._flattenAllOf(schema, id); - } - // Check for top-level $ref, allOf, anyOf, oneOf if( schema.$ref && schema.example ) { // Special case, if object has $ref and example, then @@ -102,7 +97,11 @@ SchemaTranspiler.prototype.draft4ToOpenApi2 = function(schema, defs, id) { } if( schema.patternProperties ) { - this.warn(id, '"patternProperties" is not supported in OpenApi 2'); + var keys = _.keys(schema.patternProperties); + if( keys.length > 1 ) { + this.warn(id, 'Can only support one type in additionalProperties (from "patternProperties")'); + } + schema.additionalProperties = this.draft4ToOpenApi2(schema.patternProperties[keys[0]], defs, id); delete schema.patternProperties; } diff --git a/swagger/support/schemas.js b/swagger/support/schemas.js index c4e1c0e8e..45756182c 100644 --- a/swagger/support/schemas.js +++ b/swagger/support/schemas.js @@ -15,10 +15,25 @@ var PRIMITIVE_TYPES = { 'null': true }; +var OBJECT_PROPERTIES = [ 'allOf', 'anyOf', 'oneOf', 'multipleOf', 'not', + 'if', 'then', 'else', 'properties', 'additionalProperties' ]; + function 
isPrimitiveType(type) { return !!PRIMITIVE_TYPES[type]; } +function isEmptyObject(schema) { + if( schema.type && schema.type !== 'object' ) { + return false; + } + if( schema.$ref ) { + return false; + } + return !_.some(OBJECT_PROPERTIES, function(key) { + return !!schema[key]; + }); +} + function normalizeName(name) { return name.replace('_', '-'); } @@ -199,6 +214,8 @@ Schemas.prototype.isPrimitiveDef = function(name) { return false; }; +Schemas.isPrimitiveType = isPrimitiveType; +Schemas.isEmptyObject = isEmptyObject; Schemas.prototype.getComplexDefinitions = function() { return _.pickBy(this.definitions, function(value) { diff --git a/swagger/support/spec/schema-transpiler-spec.js b/swagger/support/spec/schema-transpiler-spec.js index 1a65aafc9..1f60a236a 100644 --- a/swagger/support/spec/schema-transpiler-spec.js +++ b/swagger/support/spec/schema-transpiler-spec.js @@ -72,13 +72,15 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { }); }); - it('should flatten allOf with one element', function() { + it('should not flatten allOf with one element', function() { var schema = { allOf: [{$ref:'#/definitions/Foo'}] }; var result = transpiler.toOpenApi2(schema); - expect(result).toEqual({$ref:'#/definitions/Foo'}); + expect(result).toEqual({ + allOf: [{$ref:'#/definitions/Foo'}] + }); }); it('should merge properties for anyOf', function() { @@ -122,28 +124,16 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { expect(result).toEqual({}); }); - it('should flatten array elements', function() { - var defs = { - Foo: { - type: 'object', - properties: { - updated: {type: 'boolean'} - }, - required: ['updated'] - } - }, - schema = { + it('should not flatten array elements', function() { + var schema = { type: 'array', items: { allOf: [{$ref:'#/definitions/Foo'}] } }; - var result = transpiler.toOpenApi2(schema, defs); - expect(result).toEqual({ - type: 'array', - items: {$ref:'#/definitions/Foo'} - }); + var result = transpiler.toOpenApi2(schema); + expect(result).toEqual(schema); }); it('should recurse into properties', function() { @@ -160,7 +150,7 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { type: 'object', properties: { bar: {type: 'string'}, - foo: {$ref: '#/definitions/Foo'} + foo: {allOf: [{$ref: '#/definitions/Foo'}]} } }); }); diff --git a/swagger/support/swagger-resolver.js b/swagger/support/swagger-resolver.js index ee2575df0..6b154dccf 100644 --- a/swagger/support/swagger-resolver.js +++ b/swagger/support/swagger-resolver.js @@ -34,7 +34,7 @@ function validateTemplateArgs(tmplpath, template, args) { if( !param.name ) { throw 'Template "' + tmplpath + '" parameter does not have a name!'; } - if( _.isNil(args[param.name]) ) { + if( param.required && _.isNil(args[param.name]) ) { throw 'Template "' + tmplpath + '" invocation is missing parameter: ' + param.name; } } diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js new file mode 100644 index 000000000..698d92f60 --- /dev/null +++ b/swagger/support/tasks/simplify-swagger.js @@ -0,0 +1,285 @@ +'use strict'; + +module.exports = function(grunt) { + var path = require('path'); + var fs = require('fs'); + var _ = require('lodash'); + var yaml = require('js-yaml'); + var walk = require('../walk'); + var Schemas = require('../schemas'); + + /** + * This task simplifies models in a swagger file. 
+ * @param {object} data Task data + * @param {string} data.src The input file (root level swagger file) + * @param {string} data.dst The output file + */ + grunt.registerMultiTask('simplifySwagger', 'Simplify models in swagger API file', function() { + var srcFile = this.data.src||'swagger.yaml'; + var dstFile = this.data.dst; + + if(!fs.existsSync(srcFile)) { + grunt.log.error('Could not find:', srcFile); + return false; + } + + var root = yaml.safeLoad(fs.readFileSync(srcFile).toString()); + + var context = { + aliases: {} + }; + + try { + // Merge models + // for example, this will merge group-input and group-output into group based on the + // x-sdk-model property + mergeModels(root, context); + } catch( e ) { + grunt.fail.warn('ERROR: '.red + ' ' + e); + } + + // Walk through definitions, simplifying models where we can + simplifyDefinitions(root, context); + + // walk through all schemas + // That's every definition and every response and body schema + root = walk(root, function(obj, path) { + if( isSchema(path) ) { + return simplifySchema(obj, path, context); + } + return obj; + }); + + var data = JSON.stringify(root, null, 2); + fs.writeFileSync(dstFile, data); + }); + + function formatPath(path) { + path = _.map(path, function(el) { + return el.replace(/\//g, '~1'); + }); + return '#/' + path.join('/'); + } + + function unformatPath(path) { + if( !path.substr ) { + grunt.log.writeln('Invalid path: ' + JSON.stringify(path)); + return path; + } + var parts = path.substr(2).split('/'); + return _.map(parts, function(el) { + return el.replace(/~1/g, '/'); + }); + } + + function isSchema(path) { + if( path.length === 2 && path[0] === 'definitions' ) { + return true; + } + if( path.length === 4 && path[0] === 'definitions' && path[2] === 'properties' ) { + return true; + } + if( path.length > 1 && path[path.length-1] === 'schema' ) { + return true; + } + return false; + } + + function isValidSchema(schema) { + return( schema.type || schema.$ref || + schema.allOf || schema.oneOf || schema.anyOf || schema.not ); + } + + function isDefinition(path) { + return ( path.length === 2 && path[0] === 'definitions' ); + } + + function simplifyDefinitions(root, context) { + var defs = root.definitions||{}; + var keys = _.keys(defs); + + _.each(keys, function(k) { + var schema = defs[k]; + var path = formatPath(['definitions', k]); + + if( schema.type === 'array' ) { + // Setup an alias for array objects (don't generate a model) + context.aliases[path] = simplifySchema(schema, ['definitions', k], context); + delete defs[k]; + } else if( schema.allOf && schema.allOf.length === 1 && schema.allOf[0].$ref ) { + // For objects that are just aliases for other objects, copy all of the properties + var target = unformatPath(schema.allOf[0].$ref); + var targetObj = resolvePathObj(root, target); + if( targetObj ) { + defs[k] = targetObj; + } else { + grunt.log.writeln('ERROR '.red + 'Cannot find alias for: ' + path + ' (' + schema.allOf[0].$ref + ')'); + } + } else if( schema.$ref ) { + // Replace pure references + context.aliases[path] = schema; + delete defs[k]; + } else if( Schemas.isPrimitiveType(schema.type) ) { + // For simple types in definitions, alias them + context.aliases[path] = schema; + delete defs[k]; + } + }); + } + + // Performs all of the simplifying steps, and + // returns a simplified version of schema + function simplifySchema(schema, path, context) { + schema = _.cloneDeep(schema); + // If an x-sdk-schema is specified, use that + if( schema['x-sdk-schema'] ) { + schema = 
schema['x-sdk-schema']; + } + + if( !isValidSchema(schema) ) { + grunt.log.writeln('WARNING '.red + 'Invalid schema (no object type specified) at: ' + formatPath(path)); + schema.type = 'object'; + } else if( schema.type === 'array' && schema.items ) { + path = _.concat(path, 'items'); + schema.items = simplifySchema(schema.items, path, context); + } else if( schema.allOf ) { + if( schema.allOf.length === 1 ) { + if( schema.allOf[0].$ref ) { + var alias = context.aliases[schema.allOf[0].$ref]; + // Replace alias for allOf fields + if( alias ) { + schema = _.cloneDeep(alias); + } else { + schema = schema.allOf[0]; + } + } else if( Schemas.isPrimitiveType(schema.allOf[0].type) ) { + schema = schema.allOf[0]; + } else { + grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); + } + } else { + // Still replace aliases + for( var i = 0; i < schema.allOf.length; i++ ) { + var alias = context.aliases[schema.allOf[i].$ref]; + if( alias ) { + schema.allOf[i] = _.cloneDeep(alias); + } + } + // It's not an error to not simplify polymorphic types + if( !schema['x-discriminator-value'] ) { + grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); + } + } + } else if( schema.$ref ) { + // Replace alias for $ref fields + var alias = context.aliases[schema.$ref]; + if( alias ) { + schema = _.cloneDeep(alias); + } + } + return schema; + } + + // Merge all models that have the x-sdk-model property + function mergeModels(root, context) { + var defs = root.definitions||{}; + var keys = _.keys(defs); + var models = {}; + var aliases = {}; + + // First collect all the models to be merged + _.each(keys, function(k) { + var schema = defs[k]; + if( schema['x-sdk-model'] ) { + var modelName = schema['x-sdk-model']; + if( !models[modelName] ) { + models[modelName] = []; + } + models[modelName].push({ + id: k, + schema: schema + }); + + // Create temporary aliases for comparing properties + aliases['#/definitions/' + k] = '#/definitions/' + modelName; + } + }); + + // Then perform the merge + keys = _.keys(models); + _.each(keys, function(modelName) { + var schemas = models[modelName]; + var schema = _.cloneDeep(schemas[0]).schema; + var refSchema = { + $ref: '#/definitions/' + modelName + }; + + for( var i = 1; i < schemas.length; i++ ) { + // Merge each schema into the current + mergeSchema(modelName, schema, schemas[i], aliases); + } + + // Add aliases and delete the original models + for( var i = 0; i < schemas.length; i++ ) { + var id = schemas[i].id; + context.aliases['#/definitions/' + id] = refSchema; + delete defs[id]; + } + + // Remove fields that are no longer relevant + delete schema['x-sdk-model']; + delete schema['required']; + + defs[modelName] = schema; + }); + } + + function mergeSchema(name, schema, src, aliases) { + schema.properties = schema.properties||{}; + var dstProps = schema.properties; + var srcProps = src.schema.properties||{}; + + var keys = _.keys(srcProps); + _.each(keys, function(k) { + // Compare, after resolving aliases + // This way, file-input and file-output resolve to file-entry (for example) + // and are treated as the same for comparison purposes + var srcProp = resolveAlias(srcProps[k], aliases); + var dstProp = resolveAlias(dstProps[k], aliases); + if( dstProp && !_.isEqual(srcProp, dstProp) ) { + throw 'Cannot merge model ' + src.id + ' into ' + name + ': incompatible "' + k + '" property'; + } else { + dstProps[k] = srcProp; + } + }); + } + + function resolveAlias(schema, aliases) { + // Simple alias 
resolution where aliases is a map of: + // #/definition/model1 to #/definition/model2 + if( !schema ) { + return schema; + } + + return walk(schema, function(obj) { + if( obj.$ref ) { + var alias = aliases[obj.$ref]; + if( alias ) { + return _.extend({}, obj, { $ref: alias }); + } + } + return obj; + }); + } + + function resolvePathObj(root, path) { + var current = root; + path = path.slice(); + while( current && path.length ) { + current = current[path.shift()]; + } + return current; + } +}; + + diff --git a/swagger/templates/analyses-list.yaml b/swagger/templates/analyses-list.yaml index ad44c1a10..6a8ed7e2d 100644 --- a/swagger/templates/analyses-list.yaml +++ b/swagger/templates/analyses-list.yaml @@ -1,27 +1,61 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true + - name: allowCreate + type: boolean + required: false template: | parameters: - required: true type: string in: path name: '{{parameter}}' + get: + summary: Get analyses for {{resource}}. + description: Returns analyses that directly belong to this resource. + operationId: get_{{resource}}_analyses + tags: + - '{{tag}}' + responses: + '200': + description: The list of analyses + schema: + $ref: schemas/output/analyses-list.json + + {{#allowCreate}} post: summary: Create an analysis and upload files. + description: | + When query param "job" is "true", send JSON to create + an analysis and job. Otherwise, multipart/form-data + to upload files and create an analysis. operationId: add_{{resource}}_analysis tags: - '{{tag}}' consumes: + - application/json - multipart/form-data parameters: - - in: formData - name: formData - type: string + - in: body + name: body + required: true + schema: + $ref: schemas/input/analysis-job.json + - name: job + in: query + type: boolean + description: Return job as an object instead of an id + x-sdk-default: 'true' responses: '200': - description: '' + description: Returns the id of the analysis that was created. + schema: + $ref: schemas/output/container-new.json + {{/allowCreate}} diff --git a/swagger/templates/analysis-files-create-ticket-filename.yaml b/swagger/templates/analysis-files-create-ticket-filename.yaml index 7f90a951d..08b59bd54 100644 --- a/swagger/templates/analysis-files-create-ticket-filename.yaml +++ b/swagger/templates/analysis-files-create-ticket-filename.yaml @@ -5,12 +5,15 @@ parameters: type: string - name: tag type: string + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: '{{.}}' + {{/parameter}} - required: true type: string in: path @@ -29,7 +32,12 @@ template: | files in the analysis. If no "ticket" query param is included, files will be downloaded directly.
- operationId: download_{{resource}}_analysis_files_by_filename + {{#resource}} + operationId: download_{{.}}_analysis_files_by_filename + {{/resource}} + {{^resource}} + operationId: download_analysis_files_by_filename + {{/resource}} tags: - '{{tag}}' produces: diff --git a/swagger/templates/analysis-files.yaml b/swagger/templates/analysis-files.yaml index 1577e15e5..5e0f6073c 100644 --- a/swagger/templates/analysis-files.yaml +++ b/swagger/templates/analysis-files.yaml @@ -5,12 +5,15 @@ parameters: type: string - name: tag type: string + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: '{{.}}' + {{/parameter}} - required: true type: string in: path @@ -24,7 +27,12 @@ template: | files in the analysis If no "ticket" query param is included, server error 500 - operationId: download_{{resource}}_analysis_files + {{#resource}} + operationId: download_{{.}}_analysis_files + {{/resource}} + {{^resource}} + operationId: download_analysis_files + {{/resource}} tags: - '{{tag}}' produces: diff --git a/swagger/templates/analysis-item.yaml b/swagger/templates/analysis-item.yaml index 0a46cde8b..daf8ccc7a 100644 --- a/swagger/templates/analysis-item.yaml +++ b/swagger/templates/analysis-item.yaml @@ -5,21 +5,38 @@ parameters: type: string - name: tag type: string + required: true + - name: supportsDelete + type: boolean + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: '{{.}}' + {{/parameter}} - required: true type: string in: path name: AnalysisId get: summary: Get an analysis. - operationId: get_{{resource}}_analysis + {{#resource}} + operationId: get_{{.}}_analysis + {{/resource}} + {{^resource}} + operationId: get_analysis + {{/resource}} tags: - '{{tag}}' + parameters: + - name: inflate_job + in: query + type: boolean + description: Return job as an object instead of an id + x-sdk-default: 'true' responses: '200': description: '' @@ -28,11 +45,18 @@ template: | examples: response: $ref: examples/output/analysis.json + {{#supportsDelete}} delete: summary: Delete an analysis - operationId: delete_{{resource}}_analysis + {{#resource}} + operationId: delete_{{.}}_analysis + {{/resource}} + {{^resource}} + operationId: delete_analysis + {{/resource}} tags: - '{{tag}}' responses: '200': $ref: '#/responses/200:deleted-with-count' + {{/supportsDelete}} diff --git a/swagger/templates/analysis-notes-item.yaml b/swagger/templates/analysis-notes-item.yaml index f4a87a726..6ee57edb7 100644 --- a/swagger/templates/analysis-notes-item.yaml +++ b/swagger/templates/analysis-notes-item.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/analysis-notes.yaml b/swagger/templates/analysis-notes.yaml index 2627f2edd..58dc6b9ae 100644 --- a/swagger/templates/analysis-notes.yaml +++ b/swagger/templates/analysis-notes.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -23,6 +26,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/note.json responses: diff --git a/swagger/templates/container-item-info.yaml b/swagger/templates/container-item-info.yaml new file mode 100644
index 000000000..b30b37a25 --- /dev/null +++ b/swagger/templates/container-item-info.yaml @@ -0,0 +1,33 @@ +parameters: + - name: resource + type: string + required: true + - name: parameter + type: string + required: true + - name: tag + type: string + required: true +template: | + parameters: + - required: true + type: string + in: path + name: '{{parameter}}' + post: + summary: Update or replace info for a {{resource}}. + operationId: modify_{{resource}}_info + x-sdk-modify-info: true + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/info_update.json + responses: + '200': + description: 'The info was updated successfully' + + diff --git a/swagger/templates/container-item.yaml b/swagger/templates/container-item.yaml index 223838060..d0a4b5b69 100644 --- a/swagger/templates/container-item.yaml +++ b/swagger/templates/container-item.yaml @@ -1,14 +1,19 @@ parameters: - name: resource type: string + required: true - name: tag type: string + required: true - name: parameter type: string + required: true - name: update-input-schema type: string + required: true - name: get-output-schema type: string + required: true template: | parameters: - in: path @@ -33,6 +38,7 @@ template: | parameters: - in: body name: body + required: true schema: $ref: {{{update-input-schema}}} responses: diff --git a/swagger/templates/container.yaml b/swagger/templates/container.yaml index d867de6d3..ad003381e 100644 --- a/swagger/templates/container.yaml +++ b/swagger/templates/container.yaml @@ -1,12 +1,16 @@ parameters: - name: resource type: string + required: true - name: tag type: string + required: true - name: list-output-schema type: string + required: true - name: create-input-schema type: string + required: true template: | get: summary: Get a list of {{#pluralize}}{{resource}}{{/pluralize}} @@ -20,12 +24,13 @@ template: | $ref: {{{list-output-schema}}} post: summary: Create a new {{resource}} - operationId: create_{{resource}} + operationId: add_{{resource}} tags: - '{{tag}}' parameters: - in: body name: body + required: true schema: $ref: {{{create-input-schema}}} responses: diff --git a/swagger/templates/file-item-info.yaml b/swagger/templates/file-item-info.yaml new file mode 100644 index 000000000..ef53a2f5b --- /dev/null +++ b/swagger/templates/file-item-info.yaml @@ -0,0 +1,47 @@ +parameters: + - name: resource + type: string + required: true + - name: parameter + type: string + required: true + - name: tag + type: string + required: true +template: | + parameters: + - required: true + type: string + in: path + name: '{{parameter}}' + - required: true + type: string + in: path + name: FileName + get: + summary: Get info for a particular file. + operationId: get_{{resource}}_file_info + tags: + - '{{tag}}' + responses: + '200': + description: 'The file object, including info' + schema: + $ref: schemas/output/file-info.json + post: + summary: Update info for a particular file. 
+ operationId: modify_{{resource}}_file_info + x-sdk-modify-info: true + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/info_update.json + responses: + '200': + $ref: '#/responses/200:modified-with-count' + + diff --git a/swagger/templates/file-item.yaml b/swagger/templates/file-item.yaml index a6e436b3e..331379066 100644 --- a/swagger/templates/file-item.yaml +++ b/swagger/templates/file-item.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -25,7 +28,7 @@ template: | - Make another request with the received ticket id in the "ticket" parameter. A valid "Authorization" header is no longer required. When "view" is true, [RFC7233](https://tools.ietf.org/html/rfc7233) range request headers are supported. - operationId: download_{{resource}}_file + operationId: download_file_from_{{resource}} tags: - '{{tag}}' produces: @@ -52,7 +55,7 @@ template: | in: query type: string description: The filename of a zipfile member to download rather than the entire file - + x-sdk-download-ticket: get_{{resource}}_download_ticket responses: '200': description: '' @@ -70,3 +73,27 @@ template: | default: description: '' + put: + summary: Modify a file's attributes + operationId: modify_{{resource}}_file + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/file-update.json + responses: + '200': + $ref: '#/responses/200:modified-with-count-and-jobs' + + delete: + summary: Delete a file + operationId: delete_{{resource}}_file + tags: + - '{{tag}}' + responses: + '200': + $ref: '#/responses/200:modified-with-count' + diff --git a/swagger/templates/file-list-upload.yaml b/swagger/templates/file-list-upload.yaml index e8985e014..ebe9d561f 100644 --- a/swagger/templates/file-list-upload.yaml +++ b/swagger/templates/file-list-upload.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - name: '{{parameter}}' @@ -14,7 +17,7 @@ template: | post: summary: Upload a file to {{resource}}. 
- operationId: upload_{{resource}}_file + operationId: upload_file_to_{{resource}} tags: - '{{tag}}' consumes: diff --git a/swagger/templates/notes-note.yaml b/swagger/templates/notes-note.yaml index 899ad54a8..604dbe7d9 100644 --- a/swagger/templates/notes-note.yaml +++ b/swagger/templates/notes-note.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -36,6 +39,7 @@ template: | parameters: - in: body name: body + required: true schema: $ref: schemas/input/note.json responses: diff --git a/swagger/templates/notes.yaml b/swagger/templates/notes.yaml index 1c4936b09..e61052f26 100644 --- a/swagger/templates/notes.yaml +++ b/swagger/templates/notes.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -19,6 +22,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/note.json responses: diff --git a/swagger/templates/packfile-end.yaml b/swagger/templates/packfile-end.yaml index 0b662d20a..02c000aa3 100644 --- a/swagger/templates/packfile-end.yaml +++ b/swagger/templates/packfile-end.yaml @@ -1,17 +1,20 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true type: string in: path name: '{{parameter}}' - post: + get: summary: End a packfile upload operationId: end_{{resource}}_packfile_upload tags: @@ -19,9 +22,15 @@ template: | produces: - text/event-stream parameters: - - in: formData - name: formData + - name: token + in: query type: string + required: true + - name: metadata + in: query + type: string + required: true + description: string-encoded metadata json object. 
responses: '200': description: '' diff --git a/swagger/templates/packfile-start.yaml b/swagger/templates/packfile-start.yaml index d8b5ebfc4..065d0ff7c 100644 --- a/swagger/templates/packfile-start.yaml +++ b/swagger/templates/packfile-start.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/packfile.yaml b/swagger/templates/packfile.yaml index d302d75c0..79e9edd0d 100644 --- a/swagger/templates/packfile.yaml +++ b/swagger/templates/packfile.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -19,9 +22,14 @@ template: | consumes: - multipart/form-data parameters: - - in: formData - name: formData + - name: token + in: query type: string + required: true + - name: file + in: formData + type: file + required: true responses: '200': description: '' diff --git a/swagger/templates/permissions-user.yaml b/swagger/templates/permissions-user.yaml index f8ab38625..339aeb98c 100644 --- a/swagger/templates/permissions-user.yaml +++ b/swagger/templates/permissions-user.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/permissions.yaml b/swagger/templates/permissions.yaml index ff9ef38c2..9ca64e8b6 100644 --- a/swagger/templates/permissions.yaml +++ b/swagger/templates/permissions.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/tags-tag.yaml b/swagger/templates/tags-tag.yaml index d12ad63f5..cb9495968 100644 --- a/swagger/templates/tags-tag.yaml +++ b/swagger/templates/tags-tag.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/tags.yaml b/swagger/templates/tags.yaml index d396ec790..09a5f634f 100644 --- a/swagger/templates/tags.yaml +++ b/swagger/templates/tags.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true @@ -20,6 +23,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/tag.json responses: diff --git a/tests/integration_tests/python/test_batch.py b/tests/integration_tests/python/test_batch.py index 2bdceeac2..9baa980a5 100644 --- a/tests/integration_tests/python/test_batch.py +++ b/tests/integration_tests/python/test_batch.py @@ -280,8 +280,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo batch1 = r.json() assert len(batch1['matched']) == 2 - assert batch1['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch1['matched'] ] + assert session in matched_ids + assert session2 in matched_ids # create a batch w/o inputs targeting acquisitions r = as_admin.post('/batch', json={ 
@@ -291,8 +292,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo assert r.ok batch2 = r.json() assert len(batch2['matched']) == 2 - assert batch2['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch2['matched'] ] + assert session in matched_ids + assert session2 in matched_ids # create a batch w/o inputs targeting project r = as_admin.post('/batch', json={ @@ -302,8 +304,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo assert r.ok batch3 = r.json() assert len(batch3['matched']) == 2 - assert batch3['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch3['matched'] ] + assert session in matched_ids + assert session2 in matched_ids batch_id = batch1['_id'] @@ -353,8 +356,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo batch4 = r.json() assert len(batch4['matched']) == 2 - assert batch4['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch4['matched'] ] + assert session in matched_ids + assert session2 in matched_ids batch_id = batch4['_id'] # run batch diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 37c988226..7ddc2389a 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -1,10 +1,25 @@ def path_in_result(path, result): return [node.get('_id', node.get('name')) for node in result['path']] == path - def child_in_result(child, result): return sum(all((k in c and c[k]==v) for k,v in child.iteritems()) for c in result['children']) == 1 +def gear_in_path(name, id, result): + for g in result['path']: + if g['gear']['name'] == name and g['_id'] == id: + return True + return False + +def idz(s): + return '<id:' + s + '>' + +def create_analysis(as_admin, file_form, container, c_id, label): + r = as_admin.post('/' + container + '/' + c_id + '/analyses', files=file_form( + 'one.csv', meta={'label': label, 'inputs': [{'name': 'one.csv'}]} + )) + assert r.ok + return r.json()['_id'] + def test_resolver(data_builder, as_admin, as_user, as_public, file_form): # ROOT @@ -14,7 +29,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): # try resolving invalid (non-list) path r = as_admin.post('/resolve', json={'path': 'test'}) - assert r.status_code == 500 + assert r.status_code == 400 # resolve root (empty) r = as_admin.post('/resolve', json={'path': []}) @@ -29,13 +44,12 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert result['path'] == [] - assert child_in_result({'_id': group, 'node_type': 'group'}, result) + assert child_in_result({'_id': group, 'container_type': 'group'}, result) # try to resolve non-existent root/child r = as_admin.post('/resolve', json={'path': ['child']}) assert r.status_code == 404 - # GROUP # try to resolve root/group as different (and non-root) user r = as_user.post('/resolve', json={'path': [group]}) @@ -55,13 +69,12 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group], result) - assert child_in_result({'_id': project, 'node_type': 'project'}, result) + assert child_in_result({'_id': project, 'container_type': 'project'}, result) # try to resolve non-existent root/group/child r =
as_admin.post('/resolve', json={'path': [group, 'child']}) assert r.status_code == 404 - # PROJECT # resolve root/group/project (empty) r = as_admin.post('/resolve', json={'path': [group, project_label]}) @@ -78,7 +91,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'name': project_file, 'node_type': 'file'}, result) + assert child_in_result({'name': project_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project (1 file, 1 session) @@ -88,16 +101,33 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'_id': session, 'node_type': 'session'}, result) + assert child_in_result({'_id': session, 'container_type': 'session'}, result) assert len(result['children']) == 2 - # resolve root/group/project/file + # resolve root/group/project/files (1 file, 1 session) + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files']}) + result = r.json() + assert r.ok + assert path_in_result([group, project], result) + assert child_in_result({'name': project_file, 'container_type': 'file'}, result) + assert len(result['children']) == 1 + + # resolve root/group/project/file (old way) r = as_admin.post('/resolve', json={'path': [group, project_label, project_file]}) result = r.json() + assert r.status_code == 404 + + # resolve root/group/project/file + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files', project_file]}) + result = r.json() assert r.ok assert path_in_result([group, project, project_file], result) assert result['children'] == [] + # resolve non-existent root/group/project/file + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files', 'NON-EXISTENT-FILE.dat']}) + assert r.status_code == 404 + # try to resolve non-existent root/group/project/child r = as_admin.post('/resolve', json={'path': [group, project_label, 'child']}) assert r.status_code == 404 @@ -119,7 +149,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session], result) - assert child_in_result({'name': session_file, 'node_type': 'file'}, result) + assert child_in_result({'name': session_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project/session (1 file, 1 acquisition) @@ -129,11 +159,11 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session], result) - assert child_in_result({'_id': acquisition, 'node_type': 'acquisition'}, result) + assert child_in_result({'_id': acquisition, 'container_type': 'acquisition'}, result) assert len(result['children']) == 2 # resolve root/group/project/session/file - r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, session_file]}) + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'files', session_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, session_file], result) @@ -160,26 +190,27 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition], result) - assert 
child_in_result({'name': acquisition_file, 'node_type': 'file'}, result) + assert child_in_result({'name': acquisition_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project/session/acquisition/file - r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file]}) + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acquisition_file], result) assert result['children'] == [] - def idz(s): - return '<id:' + s + '>' - # resolve root/group/project/session/acquisition/file with id - r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), acquisition_file]}) + r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), 'files', acquisition_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acquisition_file], result) assert result['children'] == [] + # resolve root/group/project/session/acquisition/file with invalid id + r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz('not-valid'), idz(acquisition), 'files', acquisition_file]}) + assert r.status_code == 400 + # try to resolve non-existent root/group/project/session/acquisition/child r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'child']}) assert r.status_code == 404 @@ -189,3 +220,383 @@ def idz(s): # try to resolve non-existent (also invalid) root/group/project/session/acquisition/file/child r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file, 'child']}) assert r.status_code == 404 + +def test_lookup(data_builder, as_admin, as_user, as_public, file_form): + # ROOT + # try accessing lookup w/o logging in + r = as_public.post('/lookup', json={'path': []}) + assert r.status_code == 403 + + # try resolving invalid (non-list) path + r = as_admin.post('/lookup', json={'path': 'test'}) + assert r.status_code == 400 + + # lookup root (empty) + r = as_admin.post('/lookup', json={'path': []}) + result = r.json() + assert r.status_code == 404 + + # lookup root (1 group) + group = data_builder.create_group() + r = as_admin.post('/lookup', json={'path': []}) + assert r.status_code == 404 + + # try to lookup non-existent root/child + r = as_admin.post('/lookup', json={'path': ['child']}) + assert r.status_code == 404 + + + # GROUP + # try to lookup root/group as different (and non-root) user + r = as_user.post('/lookup', json={'path': [group]}) + assert r.status_code == 403 + + # lookup root/group (empty) + r = as_admin.post('/lookup', json={'path': [group]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'group' + assert result['_id'] == group + + # try to lookup non-existent root/group/child + r = as_admin.post('/lookup', json={'path': [group, 'child']}) + assert r.status_code == 404 + + + # PROJECT + # lookup root/group/project (empty) + project_label = 'test-lookup-project-label' + project = data_builder.create_project(label=project_label) + + r = as_admin.post('/lookup', json={'path': [group, project_label]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'project' + assert result['_id'] == project + + # lookup root/group/project/file + project_file = 'project_file' +
r = as_admin.post('/projects/' + project + '/files', files=file_form(project_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, 'files', project_file]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'file' + assert result['name'] == project_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent root/group/project/child + r = as_admin.post('/lookup', json={'path': [group, project_label, 'child']}) + assert r.status_code == 404 + + + # SESSION + # lookup root/group/project/session (empty) + session_label = 'test-lookup-session-label' + session = data_builder.create_session(label=session_label) + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'session' + assert result['_id'] == session + + # lookup root/group/project/session/file + session_file = 'session_file' + r = as_admin.post('/sessions/' + session + '/files', files=file_form(session_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'files', session_file]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'file' + assert result['name'] == session_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent root/group/project/session/child + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'child']}) + assert r.status_code == 404 + + # ACQUISITION + # lookup root/group/project/session/acquisition (empty) + acquisition_label = 'test-lookup-acquisition-label' + acquisition = data_builder.create_acquisition(label=acquisition_label) + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'acquisition' + assert result['_id'] == acquisition + + # lookup root/group/project/session/acquisition/file + acquisition_file = 'acquisition_file' + r = as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form(acquisition_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'file' + assert result['name'] == acquisition_file + assert 'mimetype' in result + assert 'size' in result + + # lookup root/group/project/session/acquisition with id + r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition)]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'acquisition' + assert result['_id'] == acquisition + + # lookup root/group/project/session/acquisition/file with id + r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), 'files', acquisition_file]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'file' + assert result['name'] == acquisition_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent root/group/project/session/acquisition/child + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'child']}) + assert r.status_code == 404 + + + # FILE + # try to lookup non-existent (also invalid) root/group/project/session/acquisition/file/child + r = as_admin.post('/lookup', json={'path': [group, 
project_label, session_label, acquisition_label, 'files', acquisition_file, 'child']}) + assert r.status_code == 404 + +def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): + # ROOT + # try accessing resolver w/o logging in + r = as_public.post('/resolve', json={'path': ['gears']}) + assert r.status_code == 403 + + # resolve root (1 gear) + gear_id = data_builder.create_gear() + gear = as_admin.get('/gears/' + gear_id).json() + gear_name = gear['gear']['name'] + + r = as_admin.post('/resolve', json={'path': ['gears']}) + result = r.json() + assert r.ok + assert result['path'] == [] + assert child_in_result({'_id': gear_id, 'container_type': 'gear'}, result) + + # resolve gear (empty) + r = as_admin.post('/resolve', json={'path': ['gears', gear_name]}) + result = r.json() + assert r.ok + assert gear_in_path(gear_name, gear_id, result) + assert result['children'] == [] + + # resolve gear by id + r = as_admin.post('/resolve', json={'path': ['gears', idz(gear_id)]}) + result = r.json() + assert r.ok + assert gear_in_path(gear_name, gear_id, result) + assert result['children'] == [] + + # Lookup (empty) + r = as_admin.post('/lookup', json={'path': ['gears']}) + result = r.json() + assert r.status_code == 404 + + # Lookup by name + r = as_admin.post('/lookup', json={'path': ['gears', gear_name]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'gear' + assert result['_id'] == gear_id + assert result['gear']['name'] == gear_name + + # Lookup by id + r = as_admin.post('/lookup', json={'path': ['gears', idz(gear_id)]}) + result = r.json() + assert r.ok + assert result['container_type'] == 'gear' + assert result['_id'] == gear_id + assert result['gear']['name'] == gear_name + + # Lookup (not-found) + r = as_admin.post('/lookup', json={'path': ['gears', 'NON-EXISTENT-GEAR']}) + assert r.status_code == 404 + + # Lookup by id (not-found) + r = as_admin.post('/lookup', json={'path': ['gears', idz('ffffffffffffffffffffffff')]}) + assert r.status_code == 404 + + +def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form): + analysis_file = 'one.csv' + + # Create group + group = data_builder.create_group() + + # Create project + project_label = 'test-resolve-analyses-project-label' + project = data_builder.create_project(label=project_label) + + project_file = 'project_file' + r = as_admin.post('/projects/' + project + '/files', files=file_form(project_file)) + assert r.ok + + project_analysis_name = 'test-project-analysis' + project_analysis = create_analysis(as_admin, file_form, 'projects', project, project_analysis_name) + + # Create session + session_label = 'test-resolve-analyses-session-label' + session = data_builder.create_session(label=session_label) + + session_file = 'session_file' + r = as_admin.post('/sessions/' + session + '/files', files=file_form(session_file)) + assert r.ok + + session_analysis_name = 'test-session-analysis' + session_analysis = create_analysis(as_admin, file_form, 'sessions', session, session_analysis_name) + + # Create acquisition + acquisition_label = 'test-resolve-analyses-acquisition-label' + acquisition = data_builder.create_acquisition(label=acquisition_label) + + acquisition_file = 'acquisition_file' + r = as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form(acquisition_file)) + assert r.ok + + acq_analysis_name = 'test-acquisition-analysis' + acq_analysis = create_analysis(as_admin, file_form, 'acquisitions', acquisition, acq_analysis_name) + + # GROUP + r = 
+    # GROUP
+    r = as_admin.post('/resolve', json={'path': [group, 'analyses']})
+    assert r.status_code == 404
+
+    # PROJECT
+    # resolve root/group/project (1 file, 1 session, 1 analysis)
+    r = as_admin.post('/resolve', json={'path': [group, project_label]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project], result)
+    assert child_in_result({'name': project_file, 'container_type': 'file'}, result)
+    assert child_in_result({'_id': session, 'container_type': 'session'}, result)
+    assert child_in_result({'_id': project_analysis, 'container_type': 'analysis'}, result)
+    assert len(result['children']) == 3
+
+    # resolve root/group/project/analyses
+    r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses']})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project], result)
+    assert child_in_result({'_id': project_analysis, 'container_type': 'analysis'}, result)
+    assert len(result['children']) == 1
+
+    # resolve root/group/project/analysis/name
+    r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses', project_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, project_analysis], result)
+    assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result)
+    assert len(result['children']) == 1
+
+    # lookup root/group/project/analysis/name
+    r = as_admin.post('/lookup', json={'path': [group, project_label, 'analyses', project_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert result['container_type'] == 'analysis'
+    assert result['_id'] == project_analysis
+    assert len(result['files']) == 1
+
+    # resolve root/group/project/analysis/files
+    r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses', project_analysis_name, 'files', analysis_file]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, project_analysis, analysis_file], result)
+    assert result['children'] == []
+
+    # SESSION
+    # resolve root/group/project/session (1 file, 1 acquisition, 1 analysis)
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session], result)
+    assert child_in_result({'name': session_file, 'container_type': 'file'}, result)
+    assert child_in_result({'_id': acquisition, 'container_type': 'acquisition'}, result)
+    assert child_in_result({'_id': session_analysis, 'container_type': 'analysis'}, result)
+    assert len(result['children']) == 3
+
+    # resolve root/group/project/session/analysis/name
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'analyses', session_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, session_analysis], result)
+    assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result)
+    assert len(result['children']) == 1
+
+    # lookup root/group/project/session/analysis/name
+    r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'analyses', session_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert result['container_type'] == 'analysis'
+    assert result['_id'] == session_analysis
+    assert len(result['files']) == 1
+
+    # resolve root/group/project/session/analysis/files
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'analyses', session_analysis_name, 'files', analysis_file]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, session_analysis, analysis_file], result)
+    assert result['children'] == []
+
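+    # The ACQUISITION section below repeats the same checks one level deeper and
+    # also exercises the id-based forms, addressing the session and the analysis
+    # with idz() instead of their labels.
+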
+    # ACQUISITION
+    # resolve root/group/project/session/acquisition
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, acquisition], result)
+    assert child_in_result({'name': acquisition_file, 'container_type': 'file'}, result)
+    assert child_in_result({'_id': acq_analysis, 'container_type': 'analysis'}, result)
+    assert len(result['children']) == 2
+
+    # resolve root/group/project/session/acquisition/analysis/name
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, acquisition, acq_analysis], result)
+    assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result)
+    assert len(result['children']) == 1
+
+    # lookup root/group/project/session/acquisition/analysis/name
+    r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name]})
+    result = r.json()
+    assert r.ok
+    assert result['container_type'] == 'analysis'
+    assert result['_id'] == acq_analysis
+    assert len(result['files']) == 1
+
+    # resolve root/group/project/session/acquisition/analysis by id
+    r = as_admin.post('/resolve', json={'path': [group, project_label, idz(session), acquisition_label, 'analyses', idz(acq_analysis)]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, acquisition, acq_analysis], result)
+    assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result)
+    assert len(result['children']) == 1
+
+    # lookup root/group/project/session/acquisition/analysis by id
+    r = as_admin.post('/lookup', json={'path': [group, project_label, idz(session), acquisition_label, 'analyses', idz(acq_analysis)]})
+    result = r.json()
+    assert r.ok
+    assert result['container_type'] == 'analysis'
+    assert result['_id'] == acq_analysis
+    assert len(result['files']) == 1
+
+    # resolve root/group/project/session/acquisition/analysis/files
+    r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name, 'files', analysis_file]})
+    result = r.json()
+    assert r.ok
+    assert path_in_result([group, project, session, acquisition, acq_analysis, analysis_file], result)
+    assert result['children'] == []
+
+
diff --git a/tests/integration_tests/python/test_uploads.py b/tests/integration_tests/python/test_uploads.py
index 607ec86e8..80df8d27c 100644
--- a/tests/integration_tests/python/test_uploads.py
+++ b/tests/integration_tests/python/test_uploads.py
@@ -341,9 +341,16 @@ def test_reaper_project_search(data_builder, file_form, as_root):
     assert r.ok
     project_list = r.json()
     assert len(project_list) == 2
-    project = project_list[1]
-    assert project_list[1]['label'] == expected_project_label_2
+
+    # Order is not guaranteed
+    if project_list[0]['_id'] == project_1:
+        project = project_list[1]
+    else:
+        project = project_list[0]
+
+    assert project['label'] == expected_project_label_2
     project_2 = project['_id']
+
     assert len(as_root.get('/projects/' + project_2 + '/sessions').json()) == 1
     session = as_root.get('/projects/' + project_2 + '/sessions').json()[0]['_id']
@@ -374,8 +381,14 @@
     project_list = r.json()
     # Ensure there are still only 2 projects
     assert len(project_list) == 2
-    project = project_list[1]
-    assert project_list[1]['label'] == expected_project_label_2
+
+    # Order is not guaranteed
+    if project_list[0]['_id'] == project_1:
+        project = project_list[1]
+    else:
+        project = project_list[0]
+
+    assert project['label'] == expected_project_label_2
     assert len(as_root.get('/projects/' + project_2 + '/sessions').json()) == 2
     session2 = as_root.get('/projects/' + project_2 + '/sessions').json()[1]['_id']
diff --git a/tests/unit_tests/python/test_validators.py b/tests/unit_tests/python/test_validators.py
index 0dcfb157e..27cced13e 100644
--- a/tests/unit_tests/python/test_validators.py
+++ b/tests/unit_tests/python/test_validators.py
@@ -62,7 +62,7 @@ def test_payload():
 
 def test_file_output_valid():
     payload = [{
-        'modified': 'yesterday',
+        'modified': '2018-02-07T17:27:21+00:00',
         'size': 10
     }]
     schema_uri = validators.schema_uri("output", "file-list.json")
@@ -71,13 +71,22 @@
 def test_file_output_invalid():
     payload = [{
-        'modified': 'yesterday'
+        'modified': '2018-02-07T17:27:21+00:00'
     }]
     schema_uri = validators.schema_uri("output", "file-list.json")
     schema, resolver = validators._resolve_schema(schema_uri)
     with pytest.raises(jsonschema.exceptions.ValidationError):
         validators._validate_json(payload, schema, resolver)
 
+def test_jsonschema_validate_enum_with_null():
+    schema = {
+        'oneOf': [
+            { 'type': 'null' },
+            { 'type': 'string', 'enum': ['true', 'false'] }
+        ]
+    }
+    jsonschema.validate('true', schema)
+    jsonschema.validate(None, schema)
 # ===== Automated Tests =====