From 0060ca30cf438f5463ed328d8fd5c41bcc197d55 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 11 Jan 2018 08:18:34 -0600 Subject: [PATCH 01/53] Allow template parameters to be optional Parameters can now be optional or required, which allows for more advanced template usage. Updated existing templates to set the required flag. Added documentation for root level analyses apis via existing templates. --- swagger/index.yaml | 3 +++ swagger/paths/acquisitions.yaml | 2 ++ swagger/paths/analyses.yaml | 13 ++++++++++ swagger/paths/collections.yaml | 2 ++ swagger/paths/projects.yaml | 2 ++ swagger/paths/sessions.yaml | 2 ++ swagger/support/swagger-resolver.js | 2 +- swagger/templates/analyses-list.yaml | 3 +++ ...analysis-files-create-ticket-filename.yaml | 12 ++++++++-- swagger/templates/analysis-files.yaml | 12 ++++++++-- swagger/templates/analysis-item.yaml | 24 ++++++++++++++++--- swagger/templates/analysis-notes-item.yaml | 3 +++ swagger/templates/analysis-notes.yaml | 3 +++ swagger/templates/container-item.yaml | 5 ++++ swagger/templates/container.yaml | 4 ++++ swagger/templates/file-item.yaml | 3 +++ swagger/templates/file-list-upload.yaml | 3 +++ swagger/templates/notes-note.yaml | 3 +++ swagger/templates/notes.yaml | 3 +++ swagger/templates/packfile-end.yaml | 3 +++ swagger/templates/packfile-start.yaml | 3 +++ swagger/templates/packfile.yaml | 3 +++ swagger/templates/permissions-user.yaml | 3 +++ swagger/templates/permissions.yaml | 3 +++ swagger/templates/tags-tag.yaml | 3 +++ swagger/templates/tags.yaml | 3 +++ 26 files changed, 117 insertions(+), 8 deletions(-) create mode 100644 swagger/paths/analyses.yaml diff --git a/swagger/index.yaml b/swagger/index.yaml index b9ed8a6e4..d47d08681 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -36,6 +36,8 @@ tags: description: Site-wide reports - name: batch description: Batch job operations + - name: analyses + description: Analysis operations paths: $include: @@ -60,6 +62,7 @@ paths: - 
paths/projects.yaml - paths/report.yaml - paths/batch.yaml + - paths/analyses.yaml securityDefinitions: diff --git a/swagger/paths/acquisitions.yaml b/swagger/paths/acquisitions.yaml index 325ad7499..f13775733 100644 --- a/swagger/paths/acquisitions.yaml +++ b/swagger/paths/acquisitions.yaml @@ -46,6 +46,8 @@ $template_arguments: $template: templates/analyses-list.yaml /acquisitions/{AcquisitionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /acquisitions/{AcquisitionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/analyses.yaml b/swagger/paths/analyses.yaml new file mode 100644 index 000000000..c6a59ad69 --- /dev/null +++ b/swagger/paths/analyses.yaml @@ -0,0 +1,13 @@ +$template_arguments: + tag: 'analyses' + +/analyses/{AnalysisId}: + $template: templates/analysis-item.yaml + arguments: + supportsDelete: false + +/analyses/{AnalysisId}/files: + $template: templates/analysis-files.yaml + +/analyses/{AnalysisId}/files/{Filename}: + $template: templates/analysis-files-create-ticket-filename.yaml diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 486090979..d2f1a9880 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -173,6 +173,8 @@ $template_arguments: $template: templates/analyses-list.yaml /collections/{CollectionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /collections/{CollectionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /collections/{CollectionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/projects.yaml b/swagger/paths/projects.yaml index 3c039f56c..ed5952f38 100644 --- a/swagger/paths/projects.yaml +++ b/swagger/paths/projects.yaml @@ -225,6 +225,8 @@ $template_arguments: $template: templates/analyses-list.yaml 
/projects/{ProjectId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /projects/{ProjectId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /projects/{ProjectId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/paths/sessions.yaml b/swagger/paths/sessions.yaml index 2b79ba2fa..a9664efdc 100644 --- a/swagger/paths/sessions.yaml +++ b/swagger/paths/sessions.yaml @@ -115,6 +115,8 @@ $template_arguments: /sessions/{SessionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml + arguments: + supportsDelete: true /sessions/{SessionId}/analyses/{AnalysisId}/files: $template: templates/analysis-files.yaml /sessions/{SessionId}/analyses/{AnalysisId}/files/{Filename}: diff --git a/swagger/support/swagger-resolver.js b/swagger/support/swagger-resolver.js index ee2575df0..6b154dccf 100644 --- a/swagger/support/swagger-resolver.js +++ b/swagger/support/swagger-resolver.js @@ -34,7 +34,7 @@ function validateTemplateArgs(tmplpath, template, args) { if( !param.name ) { throw 'Template "' + tmplpath + '" parameter does not have a name!'; } - if( _.isNil(args[param.name]) ) { + if( param.required && _.isNil(args[param.name]) ) { throw 'Template "' + tmplpath + '" invocation is missing parameter: ' + param.name; } } diff --git a/swagger/templates/analyses-list.yaml b/swagger/templates/analyses-list.yaml index ad44c1a10..6d76dc2d1 100644 --- a/swagger/templates/analyses-list.yaml +++ b/swagger/templates/analyses-list.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/analysis-files-create-ticket-filename.yaml b/swagger/templates/analysis-files-create-ticket-filename.yaml index 7f90a951d..08b59bd54 100644 --- a/swagger/templates/analysis-files-create-ticket-filename.yaml +++ 
b/swagger/templates/analysis-files-create-ticket-filename.yaml @@ -5,12 +5,15 @@ parameters: type: string - name: tag type: string + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: '{{.}}' + {{/parameter}} - required: true type: string in: path @@ -29,7 +32,12 @@ template: | files in the anlaysis. If no "ticket" query param is included, files will be downloaded directly. - operationId: download_{{resource}}_analysis_files_by_filename + {{#resource}} + operationId: download_{{.}}_analysis_files_by_filename + {{/resource}} + {{^resource}} + operationId: download_analysis_files_by_filename + {{/resource}} tags: - '{{tag}}' produces: diff --git a/swagger/templates/analysis-files.yaml b/swagger/templates/analysis-files.yaml index 1577e15e5..5e0f6073c 100644 --- a/swagger/templates/analysis-files.yaml +++ b/swagger/templates/analysis-files.yaml @@ -5,12 +5,15 @@ parameters: type: string - name: tag type: string + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: '{{.}}' + {{/parameter}} - required: true type: string in: path @@ -24,7 +27,12 @@ template: | files in the anlaysis If no "ticket" query param is included, server error 500 - operationId: download_{{resource}}_analysis_files + {{#resource}} + operationId: download_{{.}}_analysis_files + {{/resource}} + {{^resource}} + operationId: download_analysis_files + {{/resource}} tags: - '{{tag}}' produces: diff --git a/swagger/templates/analysis-item.yaml b/swagger/templates/analysis-item.yaml index 0a46cde8b..3b2251535 100644 --- a/swagger/templates/analysis-item.yaml +++ b/swagger/templates/analysis-item.yaml @@ -5,19 +5,30 @@ parameters: type: string - name: tag type: string + required: true + - name: supportsDelete + type: boolean + required: true template: | parameters: + {{#parameter}} - required: true type: string in: path - name: '{{parameter}}' + name: 
'{{.}}' + {{/parameter}} - required: true type: string in: path name: AnalysisId get: summary: Get an analysis. - operationId: get_{{resource}}_analysis + {{#resource}} + operationId: get_{{.}}_analysis + {{/resource}} + {{^resource}} + operationId: get_analysis + {{/resource}} tags: - '{{tag}}' responses: @@ -28,11 +39,18 @@ template: | examples: response: $ref: examples/output/analysis.json + {{#supportsDelete}} delete: summary: Delete an anaylsis - operationId: delete_{{resource}}_analysis + {{#resource}} + operationId: delete_{{.}}_analysis + {{/resource}} + {{^resource}} + operationId: delete_analysis + {{/resource}} tags: - '{{tag}}' responses: '200': $ref: '#/responses/200:deleted-with-count' + {{/supportsDelete}} diff --git a/swagger/templates/analysis-notes-item.yaml b/swagger/templates/analysis-notes-item.yaml index f4a87a726..6ee57edb7 100644 --- a/swagger/templates/analysis-notes-item.yaml +++ b/swagger/templates/analysis-notes-item.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/analysis-notes.yaml b/swagger/templates/analysis-notes.yaml index 2627f2edd..12168f23c 100644 --- a/swagger/templates/analysis-notes.yaml +++ b/swagger/templates/analysis-notes.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/container-item.yaml b/swagger/templates/container-item.yaml index 223838060..77670df1c 100644 --- a/swagger/templates/container-item.yaml +++ b/swagger/templates/container-item.yaml @@ -1,14 +1,19 @@ parameters: - name: resource type: string + required: true - name: tag type: string + required: true - name: parameter type: string + required: true - name: 
update-input-schema type: string + required: true - name: get-output-schema type: string + required: true template: | parameters: - in: path diff --git a/swagger/templates/container.yaml b/swagger/templates/container.yaml index d867de6d3..c5efe50bc 100644 --- a/swagger/templates/container.yaml +++ b/swagger/templates/container.yaml @@ -1,12 +1,16 @@ parameters: - name: resource type: string + required: true - name: tag type: string + required: true - name: list-output-schema type: string + required: true - name: create-input-schema type: string + required: true template: | get: summary: Get a list of {{#pluralize}}{{resource}}{{/pluralize}} diff --git a/swagger/templates/file-item.yaml b/swagger/templates/file-item.yaml index a6e436b3e..3b81525da 100644 --- a/swagger/templates/file-item.yaml +++ b/swagger/templates/file-item.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/file-list-upload.yaml b/swagger/templates/file-list-upload.yaml index e8985e014..3d28a3da9 100644 --- a/swagger/templates/file-list-upload.yaml +++ b/swagger/templates/file-list-upload.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - name: '{{parameter}}' diff --git a/swagger/templates/notes-note.yaml b/swagger/templates/notes-note.yaml index 899ad54a8..77636a02d 100644 --- a/swagger/templates/notes-note.yaml +++ b/swagger/templates/notes-note.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/notes.yaml b/swagger/templates/notes.yaml index 1c4936b09..60513ca89 100644 
--- a/swagger/templates/notes.yaml +++ b/swagger/templates/notes.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/packfile-end.yaml b/swagger/templates/packfile-end.yaml index 0b662d20a..3af0ca3c4 100644 --- a/swagger/templates/packfile-end.yaml +++ b/swagger/templates/packfile-end.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/packfile-start.yaml b/swagger/templates/packfile-start.yaml index d8b5ebfc4..065d0ff7c 100644 --- a/swagger/templates/packfile-start.yaml +++ b/swagger/templates/packfile-start.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/packfile.yaml b/swagger/templates/packfile.yaml index d302d75c0..c2df8b52f 100644 --- a/swagger/templates/packfile.yaml +++ b/swagger/templates/packfile.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/permissions-user.yaml b/swagger/templates/permissions-user.yaml index f8ab38625..339aeb98c 100644 --- a/swagger/templates/permissions-user.yaml +++ b/swagger/templates/permissions-user.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/permissions.yaml 
b/swagger/templates/permissions.yaml index ff9ef38c2..9ca64e8b6 100644 --- a/swagger/templates/permissions.yaml +++ b/swagger/templates/permissions.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/tags-tag.yaml b/swagger/templates/tags-tag.yaml index d12ad63f5..cb9495968 100644 --- a/swagger/templates/tags-tag.yaml +++ b/swagger/templates/tags-tag.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true diff --git a/swagger/templates/tags.yaml b/swagger/templates/tags.yaml index d396ec790..3c4ca6204 100644 --- a/swagger/templates/tags.yaml +++ b/swagger/templates/tags.yaml @@ -1,10 +1,13 @@ parameters: - name: resource type: string + required: true - name: parameter type: string + required: true - name: tag type: string + required: true template: | parameters: - required: true From 89363b400d9096350b22c85941b52fc16ab72c3c Mon Sep 17 00:00:00 2001 From: Ryan Sanford Date: Fri, 12 Jan 2018 10:19:07 -0600 Subject: [PATCH 02/53] Initial addition of reaper upload API doc --- swagger/index.yaml | 1 + swagger/paths/upload-by-reaper.yaml | 37 +++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+) create mode 100644 swagger/paths/upload-by-reaper.yaml diff --git a/swagger/index.yaml b/swagger/index.yaml index d47d08681..11778c473 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -44,6 +44,7 @@ paths: - paths/login.yaml - paths/download.yaml - paths/upload-by-label.yaml + - paths/upload-by-reaper.yaml - paths/upload-by-uid.yaml - paths/upload-match-uid.yaml - paths/clean-packfiles.yaml diff --git a/swagger/paths/upload-by-reaper.yaml b/swagger/paths/upload-by-reaper.yaml new file mode 100644 index 
000000000..ea28acc75 --- /dev/null +++ b/swagger/paths/upload-by-reaper.yaml @@ -0,0 +1,37 @@ +/upload/reaper: + post: + summary: Bottom-up UID matching of Multipart form upload with N file fields, each with their desired filename. + description: | + Upload data, allowing users to move sessions during scans without causing new data to be + created in referenced project/group. + + + ### Evaluation Order: + + * If a matching acquisition UID is found anywhere on the system, the related files will be placed under that acquisition. + * **OR** If a matching session UID is found, a new acquistion is created with the specified UID under that Session UID. + * **OR** If a matching group ID and project label are found, a new session and acquisition will be created within that project + * **OR** If a matching group ID is found, a new project and session and acquisition will be created within that group. + * **OR** A new session and acquisition will be created within a special "Unknown" group and project, which is only visible to system administrators. + + operationId: upload_by_reaper + tags: + - files + responses: + '200': + description: 'Files uploaded successfully' + schema: + $ref: schemas/output/file-list.json + examples: + application/json: + $ref: examples/file_info_list.json + '402': + description: Uploads must be from an authorized drone + consumes: + - multipart/form-data + parameters: + # TODO: Need to add ref to json input schema. Proper way not yet defined for Multipart form uploads. + # See api/schemas/input/uidupload.json for the format of this metadata. 
+ - in: formData + name: formData + type: string \ No newline at end of file From 1bc08eb6a494dacc36dde057b65a0c2f0d4fb548 Mon Sep 17 00:00:00 2001 From: David Farkas Date: Fri, 12 Jan 2018 17:33:07 +0100 Subject: [PATCH 03/53] Add jobs/logs, jobs/prepare-complete, jobs/accept-failed-output endpoints doc --- swagger/paths/jobs.yaml | 57 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/swagger/paths/jobs.yaml b/swagger/paths/jobs.yaml index 04c328ef3..9c91568f2 100644 --- a/swagger/paths/jobs.yaml +++ b/swagger/paths/jobs.yaml @@ -142,3 +142,60 @@ schema: example: $ref: examples/output/job-config.json +/jobs/{JobId}/logs: + parameters: + - required: true + type: string + in: path + name: JobId + post: + summary: Add logs to a job. + operationId: add_logs + tags: + - jobs + responses: + '200': + description: Logs were added. No value is returned. +/jobs/{JobId}/accept-failed-output: + parameters: + - required: true + type: string + in: path + name: JobId + post: + summary: Accept failed job output. + description: > + Remove the 'from_failed_job' flag from the files. + + Create any automatic jobs for the accepted files. + operationId: accept_failed_output + tags: + - jobs + responses: + '200': + description: Failed output is accepted and created any automatic jobs. + '400': + description: Can only accept failed output of a job that failed. +/jobs/{JobId}/prepare-complete: + parameters: + - required: true + type: string + in: path + name: JobId + post: + summary: Create a ticket with the job id and its status. 
+ operationId: prepare_compete + tags: + - jobs + parameters: + - name: body + in: body + schema: + example: + success: True + responses: + '200': + description: 'Returns a ticket' + schema: + example: + ticket: 579e97738120be2ada087feb \ No newline at end of file From 5478bf612e95db2378d3d4a22b91f825cade1b9a Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Fri, 12 Jan 2018 17:42:18 +0100 Subject: [PATCH 04/53] Document site rules --- swagger/examples/output/rule.json | 17 +++++++++ swagger/index.yaml | 1 + swagger/package.json | 2 +- swagger/paths/projects.yaml | 50 ++++++++++++++++++------- swagger/paths/site-rules.yaml | 62 +++++++++++++++++++++++++++++++ swagger/schemas/output/rule.json | 7 ++++ 6 files changed, 125 insertions(+), 14 deletions(-) create mode 100644 swagger/examples/output/rule.json create mode 100644 swagger/paths/site-rules.yaml create mode 100644 swagger/schemas/output/rule.json diff --git a/swagger/examples/output/rule.json b/swagger/examples/output/rule.json new file mode 100644 index 000000000..da502290d --- /dev/null +++ b/swagger/examples/output/rule.json @@ -0,0 +1,17 @@ +{ + "_id": "5a12f2923306be0016179f47", + "name": "dcm2niix", + "alg": "dcm2niix", + "any": [], + "all": [ + { + "regex": true, + "type": "file.measurements", + "value": "^(?!non-image).+$" + }, + { + "type": "file.type", + "value": "nifti" + } + ] +} diff --git a/swagger/index.yaml b/swagger/index.yaml index 11778c473..a55fe7c48 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -64,6 +64,7 @@ paths: - paths/report.yaml - paths/batch.yaml - paths/analyses.yaml + - paths/site-rules.yaml securityDefinitions: diff --git a/swagger/package.json b/swagger/package.json index 73a30727e..3700b1869 100644 --- a/swagger/package.json +++ b/swagger/package.json @@ -8,7 +8,7 @@ "lint": "node_modules/.bin/grunt lintSchemas", "test": "node_modules/.bin/jasmine --config=support/jasmine.json", "watch": "node_modules/.bin/grunt live", - "coverage": "node_modules/.bin/grunt 
coverage" + "coverage": "node_modules/.bin/grunt coverage" }, "author": "Justin Ehlert ", "license": "MIT", diff --git a/swagger/paths/projects.yaml b/swagger/paths/projects.yaml index ed5952f38..62b678bc0 100644 --- a/swagger/paths/projects.yaml +++ b/swagger/paths/projects.yaml @@ -20,7 +20,7 @@ $template_arguments: summary: List all groups which have a project in them operationId: get_all_projects_groups tags: - - 'projects' + - projects responses: '200': description: '' @@ -37,7 +37,7 @@ $template_arguments: summary: List all sessions for the given project. operationId: get_project_sessions tags: - - 'projects' + - projects responses: '200': description: '' @@ -54,23 +54,25 @@ $template_arguments: summary: List all acquisitions for the given project. operationId: get_project_acquisitions tags: - - 'projects' + - projects responses: '200': description: '' schema: $ref: schemas/output/acquisition-list.json -'/projects/{ProjectId}/rules': +/projects/{ProjectId}/rules: parameters: - in: path type: string name: ProjectId required: true get: + summary: List all rules for a project. operationId: get_project_rules tags: - - 'projects' + - projects + - rules responses: '200': description: '' @@ -80,7 +82,8 @@ $template_arguments: summary: Create a new rule for a project. operationId: add_project_rule tags: - - 'projects' + - projects + - rules responses: default: description: '' @@ -90,7 +93,7 @@ $template_arguments: schema: $ref: schemas/input/rule-new.json -'/projects/{ProjectId}/rules/{RuleId}': +/projects/{ProjectId}/rules/{RuleId}: parameters: - in: path type: string @@ -100,11 +103,23 @@ $template_arguments: type: string name: RuleId required: true + get: + summary: Get a project rule. + operationId: get_project_rule + tags: + - projects + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule.json put: summary: Update a rule on a project. 
operationId: modify_project_rule tags: - - 'projects' + - projects + - rules responses: default: description: '' @@ -113,9 +128,18 @@ $template_arguments: name: body schema: $ref: schemas/input/rule-update.json + delete: + summary: Remove a project rule. + operationId: remove_project_rule + tags: + - projects + - rules + responses: + '200': + $ref: '#/responses/200:deleted-with-count' -'/projects/{ProjectId}/template': +/projects/{ProjectId}/template: parameters: - in: path type: string @@ -125,7 +149,7 @@ $template_arguments: summary: Set the session template for a project. operationId: set_project_template tags: - - 'projects' + - projects parameters: - in: body name: body @@ -140,7 +164,7 @@ $template_arguments: summary: Remove the session template for a project. operationId: remove_project_template tags: - - 'projects' + - projects responses: '200': $ref: '#/responses/200:deleted-with-count' @@ -158,7 +182,7 @@ $template_arguments: description: Returns list of modified session ids. operationId: recalc_project tags: - - 'projects' + - projects responses: '200': description: | @@ -178,7 +202,7 @@ $template_arguments: Returns list of modified session ids. operationId: recalc_all_projects tags: - - 'projects' + - projects responses: '200': description: | diff --git a/swagger/paths/site-rules.yaml b/swagger/paths/site-rules.yaml new file mode 100644 index 000000000..73b23e709 --- /dev/null +++ b/swagger/paths/site-rules.yaml @@ -0,0 +1,62 @@ +/site/rules: + get: + summary: List all site rules. + operationId: get_site_rules + tags: + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule-list.json + post: + summary: Create a new site rule. 
+ operationId: add_site_rule + tags: + - rules + responses: + default: + description: '' + parameters: + - in: body + name: body + schema: + $ref: schemas/input/rule-new.json + +/site/rules/{RuleId}: + parameters: + - name: RuleId + type: string + in: path + required: true + get: + summary: Get a site rule. + operationId: get_site_rule + tags: + - rules + responses: + '200': + description: '' + schema: + $ref: schemas/output/rule.json + put: + summary: Update a site rule. + operationId: modify_site_rule + tags: + - rules + responses: + default: + description: '' + parameters: + - in: body + name: body + schema: + $ref: schemas/input/rule-update.json + delete: + summary: Remove a site rule. + operationId: remove_site_rule + tags: + - rules + responses: + '200': + $ref: '#/responses/200:deleted-with-count' diff --git a/swagger/schemas/output/rule.json b/swagger/schemas/output/rule.json new file mode 100644 index 000000000..77ec4fc71 --- /dev/null +++ b/swagger/schemas/output/rule.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Rule", + "type": "object", + "allOf": [{"$ref": "../definitions/rule.json#/definitions/rule-output"}], + "example": {"$ref": "../../examples/output/rule.json"} +} From aeada8dcb54436bd7914e3df0e0be8fe422bbd2a Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 30 Jan 2018 13:52:57 -0600 Subject: [PATCH 05/53] Add missing DELETE file --- swagger/templates/file-item.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/swagger/templates/file-item.yaml b/swagger/templates/file-item.yaml index 3b81525da..76e168014 100644 --- a/swagger/templates/file-item.yaml +++ b/swagger/templates/file-item.yaml @@ -73,3 +73,12 @@ template: | default: description: '' + delete: + summary: Delete a file + operationId: delete_{{resource}}_file + tags: + - '{{tag}}' + responses: + '200': + $ref: '#/responses/200:modified-with-count' + From 8b2feef6481d351e7bdb2f6d5344a3d01b8a371e Mon Sep 17 00:00:00 2001 From: 
Justin Ehlert Date: Tue, 30 Jan 2018 13:58:21 -0600 Subject: [PATCH 06/53] Reorder tags --- swagger/index.yaml | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/swagger/index.yaml b/swagger/index.yaml index a55fe7c48..8015c5929 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -12,32 +12,35 @@ consumes: - 'application/json' tags: - - name: files - description: File upload/download operations - - name: devices - description: Device operations - name: users description: User operations - - name: gears - description: Gear operations - name: groups description: Group operations - - name: jobs - description: Job operations - - name: collections - description: Collection operations + - name: projects + description: Project operations - name: sessions description: Session operations - name: acquisitions description: Acquisition operations - - name: projects - description: Project operations + - name: analyses + description: Analysis operations + - name: collections + description: Collection operations + - name: files + description: File upload/download operations + - name: devices + description: Device operations + - name: gears + description: Gear operations + - name: rules + description: Gear rule configuration + - name: jobs + description: Job operations - name: reports description: Site-wide reports - name: batch description: Batch job operations - - name: analyses - description: Analysis operations + - name: default paths: $include: From 081bbdb0ea661f4e73b8bdc653dd1fa840da4eaf Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 30 Jan 2018 15:37:04 -0600 Subject: [PATCH 07/53] Add schemas for gear APIs. 
--- api/config.py | 1 + swagger/examples/gear_full.json | 37 ----- swagger/examples/gears_list_just_name.json | 11 -- swagger/examples/output/gear-list.json | 35 ++++ swagger/examples/output/gear.json | 35 ++++ swagger/paths/gears.yaml | 68 +++++--- swagger/schemas/definitions/gear.json | 182 +++++++++++++++++++++ swagger/schemas/input/gear.json | 38 +++++ swagger/schemas/output/gear-list.json | 7 + swagger/schemas/output/gear.json | 5 + 10 files changed, 344 insertions(+), 75 deletions(-) delete mode 100755 swagger/examples/gear_full.json delete mode 100755 swagger/examples/gears_list_just_name.json create mode 100644 swagger/examples/output/gear-list.json create mode 100644 swagger/examples/output/gear.json create mode 100644 swagger/schemas/definitions/gear.json create mode 100644 swagger/schemas/input/gear.json create mode 100644 swagger/schemas/output/gear-list.json create mode 100644 swagger/schemas/output/gear.json diff --git a/api/config.py b/api/config.py index c2bbaf696..0a76e6d25 100644 --- a/api/config.py +++ b/api/config.py @@ -160,6 +160,7 @@ def apply_env_variables(config): 'device.json', 'file.json', 'file-update.json', + 'gear.json', 'group-new.json', 'group-update.json', 'info_update.json', diff --git a/swagger/examples/gear_full.json b/swagger/examples/gear_full.json deleted file mode 100755 index 908f0ad58..000000000 --- a/swagger/examples/gear_full.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "test-case-gear", - "manifest": { - "name": "test-case-gear", - "label": "Test Case Gear", - "version": "0", - - "author": "Nathaniel Kofalt", - "description": "A gear built to drive test cases", - - "url": "http://none.example", - "source": "http://none.example", - "license": "MIT", - - "config": { - "two-digit multiple of ten": { - "exclusiveMaximum": true, - "type": "number", - "multipleOf": 10, - "maximum": 100 - } - }, - - "inputs": { - "any text file <= 100 KB": { - "base": "file", - "name": { - "pattern": "^.*.txt$" - }, - "size": { - "maximum": 
100000 - } - } - } - }, - "input": {} -} diff --git a/swagger/examples/gears_list_just_name.json b/swagger/examples/gears_list_just_name.json deleted file mode 100755 index e56e4078e..000000000 --- a/swagger/examples/gears_list_just_name.json +++ /dev/null @@ -1,11 +0,0 @@ -[ - { - "name": "dicom_mr_classifier" - }, - { - "name": "dcm_convert" - }, - { - "name": "qa-report-fmri" - } -] \ No newline at end of file diff --git a/swagger/examples/output/gear-list.json b/swagger/examples/output/gear-list.json new file mode 100644 index 000000000..490934c94 --- /dev/null +++ b/swagger/examples/output/gear-list.json @@ -0,0 +1,35 @@ +[{ + "category": "converter", + "gear": { + "inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" + } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "created": "2017-12-20T00:09:50.381000+00:00", + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + }, + "modified": "2017-12-20T00:09:50.381000+00:00", + "_id": "5a39aa4e07a393001b663910" +}] \ No newline at end of file diff --git a/swagger/examples/output/gear.json b/swagger/examples/output/gear.json new file mode 100644 index 000000000..841f2b536 --- /dev/null +++ b/swagger/examples/output/gear.json @@ -0,0 +1,35 @@ +{ + "category": "converter", + "gear": { + 
"inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" + } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "created": "2017-12-20T00:09:50.381000+00:00", + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + }, + "modified": "2017-12-20T00:09:50.381000+00:00", + "_id": "5a39aa4e07a393001b663910" +} \ No newline at end of file diff --git a/swagger/paths/gears.yaml b/swagger/paths/gears.yaml index b34f53681..fd6191b64 100644 --- a/swagger/paths/gears.yaml +++ b/swagger/paths/gears.yaml @@ -5,12 +5,36 @@ tags: - gears responses: - default: - description: '' + '200': + description: 'Returns a list of gears installed on the system' + schema: + $ref: schemas/output/gear-list.json + examples: + response: + $ref: examples/output/gear-list.json # TODO: Can we make the parameter here consistent, or split # this into two separate APIs? 
/gears/{GearIdOrName}: + get: + summary: Retrieve details about a specific gear + operationId: get_gear + tags: + - gears + parameters: + - name: GearIdOrName + in: path + type: string + required: true + description: Id of the gear to interact with + responses: + '200': + description: 'Details about a single gear' + schema: + $ref: schemas/output/gear.json + examples: + response: + $ref: examples/output/gear.json post: summary: Create or update a gear. description: | @@ -18,44 +42,34 @@ Otherwise, the specified gear will be updated operationId: add_gear parameters: - - required: true - description: Name of the gear to interact with - type: string + - name: GearIdOrName in: path - name: GearIdOrName - tags: - - gears - responses: - default: - description: '' - get: - summary: Retrieve details about a specific gear - operationId: get_gear + type: string + required: true + description: Name of the gear to interact with + - name: body + in: body + required: true + schema: + $ref: schemas/input/gear.json tags: - gears - parameters: - - required: true - description: Id of the gear to interact with - type: string - in: path - name: GearIdOrName responses: '200': - description: '' + description: 'The gear was created or updated successfully' schema: - example: - $ref: examples/gear_full.json + $ref: schemas/output/collection-new.json delete: summary: Delete a gear (not recommended) operationId: delete_gear tags: - gears parameters: - - required: true - description: Id of the gear to interact with - type: string + - name: GearIdOrName in: path - name: GearIdOrName + type: string + required: true + description: Id of the gear to interact with responses: '200': description: Gear was deleted diff --git a/swagger/schemas/definitions/gear.json b/swagger/schemas/definitions/gear.json new file mode 100644 index 000000000..674ac4fb0 --- /dev/null +++ b/swagger/schemas/definitions/gear.json @@ -0,0 +1,182 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "$comment": 
"This is based on https://github.com/flywheel-io/gears/blob/master/spec/manifest.schema.json. It is NOT used for validation, just for doc/code gen.", + "definitions": { + "gear-directive": { + "type": "object", + "description": "A schema directive." + }, + "gear-author": { + "type": "string", + "description": "The author of this gear." + }, + "gear-maintainer": { + "type": "string", + "description": "(optional) The maintainer of this gear. Can be used to distinguish the algorithm author from the gear maintainer." + }, + "gear-cite": { + "type": "string", + "description": "(optional) Any citations relevant to the algorithm(s) or work present in the gear." + }, + "gear-config": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/gear-directive" + }, + "description": "Schema snippets describing the options this gear consumes. Not currently processed." + }, + "gear-custom": { + "type": "object", + "description": "A place for gear authors to put arbitrary information." + }, + "gear-description": { + "type": "string", + "description": "A brief description of the gear's purpose. Ideally 1-4 sentences." + }, + "gear-environment": { + "type": "object", + "additionalProperties": { "type": "string" }, + "description": "Environment variables that should be set for the gear." + }, + "gear-command": { + "type": "string", + "description": "If provided, the starting command for the gear, rather than /flywheel/v0/run. Will be templated according to the spec." + }, + "gear-label": { + "type": "string", + "description": "The human-friendly name of this gear." + }, + "gear-license": { + "type": "string", + "description": "Software license of the gear" + }, + "gear-name": { + "type": "string", + "pattern": "^[a-z0-9\\-]+$", + "description": "The identification of this gear." + }, + "gear-uri": { + "type": "string", + "description": "A valid URI, or empty string." 
+ }, + "gear-input-item": { + "description": "Describes a gear input", + "properties": { + "base": { + "type": "string", + "enum": [ "file", "api-key" ], + "description": "The type of gear input." + }, + "description": { + "type": "string", + "description": "Hackaround for description not technically being a schema directive" + }, + "optional": { + "type": "boolean", + "description": "Allow the gear to mark an input file as optional." + } + }, + "required": [ "base" ], + "additionalProperties": { + "$ref": "#/definitions/gear-directive" + } + }, + "gear-inputs": { + "type": "object", + "additionalProperties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/gear-input-item" + } + }, + "description": "Schema snippets describing the inputs this gear consumes." + }, + "gear-source": { + "$ref": "#/definitions/gear-uri", + "description": "The URL where the source code of this gear can be found. Leave blank if none." + }, + "gear-url": { + "$ref": "#/definitions/gear-uri", + "description": "The URL where more information about this gear can be found. Leave blank if none." + }, + "gear-version": { + "type": "string", + "description": "A human-friendly string explaining the release version of this gear. 
Example: 3.2.1" + }, + + "gear-exchange": { + "type": "object", + "description": "Metadata object that describes the origin of a gear version", + "properties": { + "git-commit": { + "type": "string", + "description": "The SHA-1 hash referring to the git commit" + }, + "rootfs-hash": { + "type": "string", + "description": "The cryptographic hash of the root filesystem in the form of \"algorithm:\"" + }, + "rootfs-url": { + "type": "string", + "description": "The absolute URL of the gear's root file system" + } + }, + "required": [ + "git-commit", + "rootfs-hash", + "rootfs-url" + ] + }, + "gear": { + "type": "object", + "description": "Gear manifest", + "properties": { + "author": { "$ref": "#/definitions/gear-author" }, + "maintainer": { "$ref": "#/definitions/gear-maintainer" }, + "cite": { "$ref": "#/definitions/gear-cite" }, + "config": { "$ref": "#/definitions/gear-config" }, + "custom": { "$ref": "#/definitions/gear-custom" }, + "description": { "$ref": "#/definitions/gear-description" }, + "environment": { "$ref": "#/definitions/gear-environment" }, + "command": { "$ref": "#/definitions/gear-command" }, + "inputs": { "$ref": "#/definitions/gear-inputs" }, + "label": { "$ref": "#/definitions/gear-label" }, + "license": { "$ref": "#/definitions/gear-license" }, + "name": { "$ref": "#/definitions/gear-name" }, + "source": { "$ref": "#/definitions/gear-source" }, + "url": { "$ref": "#/definitions/gear-url" }, + "version": { "$ref": "#/definitions/gear-version" } + }, + "required": [ + "author", + "config", + "description", + "inputs", + "label", + "license", + "name", + "source", + "url", + "version" + ], + "additionalProperties": false + }, + "gear-category": { + "type": "string", + "enum": [ "utility", "analysis", "converter", "qa" ], + "description": "The gear category" + }, + "gear-doc": { + "type": "object", + "properties": { + "_id": { "$ref": "common.json#/definitions/objectid" }, + "category": { "$ref": "#/definitions/gear-category" }, + "gear": { "$ref": 
"#/definitions/gear" }, + "exchange": { "$ref": "#/definitions/gear-exchange" }, + "created": { "$ref":"created-modified.json#/definitions/created"}, + "modified": { "$ref":"created-modified.json#/definitions/modified"} + }, + "description": "A full gear description, including manifest and exchange information" + } + } +} diff --git a/swagger/schemas/input/gear.json b/swagger/schemas/input/gear.json new file mode 100644 index 000000000..1f700aa54 --- /dev/null +++ b/swagger/schemas/input/gear.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "Analysis", + "type": "object", + "allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}], + "example": { + "category": "converter", + "gear": { + "inputs": { + "audio": { + "base": "file", + "description": "Any audio file. Plain speech suggested!" + } + }, + "maintainer": "Nathaniel Kofalt", + "description": "Detects the speech content of an audio file, using the machine-learning DeepSpeech library by Mozilla.", + "license": "Other", + "author": "Nathaniel Kofalt", + "url": "", + "label": "Speech Recognition", + "source": "https://github.com/mozilla/DeepSpeech", + "version": "1", + "custom": { + "gear-builder": { + "image": "gear-builder-kdfqapbezk-20171219165918", + "container": "c15189b625a0ea450cafbb24ef0df03c26cc8cf151181976ec4289801e191032" + } + }, + "config": {}, + "name": "speech-recognition" + }, + "exchange": { + "git-commit": "local", + "rootfs-hash": "sha384:e01d925f90b097b554be0f802ef6ebb9f07000d7a6a2a0c3a25dac26893d4ac2414381e2c8e60f4b58b27c7fe8e56099", + "rootfs-url": "/api/gears/temp/5a39aa4e07a393001b663910" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/output/gear-list.json b/swagger/schemas/output/gear-list.json new file mode 100644 index 000000000..e022766d0 --- /dev/null +++ b/swagger/schemas/output/gear-list.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"array", + "items":{ + 
"allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}] + } +} diff --git a/swagger/schemas/output/gear.json b/swagger/schemas/output/gear.json new file mode 100644 index 000000000..b64b79625 --- /dev/null +++ b/swagger/schemas/output/gear.json @@ -0,0 +1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf":[{"$ref":"../definitions/gear.json#/definitions/gear-doc"}] +} From 18c4b8d8dfd668cdda84fbe6de8b3632a59c56f8 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 31 Jan 2018 13:50:32 -0600 Subject: [PATCH 08/53] Add schema and endpoint for jobs Added input schema for adding a job, and the endpoint for GET /job/{JobId}/logs. --- api/config.py | 1 + swagger/examples/input/job-new.json | 20 -------------------- swagger/paths/jobs.yaml | 14 ++++++++++++-- swagger/schemas/definitions/job.json | 20 ++++++++++++++++++++ swagger/schemas/input/job-new.json | 25 +++++++++++++++++++++++++ swagger/schemas/output/job-log.json | 11 +++++++++++ 6 files changed, 69 insertions(+), 22 deletions(-) delete mode 100755 swagger/examples/input/job-new.json create mode 100644 swagger/schemas/input/job-new.json create mode 100644 swagger/schemas/output/job-log.json diff --git a/api/config.py b/api/config.py index 0a76e6d25..88696e658 100644 --- a/api/config.py +++ b/api/config.py @@ -164,6 +164,7 @@ def apply_env_variables(config): 'group-new.json', 'group-update.json', 'info_update.json', + 'job-new.json', 'note.json', 'packfile.json', 'permission.json', diff --git a/swagger/examples/input/job-new.json b/swagger/examples/input/job-new.json deleted file mode 100755 index 934e2e763..000000000 --- a/swagger/examples/input/job-new.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "gear_id": "aex", - "inputs": { - "dicom": { - "type": "acquisition", - "id": "573c9e6a844eac7fc01747cd", - "name" : "1_1_dicom.zip" - } - }, - "config": { - "two-digit multiple of ten": 20 - }, - "destination": { - "type": "acquisition", - "id": 
"573c9e6a844eac7fc01747cd" - }, - "tags": [ - "ad-hoc" - ] -} diff --git a/swagger/paths/jobs.yaml b/swagger/paths/jobs.yaml index 9c91568f2..48daa3269 100644 --- a/swagger/paths/jobs.yaml +++ b/swagger/paths/jobs.yaml @@ -7,9 +7,9 @@ parameters: - name: body in: body + required: true schema: - example: - $ref: examples/input/job-new.json + $ref: schemas/input/job-new.json responses: '200': description: '' @@ -148,6 +148,16 @@ type: string in: path name: JobId + get: + summary: Get job logs + operationId: get_job_logs + tags: + - jobs + responses: + '200': + description: The current job log + schema: + $ref: schemas/output/job-log.json post: summary: Add logs to a job. operationId: add_logs diff --git a/swagger/schemas/definitions/job.json b/swagger/schemas/definitions/job.json index 7b27244e2..b1d8a588b 100644 --- a/swagger/schemas/definitions/job.json +++ b/swagger/schemas/definitions/job.json @@ -79,6 +79,26 @@ ], "additionalProperties":false }, + "job-log-statement": { + "type": "object", + "properties": { + "fd": { "type": "integer" }, + "msg": { "type": "string" } + }, + "required": ["fd", "msg"] + }, + "job-log": { + "type": "object", + "properties": { + "id": {"$ref":"common.json#/definitions/objectid"}, + "logs": { + "type": "array", + "items": { + "$ref": "#/definitions/job-log-statement" + } + } + } + }, "saved_files": { "type": "array", "items": {"$ref": "file.json#/definitions/name"} diff --git a/swagger/schemas/input/job-new.json b/swagger/schemas/input/job-new.json new file mode 100644 index 000000000..eed7eb7f7 --- /dev/null +++ b/swagger/schemas/input/job-new.json @@ -0,0 +1,25 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/job.json#/definitions/job-input"}], + "example": { + "gear_id": "aex", + "inputs": { + "dicom": { + "type": "acquisition", + "id": "573c9e6a844eac7fc01747cd", + "name" : "1_1_dicom.zip" + } + }, + "config": { + "two-digit multiple of ten": 20 + }, + 
"destination": { + "type": "acquisition", + "id": "573c9e6a844eac7fc01747cd" + }, + "tags": [ + "ad-hoc" + ] + } +} diff --git a/swagger/schemas/output/job-log.json b/swagger/schemas/output/job-log.json new file mode 100644 index 000000000..810151388 --- /dev/null +++ b/swagger/schemas/output/job-log.json @@ -0,0 +1,11 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/job.json#/definitions/job-log"}], + "example": { + "_id": "57ac7394c700190017123fb8", + "logs": [ + { "fd": 1, "msg": "Hello World!" } + ] + } +} From 275e0f5cf7a751026ded069c7600028cf36e82d7 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 6 Feb 2018 16:01:29 -0600 Subject: [PATCH 09/53] Add docs for modify info --- swagger/paths/acquisitions.yaml | 5 +++ swagger/paths/collections.yaml | 6 +++ swagger/paths/projects.yaml | 5 +++ swagger/paths/sessions.yaml | 5 +++ swagger/schemas/output/file-info.json | 21 ++++++++++ swagger/templates/container-item-info.yaml | 33 +++++++++++++++ swagger/templates/file-item-info.yaml | 47 ++++++++++++++++++++++ 7 files changed, 122 insertions(+) create mode 100644 swagger/schemas/output/file-info.json create mode 100644 swagger/templates/container-item-info.yaml create mode 100644 swagger/templates/file-item-info.yaml diff --git a/swagger/paths/acquisitions.yaml b/swagger/paths/acquisitions.yaml index f13775733..d32b21d95 100644 --- a/swagger/paths/acquisitions.yaml +++ b/swagger/paths/acquisitions.yaml @@ -15,6 +15,9 @@ $template_arguments: update-input-schema: schemas/input/acquisition-update.json get-output-schema: schemas/output/acquisition.json +/acquisitions/{AcquisitionId}/info: + $template: templates/container-item-info.yaml + # ===== Tags ===== /acquisitions/{AcquisitionId}/tags: $template: templates/tags.yaml @@ -34,6 +37,8 @@ $template_arguments: $template: templates/file-list-upload.yaml /acquisitions/{AcquisitionId}/files/{FileName}: $template: templates/file-item.yaml 
+/acquisitions/{AcquisitionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /acquisitions/{AcquisitionId}/notes: diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index d2f1a9880..0a1cadd72 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -53,6 +53,7 @@ $template_arguments: examples: response: $ref: examples/output/collection-curators-list.json + /collections/{CollectionId}: parameters: - required: true @@ -96,6 +97,9 @@ $template_arguments: '200': description: Collection was deleted +/collections/{CollectionId}/info: + $template: templates/container-item-info.yaml + /collections/{CollectionId}/sessions: parameters: - required: true @@ -161,6 +165,8 @@ $template_arguments: $template: templates/permissions.yaml /collections/{CollectionId}/permissions/{UserId}: $template: templates/permissions-user.yaml +/collections/{CollectionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /collections/{CollectionId}/notes: diff --git a/swagger/paths/projects.yaml b/swagger/paths/projects.yaml index 62b678bc0..fbf5bc8fd 100644 --- a/swagger/paths/projects.yaml +++ b/swagger/paths/projects.yaml @@ -15,6 +15,9 @@ $template_arguments: update-input-schema: schemas/input/project-update.json get-output-schema: schemas/output/project.json +/projects/{ProjectId}/info: + $template: templates/container-item-info.yaml + /projects/groups: get: summary: List all groups which have a project in them @@ -231,6 +234,8 @@ $template_arguments: $template: templates/file-list-upload.yaml /projects/{ProjectId}/files/{FileName}: $template: templates/file-item.yaml +/projects/{ProjectId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Permissions ===== /projects/{ProjectId}/permissions: diff --git a/swagger/paths/sessions.yaml b/swagger/paths/sessions.yaml index a9664efdc..e39d6d866 100644 --- a/swagger/paths/sessions.yaml +++ 
b/swagger/paths/sessions.yaml @@ -15,6 +15,9 @@ $template_arguments: update-input-schema: schemas/input/session.json get-output-schema: schemas/output/session.json +/sessions/{SessionId}/info: + $template: templates/container-item-info.yaml + '/sessions/{SessionId}/jobs': parameters: - in: path @@ -60,6 +63,8 @@ $template_arguments: $template: templates/file-list-upload.yaml /sessions/{SessionId}/files/{FileName}: $template: templates/file-item.yaml +/sessions/{SessionId}/files/{FileName}/info: + $template: templates/file-item-info.yaml # ===== Notes ===== /sessions/{SessionId}/notes: diff --git a/swagger/schemas/output/file-info.json b/swagger/schemas/output/file-info.json new file mode 100644 index 000000000..43f6fc036 --- /dev/null +++ b/swagger/schemas/output/file-info.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf":[{"$ref":"../definitions/file.json#/definitions/file-output"}], + "example": { + "origin": { + "type": "job", + "id": "58063f24e5dc5b001657a87f" + }, + "mimetype": "application/octet-stream", + "hash": "v0-sha384-12188e00a26650b2baa3f0195337dcf504f4362bb2136eef0cdbefb57159356b1355a0402fca0ab5ab081f21c305e5c2", + "name": "cortical_surface_right_hemisphere.obj", + "tags": [], + "measurements": [], + "modified": "2016-10-18T15:26:35.701000+00:00", + "modality": null, + "size": 21804112, + "type": "None", + "info": {} + } +} diff --git a/swagger/templates/container-item-info.yaml b/swagger/templates/container-item-info.yaml new file mode 100644 index 000000000..b30b37a25 --- /dev/null +++ b/swagger/templates/container-item-info.yaml @@ -0,0 +1,33 @@ +parameters: + - name: resource + type: string + required: true + - name: parameter + type: string + required: true + - name: tag + type: string + required: true +template: | + parameters: + - required: true + type: string + in: path + name: '{{parameter}}' + post: + summary: Update or replace info for a {{resource}}. 
+ operationId: modify_{{resource}}_info + x-sdk-modify-info: true + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/info_update.json + responses: + '200': + description: 'The info was updated successfully' + + diff --git a/swagger/templates/file-item-info.yaml b/swagger/templates/file-item-info.yaml new file mode 100644 index 000000000..ef53a2f5b --- /dev/null +++ b/swagger/templates/file-item-info.yaml @@ -0,0 +1,47 @@ +parameters: + - name: resource + type: string + required: true + - name: parameter + type: string + required: true + - name: tag + type: string + required: true +template: | + parameters: + - required: true + type: string + in: path + name: '{{parameter}}' + - required: true + type: string + in: path + name: FileName + get: + summary: Get info for a particular file. + operationId: get_{{resource}}_file_info + tags: + - '{{tag}}' + responses: + '200': + description: 'The file object, including info' + schema: + $ref: schemas/output/file-info.json + post: + summary: Update info for a particular file. 
+ operationId: modify_{{resource}}_file_info + x-sdk-modify-info: true + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/info_update.json + responses: + '200': + $ref: '#/responses/200:modified-with-count' + + From 89de560266c439cfcbff28ce7499f50f7a91da91 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 7 Feb 2018 11:16:02 -0600 Subject: [PATCH 10/53] Add documentation for PUT on file endpoints --- swagger/responses/index.yaml | 16 ++++++++++++++++ swagger/templates/file-item.yaml | 15 +++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/swagger/responses/index.yaml b/swagger/responses/index.yaml index 03986aaa2..1b4e4c486 100644 --- a/swagger/responses/index.yaml +++ b/swagger/responses/index.yaml @@ -22,6 +22,22 @@ example: modified: 1 +'200:modified-with-count-and-jobs': + description: The number of records modified and number of jobs started. + schema: + type: object + properties: + modified: + type: integer + jobs_triggered: + type: integer + required: + - modified + - jobs_triggered + example: + modified: 1 + jobs_triggered: 0 + '400:invalid-body-json': description: | JSON did not validate against schema for this endpoint. 
diff --git a/swagger/templates/file-item.yaml b/swagger/templates/file-item.yaml index 76e168014..815b29fe9 100644 --- a/swagger/templates/file-item.yaml +++ b/swagger/templates/file-item.yaml @@ -73,6 +73,21 @@ template: | default: description: '' + put: + summary: Modify a file's attributes + operationId: modify_{{resource}}_file + tags: + - '{{tag}}' + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/file-update.json + responses: + '200': + $ref: '#/responses/200:modified-with-count-and-jobs' + delete: summary: Delete a file operationId: delete_{{resource}}_file From ea0d39b3a001e6784f41239eeca2e253f3a479da Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 7 Feb 2018 15:23:23 -0600 Subject: [PATCH 11/53] Add missing parameter to collection/acquisitions Added "session" query parameter that limits the returned acquisitions. --- swagger/paths/collections.yaml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 0a1cadd72..2e0bc0ebb 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -122,10 +122,14 @@ $template_arguments: /collections/{CollectionId}/acquisitions: parameters: - - required: true - type: string + - name: CollectionId in: path - name: CollectionId + type: string + required: true + - name: session + in: query + type: string + description: The id of a session, to which the acquisitions returned will be restricted get: summary: List acquisitions in a collection operationId: get_collection_acquisitions From 58bc7f367256636545c037c60b8217901bc02684 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 7 Feb 2018 15:24:57 -0600 Subject: [PATCH 12/53] Add missing gear invocation endpoint This endpoint returns the JSON schema for gear configuration. 
--- swagger/paths/gears.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/swagger/paths/gears.yaml b/swagger/paths/gears.yaml index fd6191b64..6f9ccac10 100644 --- a/swagger/paths/gears.yaml +++ b/swagger/paths/gears.yaml @@ -73,3 +73,23 @@ responses: '200': description: Gear was deleted + +/gears/{GearId}/invocation: + parameters: + - name: GearId + in: path + type: string + required: true + description: Id of the gear to interact with + get: + summary: Get a schema for invoking a gear. + operationId: get_gear_invocation + tags: + - gears + responses: + '200': + description: The gear invocation schema. + schema: + type: object + + From c11464f45d9e4810cc9830d9f99c18aace8debdb Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 08:50:08 -0600 Subject: [PATCH 13/53] Rename update_job endpoint --- swagger/paths/jobs.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/swagger/paths/jobs.yaml b/swagger/paths/jobs.yaml index 48daa3269..c2199da76 100644 --- a/swagger/paths/jobs.yaml +++ b/swagger/paths/jobs.yaml @@ -89,7 +89,7 @@ 'running' state. Accepts the same body as /api/jobs/add , except all fields are optional. - operationId: update_job + operationId: modify_job tags: - jobs responses: @@ -98,6 +98,7 @@ parameters: - name: body in: body + required: true schema: example: $ref: examples/input/job-update.json From 1631af8abf16ad72ac9989ea04c34c1b663c6f0e Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 08:55:41 -0600 Subject: [PATCH 14/53] Add get_analyses endpoint documentation This documents the endpoints for ///analyses as well as ////analyses. 
--- swagger/paths/acquisitions.yaml | 2 + swagger/paths/analyses.yaml | 49 +++++++++++++++++++++++ swagger/paths/collections.yaml | 2 + swagger/paths/projects.yaml | 2 + swagger/paths/sessions.yaml | 29 ++------------ swagger/schemas/definitions/analysis.json | 20 ++++++++- swagger/schemas/output/analyses-list.json | 5 +++ swagger/templates/analyses-list.yaml | 38 ++++++++++++++++-- swagger/templates/analysis-item.yaml | 5 +++ swagger/templates/analysis-notes.yaml | 1 + 10 files changed, 122 insertions(+), 31 deletions(-) create mode 100644 swagger/schemas/output/analyses-list.json diff --git a/swagger/paths/acquisitions.yaml b/swagger/paths/acquisitions.yaml index d32b21d95..5ce6d36e4 100644 --- a/swagger/paths/acquisitions.yaml +++ b/swagger/paths/acquisitions.yaml @@ -49,6 +49,8 @@ $template_arguments: # ===== Analyses ===== /acquisitions/{AcquisitionId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /acquisitions/{AcquisitionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml arguments: diff --git a/swagger/paths/analyses.yaml b/swagger/paths/analyses.yaml index c6a59ad69..a297dd39b 100644 --- a/swagger/paths/analyses.yaml +++ b/swagger/paths/analyses.yaml @@ -11,3 +11,52 @@ $template_arguments: /analyses/{AnalysisId}/files/{Filename}: $template: templates/analysis-files-create-ticket-filename.yaml + +/{ContainerName}/{ContainerId}/{SubcontainerName}/analyses: + parameters: + - name: ContainerName + in: path + type: string + required: true + enum: + - groups + - projects + - sessions + - acquisitions + - collections + description: The parent container type + - name: ContainerId + in: path + type: string + required: true + description: The parent container id + - name: SubcontainerName + in: path + type: string + required: true + enum: + - all + - projects + - sessions + - acquisitions + description: The sub container type + get: + summary: Get nested analyses for a container + description: > + Returns analyses 
that belong to containers of the specified type that belong + to ContainerId. + + For example: `projects/{ProjectId}/acquisitions/analyses` will return any analyses + that have an acquisition that is under that project as a parent. + + The `all` keyword is also supported, for example: projects/{ProjectId}/all/analyses + will return any analyses that have any session or acquisition or the project itself as a parent. + operationId: get_analyses + tags: + - analyses + responses: + '200': + description: The list of analyses + schema: + $ref: schemas/output/analyses-list.json + diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 2e0bc0ebb..29014466f 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -181,6 +181,8 @@ $template_arguments: # ===== Analyses ===== /collections/{CollectionId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /collections/{CollectionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml arguments: diff --git a/swagger/paths/projects.yaml b/swagger/paths/projects.yaml index fbf5bc8fd..f0248aa24 100644 --- a/swagger/paths/projects.yaml +++ b/swagger/paths/projects.yaml @@ -252,6 +252,8 @@ $template_arguments: # ===== Analyses ===== /projects/{ProjectId}/analyses: $template: templates/analyses-list.yaml + arguments: + allowCreate: true /projects/{ProjectId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml arguments: diff --git a/swagger/paths/sessions.yaml b/swagger/paths/sessions.yaml index e39d6d866..5e5c0a71d 100644 --- a/swagger/paths/sessions.yaml +++ b/swagger/paths/sessions.yaml @@ -92,32 +92,9 @@ $template_arguments: # ===== Analyses ===== /sessions/{SessionId}/analyses: - parameters: - - in: path - type: string - required: true - name: SessionId - post: - summary: Create an analysis and upload files. - description: | - When query param "job" is "true", send JSON to create - an analysis and job. 
Otherwise, multipart/form-data - to upload files and create an analysis. - operationId: add_session_analysis - tags: - - 'sessions' - parameters: - - in: body - name: body - schema: - $ref: schemas/input/analysis-job.json - - in: query - type: boolean - name: job - responses: - '200': - description: '' - + $template: templates/analyses-list.yaml + arguments: + allowCreate: true /sessions/{SessionId}/analyses/{AnalysisId}: $template: templates/analysis-item.yaml arguments: diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index fa1e2c89b..55d915526 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -40,7 +40,7 @@ "job":{ "oneOf":[ {"$ref":"common.json#/definitions/objectid"}, - {"type":"object"} + {"$ref": "job.json#/definitions/job-output"} ] }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, @@ -52,6 +52,24 @@ }, "required":["_id", "files", "label", "user", "created", "modified"] }, + "analysis-list-entry":{ + "type":"object", + "properties":{ + "_id":{"$ref":"common.json#/definitions/objectid"}, + "files":{ + "type":"array", + "items":{"$ref":"file.json#/definitions/file-entry"} + }, + "job": {"$ref":"common.json#/definitions/objectid"}, + "notes": {"$ref":"note.json#/definitions/notes-list-output"}, + "description": {"$ref":"common.json#/definitions/description"}, + "label": {"$ref":"common.json#/definitions/label"}, + "user": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "modified": {"$ref":"created-modified.json#/definitions/modified"} + }, + "required":["_id", "files", "label", "user", "created", "modified"] + }, "analysis-job": { "type": "object", "properties":{ diff --git a/swagger/schemas/output/analyses-list.json b/swagger/schemas/output/analyses-list.json new file mode 100644 index 000000000..5a78c08a9 --- /dev/null +++ b/swagger/schemas/output/analyses-list.json @@ -0,0 
+1,5 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/analysis.json#/definitions/analysis-list-entry"} +} diff --git a/swagger/templates/analyses-list.yaml b/swagger/templates/analyses-list.yaml index 6d76dc2d1..d4ee05254 100644 --- a/swagger/templates/analyses-list.yaml +++ b/swagger/templates/analyses-list.yaml @@ -8,23 +8,53 @@ parameters: - name: tag type: string required: true + - name: allowCreate + type: boolean + required: false template: | parameters: - required: true type: string in: path name: '{{parameter}}' + get: + summary: Get analyses for {{resource}}. + description: Returns analyses that directly belong to this resource. + operationId: get_{{resource}}_analyses + tags: + - '{{tag}}' + responses: + '200': + description: The list of analyses + schema: + $ref: schemas/output/analyses-list.json + + {{#allowCreate}} post: summary: Create an analysis and upload files. + description: | + When query param "job" is "true", send JSON to create + an analysis and job. Otherwise, multipart/form-data + to upload files and create an analysis. operationId: add_{{resource}}_analysis tags: - '{{tag}}' consumes: + - application/json - multipart/form-data parameters: - - in: formData - name: formData - type: string + - in: body + name: body + required: true + schema: + $ref: schemas/input/analysis-job.json + - name: job + in: query + type: boolean + description: Return job as an object instead of an id responses: '200': - description: '' + description: Returns the id of the analysis that was created. 
+ schema: + $ref: schemas/output/container-new.json + {{/allowCreate}} diff --git a/swagger/templates/analysis-item.yaml b/swagger/templates/analysis-item.yaml index 3b2251535..958017691 100644 --- a/swagger/templates/analysis-item.yaml +++ b/swagger/templates/analysis-item.yaml @@ -31,6 +31,11 @@ template: | {{/resource}} tags: - '{{tag}}' + parameters: + - name: inflate_job + in: query + type: boolean + description: Return job as an object instead of an id responses: '200': description: '' diff --git a/swagger/templates/analysis-notes.yaml b/swagger/templates/analysis-notes.yaml index 12168f23c..58dc6b9ae 100644 --- a/swagger/templates/analysis-notes.yaml +++ b/swagger/templates/analysis-notes.yaml @@ -26,6 +26,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/note.json responses: From 1b250ce69c1bebcb7b813e1191611d6d4e0c07d2 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 09:21:43 -0600 Subject: [PATCH 15/53] Fix failing tests for analyses --- swagger/examples/output/analysis.json | 4 ++-- swagger/schemas/definitions/analysis.json | 2 +- swagger/schemas/output/analyses-list.json | 27 ++++++++++++++++++++++- 3 files changed, 29 insertions(+), 4 deletions(-) diff --git a/swagger/examples/output/analysis.json b/swagger/examples/output/analysis.json index bb0c4caae..999bdd6a2 100644 --- a/swagger/examples/output/analysis.json +++ b/swagger/examples/output/analysis.json @@ -44,7 +44,6 @@ } }, "attempt": 1, - "name": "cortex-demo", "tags": ["ad-hoc", "cortex-demo", "analysis"], "destination": { "type": "analysis", @@ -82,7 +81,8 @@ "created": "2016-10-18T17:45:11.816000+00:00", "state": "complete", "config": {}, - "id": "58065fa7e5dc5b001457a882" + "id": "58065fa7e5dc5b001457a882", + "gear_id": "58065fa7e5dc5b001457a882" }, "user": "canakgun@flywheel.io", "_id": "58065fa7e5dc5b001457a881" diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index 
55d915526..49f7424f7 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -58,7 +58,7 @@ "_id":{"$ref":"common.json#/definitions/objectid"}, "files":{ "type":"array", - "items":{"$ref":"file.json#/definitions/file-entry"} + "items":{"$ref":"file.json#/definitions/file"} }, "job": {"$ref":"common.json#/definitions/objectid"}, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, diff --git a/swagger/schemas/output/analyses-list.json b/swagger/schemas/output/analyses-list.json index 5a78c08a9..988581c68 100644 --- a/swagger/schemas/output/analyses-list.json +++ b/swagger/schemas/output/analyses-list.json @@ -1,5 +1,30 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "type": "array", - "items": {"$ref":"../definitions/analysis.json#/definitions/analysis-list-entry"} + "items": {"$ref":"../definitions/analysis.json#/definitions/analysis-list-entry"}, + "example": [{ + "files": [{ + "origin": { + "type": "job", + "id": "58063f24e5dc5b001657a87f" + }, + "mimetype": "application/octet-stream", + "hash": "v0-sha384-12188e00a26650b2baa3f0195337dcf504f4362bb2136eef0cdbefb57159356b1355a0402fca0ab5ab081f21c305e5c2", + "name": "cortical_surface_right_hemisphere.obj", + "tags": [], + "measurements": [], + "modified": "2016-10-18T15:26:35.701000+00:00", + "modality": null, + "input": true, + "size": 21804112, + "type": "None", + "info": {} + }], + "created": "2016-10-18T17:45:11.778000+00:00", + "modified": "2016-10-18T17:45:11.778000+00:00", + "label": "cortex-demo 10/18/2016 13:45:5", + "job": "58065fa7e5dc5b001457a882", + "user": "canakgun@flywheel.io", + "_id": "58065fa7e5dc5b001457a881" + }] } From 866949f9451333499160de0e467fc4f7ec1f0ef6 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 11:28:46 -0600 Subject: [PATCH 16/53] Add schemas for batch job scheduling Also refactored a few schemas to better match the SDK. 
--- api/config.py | 1 + swagger/paths/batch.yaml | 49 ++++++------ swagger/schemas/definitions/acquisition.json | 2 +- swagger/schemas/definitions/batch.json | 81 ++++++++++++++++++++ swagger/schemas/definitions/collection.json | 2 +- swagger/schemas/definitions/container.json | 31 +++++++- swagger/schemas/definitions/file.json | 13 +++- swagger/schemas/definitions/job.json | 25 ++---- swagger/schemas/definitions/project.json | 2 +- swagger/schemas/definitions/session.json | 2 +- swagger/schemas/definitions/tag.json | 2 +- swagger/schemas/input/propose-batch.json | 14 ++++ swagger/schemas/output/batch-cancel.json | 8 ++ swagger/schemas/output/batch-list.json | 23 ++++++ swagger/schemas/output/batch-proposal.json | 18 +++++ swagger/schemas/output/batch.json | 23 ++++++ 16 files changed, 243 insertions(+), 53 deletions(-) create mode 100644 swagger/schemas/definitions/batch.json create mode 100644 swagger/schemas/input/propose-batch.json create mode 100644 swagger/schemas/output/batch-cancel.json create mode 100644 swagger/schemas/output/batch-list.json create mode 100644 swagger/schemas/output/batch-proposal.json create mode 100644 swagger/schemas/output/batch.json diff --git a/api/config.py b/api/config.py index 88696e658..40f812b9a 100644 --- a/api/config.py +++ b/api/config.py @@ -171,6 +171,7 @@ def apply_env_variables(config): 'project.json', 'project-template.json', 'project-update.json', + 'propose-batch.json', 'rule-new.json', 'rule-update.json', 'session.json', diff --git a/swagger/paths/batch.yaml b/swagger/paths/batch.yaml index b407f82c3..900d45550 100644 --- a/swagger/paths/batch.yaml +++ b/swagger/paths/batch.yaml @@ -2,34 +2,31 @@ get: summary: Get a list of batch jobs the user has created. description: Requires login. 
- operationId: get_all_batch_jobs + operationId: get_all_batches tags: - batch responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch-list.json + description: 'Returns a list of all known batch jobs for the user' + schema: + $ref: schemas/output/batch-list.json post: summary: Create a batch job proposal and insert it as 'pending'. - operationId: create_batch_job + operationId: propose_batch tags: - batch parameters: - name: body in: body - description: '' + required: true + description: The batch proposal schema: - type: object - # Schema file does not exist - # $ref: schemas/input/batch-insert.json + $ref: schemas/input/propose-batch.json responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch-insert.json + description: The batch proposal object that was created + schema: + $ref: schemas/output/batch-proposal.json /batch/{BatchId}: parameters: @@ -40,19 +37,18 @@ get: summary: Get batch job details. parameters: - - in: query + - name: jobs + in: query type: boolean - name: jobs description: If true, return job objects instead of job ids - operationId: get_batch_job + operationId: get_batch tags: - batch responses: '200': - description: '' - # Schema file does not exist - # schema: - # $ref: schemas/output/batch.json + description: 'The batch object' + schema: + $ref: schemas/output/batch.json '404': $ref: '#/responses/404:resource-not-found' @@ -67,7 +63,7 @@ description: | Creates jobs from proposed inputs, returns jobs enqueued. Moves 'pending' batch job to 'running'. - operationId: run_batch_job + operationId: start_batch tags: - batch responses: @@ -87,12 +83,11 @@ description: | Cancels jobs that are still pending, returns number of jobs cancelled. Moves a 'running' batch job to 'cancelled'. 
- operationId: cancel_batch_job + operationId: cancel_batch tags: - batch responses: '200': - description: '' - examples: - response: - canceled_jobs: 4 + description: 'The number of jobs canceled' + schema: + $ref: schemas/output/batch-cancel.json diff --git a/swagger/schemas/definitions/acquisition.json b/swagger/schemas/definitions/acquisition.json index 181bfb6da..4c646e5c1 100644 --- a/swagger/schemas/definitions/acquisition.json +++ b/swagger/schemas/definitions/acquisition.json @@ -61,7 +61,7 @@ "items":{"$ref":"permission.json#/definitions/permission-output-default-required"} }, "notes":{"allOf":[{"$ref":"note.json#/definitions/notes-list-output"}]}, - "tags":{"allOf":[{"$ref":"tag.json#/definitions/tag-output-list"}]}, + "tags":{"allOf":[{"$ref":"tag.json#/definitions/tag-list"}]}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} diff --git a/swagger/schemas/definitions/batch.json b/swagger/schemas/definitions/batch.json new file mode 100644 index 000000000..e3803402d --- /dev/null +++ b/swagger/schemas/definitions/batch.json @@ -0,0 +1,81 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "matched-container-list": { + "type": "array", + "items": {"$ref":"container.json#/definitions/container-output-with-files"} + }, + "batch-proposal-detail": { + "type": "object", + "properties": { + "analysis": {"$ref": "analysis.json#/definitions/analysis-input"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"} + }, + "additionalProperties": false + }, + "batch": { + "type": "object", + "properties": { + "_id": {"$ref":"common.json#/definitions/objectid"}, + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "state": {"$ref":"job.json#/definitions/state"}, + "origin": {"$ref":"job.json#/definitions/job-origin"}, + "config": {"$ref":"job.json#/definitions/config"}, + "jobs": { + "type": "array", + "items": { "$ref": "common.json#/definitions/objectid" } + }, + 
"created":{"$ref":"created-modified.json#/definitions/created"}, + "modified":{"$ref":"created-modified.json#/definitions/modified"} + } + }, + "batch-proposal": { + "type": "object", + "properties": { + "_id": {"$ref":"common.json#/definitions/objectid"}, + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "state": {"$ref":"job.json#/definitions/state"}, + "config": {"$ref":"job.json#/definitions/config"}, + "origin": {"$ref":"job.json#/definitions/job-origin"}, + + "proposal": { "$ref": "#/definitions/batch-proposal-detail" }, + + "ambiguous": { "$ref": "#/definitions/matched-container-list" }, + "matched": { "$ref": "#/definitions/matched-container-list" }, + "not_matched": { "$ref": "#/definitions/matched-container-list" }, + + "improper_permissions": { + "type": "array", + "items": {"$ref":"container.json#/definitions/_id"} + }, + + "created":{"$ref":"created-modified.json#/definitions/created"}, + "modified":{"$ref":"created-modified.json#/definitions/modified"} + }, + "additionalProperties": false + }, + "batch-proposal-input": { + "type": "object", + "properties": { + "gear_id": {"$ref":"job.json#/definitions/gear_id"}, + "config": {"$ref":"job.json#/definitions/config"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, + "analysis": {"$ref": "analysis.json#/definitions/analysis-input"}, + "targets": { + "type": "array", + "items": {"$ref":"container.json#/definitions/container-reference"} + } + }, + "additionalProperties": false + }, + "batch-cancel-output": { + "type": "object", + "properties": { + "number_cancelled": {"type":"integer"} + }, + "additionalProperties": false, + "required": ["number_cancelled"], + "x-sdk-return": "number_cancelled" + } + } +} diff --git a/swagger/schemas/definitions/collection.json b/swagger/schemas/definitions/collection.json index 67caaed72..d391f3987 100644 --- a/swagger/schemas/definitions/collection.json +++ b/swagger/schemas/definitions/collection.json @@ -50,7 +50,7 @@ 
"items":{"$ref":"file.json#/definitions/file-output"} }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} diff --git a/swagger/schemas/definitions/container.json b/swagger/schemas/definitions/container.json index ed3161013..caf1f1699 100644 --- a/swagger/schemas/definitions/container.json +++ b/swagger/schemas/definitions/container.json @@ -8,6 +8,11 @@ "uid": {"type": "string"}, "timestamp": {"type": ["string", "null"], "format": "date-time"}, "timezone": {"type": "string"}, + "container-type": { + "type": "string", + "enum": ["group", "project", "session", "acquisition", "collection", "analysis"], + "description": "The type of container (e.g. session)" + }, "container-new-output": { "type": "object", @@ -15,6 +20,30 @@ "_id": {"$ref":"#/definitions/_id"} }, "required": ["_id"] - } + }, + "container-reference": { + "type": "object", + "properties": { + "type": {"$ref":"#/definitions/container-type"}, + "id": {"$ref":"#/definitions/_id"} + }, + "required": [ "type", "id" ], + "additionalProperties":false, + "description": "A reference to an individual container, by type and id" + }, + "container-output-with-files": { + "type": "object", + "properties": { + "_id": {"$ref":"#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "files":{ + "type":"array", + "items":{"$ref":"file.json#/definitions/file-output"} + }, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "modified": {"$ref":"created-modified.json#/definitions/modified"} + }, + "description": "Generic container output with files" + } } } diff --git a/swagger/schemas/definitions/file.json b/swagger/schemas/definitions/file.json index f6cc81590..6fc0f1ad0 100644 --- a/swagger/schemas/definitions/file.json +++ b/swagger/schemas/definitions/file.json @@ 
-93,6 +93,17 @@ "type": "object", "allOf": [{"$ref":"#/definitions/file"}], "required":["modified", "size"] - } + }, + "file-reference": { + "type": "object", + "properties": { + "type": {"$ref":"container.json#/definitions/container-type"}, + "id": {"$ref":"container.json#/definitions/_id"}, + "name": {"$ref":"#/definitions/name"} + }, + "required": [ "type", "id", "name" ], + "additionalProperties":false, + "description": "A reference to an individual file in a container, by type, id and name" + } } } diff --git a/swagger/schemas/definitions/job.json b/swagger/schemas/definitions/job.json index b1d8a588b..aa3b51c4e 100644 --- a/swagger/schemas/definitions/job.json +++ b/swagger/schemas/definitions/job.json @@ -5,10 +5,6 @@ "gear_id": {"type":"string"}, "previous_job_id": {"type":"string"}, - "inputs-property-type":{"type":"string"}, - "inputs-property-id":{"type":"string"}, - "inputs-property-name":{"type":"string"}, - "inputs-item": { "type":"object", "properties":{ @@ -19,20 +15,10 @@ }, "required":["type", "uri", "location"] }, - "inputs-entry": { - "type": "object", - "properties": { - "type": {"$ref":"#/definitions/inputs-property-type"}, - "id": {"$ref":"#/definitions/inputs-property-id"}, - "name": {"$ref":"#/definitions/inputs-property-name"} - }, - "required": [ "type", "id", "name" ], - "additionalProperties":false - }, "inputs-object": { "type": "object", "patternProperties": { - "[-_ a-zA-Z0-9]+": {"$ref":"#/definitions/inputs-entry"} + "[-_ a-zA-Z0-9]+": {"$ref":"file.json#/definitions/file-reference"} } }, "inputs-array":{ @@ -40,9 +26,9 @@ "items":{ "type":"object", "properties":{ - "type": {"$ref":"#/definitions/inputs-property-type"}, - "id": {"$ref":"#/definitions/inputs-property-id"}, - "name": {"$ref":"#/definitions/inputs-property-name"}, + "type": {"$ref":"container.json#/definitions/container-type"}, + "id": {"$ref":"container.json#/definitions/_id"}, + "name": {"$ref":"file.json#/definitions/name"}, "input":{"type":"string"} } } @@ -114,7 
+100,8 @@ }, "state":{ - "type":"string" + "type":"string", + "enum": [ "pending", "running", "failed", "complete", "cancelled" ] }, "attempt":{ "type":"integer" diff --git a/swagger/schemas/definitions/project.json b/swagger/schemas/definitions/project.json index 6cb7dc156..366a7642c 100644 --- a/swagger/schemas/definitions/project.json +++ b/swagger/schemas/definitions/project.json @@ -46,7 +46,7 @@ } }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} diff --git a/swagger/schemas/definitions/session.json b/swagger/schemas/definitions/session.json index 9c1c9775b..16702f47e 100644 --- a/swagger/schemas/definitions/session.json +++ b/swagger/schemas/definitions/session.json @@ -63,7 +63,7 @@ "items":{"$ref":"file.json#/definitions/file-output"} }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, - "tags": {"$ref":"tag.json#/definitions/tag-output-list"}, + "tags": {"$ref":"tag.json#/definitions/tag-list"}, "analyses":{ "type":"array", "items":{"$ref":"analysis.json#/definitions/analysis-output"} diff --git a/swagger/schemas/definitions/tag.json b/swagger/schemas/definitions/tag.json index e53e657c7..cf1e974cd 100644 --- a/swagger/schemas/definitions/tag.json +++ b/swagger/schemas/definitions/tag.json @@ -9,7 +9,7 @@ "additionalProperties": false, "required": ["value"] }, - "tag-output-list":{ + "tag-list":{ "type":"array", "items":{ "allOf":[{"$ref":"#/definitions/tag"}] diff --git a/swagger/schemas/input/propose-batch.json b/swagger/schemas/input/propose-batch.json new file mode 100644 index 000000000..0ec0934ce --- /dev/null +++ b/swagger/schemas/input/propose-batch.json @@ -0,0 +1,14 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": 
[{"$ref":"../definitions/batch.json#/definitions/batch-proposal-input"}], + "example": { + "gear_id": "59b1b5b0e105c40019f50015", + "config": {}, + "tags": ["test-tag"], + "targets": [{ + "type": "session", + "id": "deb1b5b0e105c40019f500af" + }] + } +} diff --git a/swagger/schemas/output/batch-cancel.json b/swagger/schemas/output/batch-cancel.json new file mode 100644 index 000000000..8ca8186a0 --- /dev/null +++ b/swagger/schemas/output/batch-cancel.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/batch.json#/definitions/batch-cancel-output"}], + "example": { + "number_cancelled": 4 + } +} diff --git a/swagger/schemas/output/batch-list.json b/swagger/schemas/output/batch-list.json new file mode 100644 index 000000000..d120f9d03 --- /dev/null +++ b/swagger/schemas/output/batch-list.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/batch.json#/definitions/batch"}, + "example": [{ + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "jobs": [ + "5a33fa6652e95c001707489c", + "5a33fa6652e95c001707489d", + "5a33fa6652e95c001707489e", + "5a33fa6652e95c001707489f" + ], + "created": "2017-12-15T16:37:55.538000+00:00", + "modified": "2017-12-15T16:38:01.107000+00:00", + "state": "complete", + "gear_id": "59b1b5b0e105c40019f50015", + "_id": "5a33fa6352e95c001707489b", + "config": {} + }] +} diff --git a/swagger/schemas/output/batch-proposal.json b/swagger/schemas/output/batch-proposal.json new file mode 100644 index 000000000..c0c61ec2c --- /dev/null +++ b/swagger/schemas/output/batch-proposal.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref":"../definitions/batch.json#/definitions/batch-proposal"}], + "example": { + "_id": "5a33fa6352e95c001707489b", + "gear_id": "59b1b5b0e105c40019f50015", + "config": {}, + 
"state": "pending", + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "proposal": {}, + "created": "2017-12-15T16:37:55.538000+00:00", + "modified": "2017-12-15T16:38:01.107000+00:00" + } +} diff --git a/swagger/schemas/output/batch.json b/swagger/schemas/output/batch.json new file mode 100644 index 000000000..fa73bafff --- /dev/null +++ b/swagger/schemas/output/batch.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref":"../definitions/batch.json#/definitions/batch"}], + "example": { + "origin": { + "type": "user", + "id": "justinehlert@flywheel.io" + }, + "jobs": [ + "5a33fa6652e95c001707489c", + "5a33fa6652e95c001707489d", + "5a33fa6652e95c001707489e", + "5a33fa6652e95c001707489f" + ], + "created": "2017-12-15T16:37:55.538000+00:00", + "modified": "2017-12-15T16:38:01.107000+00:00", + "state": "complete", + "gear_id": "59b1b5b0e105c40019f50015", + "_id": "5a33fa6352e95c001707489b", + "config": {} + } +} From b20a26ad3df17c3414d4e863718fac48d5f59356 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 12:18:28 -0600 Subject: [PATCH 17/53] Add schema for adding job logs --- api/config.py | 1 + swagger/paths/jobs.yaml | 15 ++++++++++++++- swagger/schemas/input/job-logs.json | 8 ++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 swagger/schemas/input/job-logs.json diff --git a/api/config.py b/api/config.py index 40f812b9a..283a22ff5 100644 --- a/api/config.py +++ b/api/config.py @@ -164,6 +164,7 @@ def apply_env_variables(config): 'group-new.json', 'group-update.json', 'info_update.json', + 'job-logs.json', 'job-new.json', 'note.json', 'packfile.json', diff --git a/swagger/paths/jobs.yaml b/swagger/paths/jobs.yaml index c2199da76..38e26dc69 100644 --- a/swagger/paths/jobs.yaml +++ b/swagger/paths/jobs.yaml @@ -25,6 +25,13 @@ operationId: get_next_job tags: - jobs + parameters: + - name: tags + in: query + type: array + items: + type: 
string + collectionFormat: multi responses: '200': description: '' @@ -161,9 +168,15 @@ $ref: schemas/output/job-log.json post: summary: Add logs to a job. - operationId: add_logs + operationId: add_job_logs tags: - jobs + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/job-logs.json responses: '200': description: Logs were added. No value is returned. diff --git a/swagger/schemas/input/job-logs.json b/swagger/schemas/input/job-logs.json new file mode 100644 index 000000000..43e5f9444 --- /dev/null +++ b/swagger/schemas/input/job-logs.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref": "../definitions/job.json#/definitions/job-log-statement"}, + "example": [ + { "fd": 1, "msg": "Hello World!" } + ] +} From 89cc37239397ce9ef3dd75567a97bc0ebcbf4b79 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 15:38:50 -0600 Subject: [PATCH 18/53] Add API documentation for search endpoint --- api/config.py | 1 + swagger/index.yaml | 1 + swagger/paths/dataexplorer.yaml | 21 +++ swagger/schemas/definitions/search.json | 128 ++++++++++++++++++ swagger/schemas/input/search-query.json | 9 ++ .../schemas/output/search-response-list.json | 6 + 6 files changed, 166 insertions(+) create mode 100644 swagger/paths/dataexplorer.yaml create mode 100644 swagger/schemas/definitions/search.json create mode 100644 swagger/schemas/input/search-query.json create mode 100644 swagger/schemas/output/search-response-list.json diff --git a/api/config.py b/api/config.py index 283a22ff5..75c669e42 100644 --- a/api/config.py +++ b/api/config.py @@ -175,6 +175,7 @@ def apply_env_variables(config): 'propose-batch.json', 'rule-new.json', 'rule-update.json', + 'search-query.json', 'session.json', 'session-update.json', 'subject.json', diff --git a/swagger/index.yaml b/swagger/index.yaml index 8015c5929..e6372f23a 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -68,6 +68,7 @@ 
paths: - paths/batch.yaml - paths/analyses.yaml - paths/site-rules.yaml + - paths/dataexplorer.yaml securityDefinitions: diff --git a/swagger/paths/dataexplorer.yaml b/swagger/paths/dataexplorer.yaml new file mode 100644 index 000000000..b17c60e52 --- /dev/null +++ b/swagger/paths/dataexplorer.yaml @@ -0,0 +1,21 @@ +/dataexplorer/search: + post: + summary: Perform a search query + operationId: search + parameters: + - name: simple + in: query + type: boolean + - name: limit + in: query + type: integer + - name: body + in: body + required: true + schema: + $ref: schemas/input/search-query.json + responses: + '200': + description: A list of results of the search query + schema: + $ref: schemas/output/search-response-list.json diff --git a/swagger/schemas/definitions/search.json b/swagger/schemas/definitions/search.json new file mode 100644 index 000000000..218f47e72 --- /dev/null +++ b/swagger/schemas/definitions/search.json @@ -0,0 +1,128 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "search-type": { + "type": "string", + "enum": ["file", "acquisition", "session", "analysis", "collection"], + "description": "Sets the type of search results to return" + }, + "search-query": { + "type": "object", + "properties": { + "return_type": {"$ref": "#/definitions/search-type"}, + "search_string": { + "type": "string", + "description": "Represents the plain text search query" + }, + "all_data": { + "type": "boolean", + "description": "When set will include all data that the user does not have access to read", + "default": false + }, + "filters": { + "type": "object", + "description": "See https://www.elastic.co/guide/en/elasticsearch/reference/current/term-level-queries.html" + } + }, + "required": ["return_type"] + }, + "search-project-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"} + }, + "description": "Fields for project search 
response" + }, + "search-group-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"} + }, + "description": "Fields for group search response" + }, + "search-session-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "timestamp": {"$ref":"common.json#/definitions/timestamp"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for session search response" + }, + "search-acquisition-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "timestamp": {"$ref":"common.json#/definitions/timestamp"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for acquisition search response" + }, + "search-subject-response": { + "type": "object", + "properties": { + "code": {"$ref":"subject.json#/definitions/code"} + }, + "description": "Fields for subject search response" + }, + "search-file-response": { + "type": "object", + "properties": { + "measurements": {"$ref":"file.json#/definitions/measurements"}, + "created": {"$ref":"created-modified.json#/definitions/created"}, + "type": {"$ref":"file.json#/definitions/file-type"}, + "name": {"$ref":"file.json#/definitions/name"}, + "size": {"$ref":"file.json#/definitions/size"} + }, + "description": "Fields for file search response" + }, + "search-analysis-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "user": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for acquisition search response" + }, + "search-parent-response": { + 
"type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "type": {"$ref":"container.json#/definitions/container-type"} + }, + "description": "Fields for parent search response" + }, + "search-collection-response": { + "type": "object", + "properties": { + "_id": {"$ref":"container.json#/definitions/_id"}, + "label": {"$ref":"common.json#/definitions/label"}, + "curator": {"$ref":"common.json#/definitions/user-id"}, + "created": {"$ref":"created-modified.json#/definitions/created"} + }, + "description": "Fields for collection search response" + }, + "search-response": { + "type": "object", + "properties": { + "project": {"$ref":"#/definitions/search-project-response"}, + "group": {"$ref":"#/definitions/search-group-response"}, + "session": {"$ref":"#/definitions/search-session-response"}, + "acquisition": {"$ref":"#/definitions/search-acquisition-response"}, + "subject": {"$ref":"#/definitions/search-subject-response"}, + "file": {"$ref":"#/definitions/search-file-response"}, + "collection": {"$ref":"#/definitions/search-collection-response"}, + "analysis": {"$ref":"#/definitions/search-analysis-response"}, + "parent": {"$ref":"#/definitions/search-parent-response"}, + "permissions": {"$ref":"permission.json#/definitions/permission-output-list"} + }, + "description": "Single search response entry" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/input/search-query.json b/swagger/schemas/input/search-query.json new file mode 100644 index 000000000..5d76f2aee --- /dev/null +++ b/swagger/schemas/input/search-query.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf":[{"$ref":"../definitions/search.json#/definitions/search-query"}], + "example": { + "return_type": "session", + "search_string": "amyg" + } +} diff --git a/swagger/schemas/output/search-response-list.json b/swagger/schemas/output/search-response-list.json new file mode 100644 index 
000000000..6368c2947 --- /dev/null +++ b/swagger/schemas/output/search-response-list.json @@ -0,0 +1,6 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "array", + "items": {"$ref":"../definitions/search.json#/definitions/search-response"}, + "example": [] +} From 06e4c999006fac301d2ef5d660f1a9556a0960e2 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 13 Feb 2018 16:01:41 -0600 Subject: [PATCH 19/53] Cleanup packfile endpoints Removed packfile endpoints for sessions, collections, and acquisitions. Added missing parameters to packfile and packfile-end endpoints. --- swagger/paths/acquisitions.yaml | 8 -------- swagger/paths/collections.yaml | 8 -------- swagger/paths/sessions.yaml | 8 -------- swagger/templates/packfile-end.yaml | 12 +++++++++--- swagger/templates/packfile.yaml | 9 +++++++-- 5 files changed, 16 insertions(+), 29 deletions(-) diff --git a/swagger/paths/acquisitions.yaml b/swagger/paths/acquisitions.yaml index 5ce6d36e4..f24fd3953 100644 --- a/swagger/paths/acquisitions.yaml +++ b/swagger/paths/acquisitions.yaml @@ -24,14 +24,6 @@ $template_arguments: /acquisitions/{AcquisitionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/acquisitions/{AcquisitionId}/packfile-start: - $template: templates/packfile-start.yaml -/acquisitions/{AcquisitionId}/packfile: - $template: templates/packfile.yaml -/acquisitions/{AcquisitionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /acquisitions/{AcquisitionId}/files: $template: templates/file-list-upload.yaml diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 29014466f..0c6ca703f 100644 --- a/swagger/paths/collections.yaml +++ b/swagger/paths/collections.yaml @@ -150,14 +150,6 @@ $template_arguments: /collections/{CollectionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/collections/{CollectionId}/packfile-start: - $template: templates/packfile-start.yaml 
-/collections/{CollectionId}/packfile: - $template: templates/packfile.yaml -/collections/{CollectionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /collections/{CollectionId}/files: $template: templates/file-list-upload.yaml diff --git a/swagger/paths/sessions.yaml b/swagger/paths/sessions.yaml index 5e5c0a71d..520512832 100644 --- a/swagger/paths/sessions.yaml +++ b/swagger/paths/sessions.yaml @@ -50,14 +50,6 @@ $template_arguments: /sessions/{SessionId}/tags/{TagValue}: $template: templates/tags-tag.yaml -# ===== Packfile ===== -/sessions/{SessionId}/packfile-start: - $template: templates/packfile-start.yaml -/sessions/{SessionId}/packfile: - $template: templates/packfile.yaml -/sessions/{SessionId}/packfile-end: - $template: templates/packfile-end.yaml - # ===== Files ===== /sessions/{SessionId}/files: $template: templates/file-list-upload.yaml diff --git a/swagger/templates/packfile-end.yaml b/swagger/templates/packfile-end.yaml index 3af0ca3c4..02c000aa3 100644 --- a/swagger/templates/packfile-end.yaml +++ b/swagger/templates/packfile-end.yaml @@ -14,7 +14,7 @@ template: | type: string in: path name: '{{parameter}}' - post: + get: summary: End a packfile upload operationId: end_{{resource}}_packfile_upload tags: @@ -22,9 +22,15 @@ template: | produces: - text/event-stream parameters: - - in: formData - name: formData + - name: token + in: query type: string + required: true + - name: metadata + in: query + type: string + required: true + description: string-encoded metadata json object. 
responses: '200': description: '' diff --git a/swagger/templates/packfile.yaml b/swagger/templates/packfile.yaml index c2df8b52f..79e9edd0d 100644 --- a/swagger/templates/packfile.yaml +++ b/swagger/templates/packfile.yaml @@ -22,9 +22,14 @@ template: | consumes: - multipart/form-data parameters: - - in: formData - name: formData + - name: token + in: query type: string + required: true + - name: file + in: formData + type: file + required: true responses: '200': description: '' From 1135641b5d11f3e25872132379f7a928c47c0887 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 2 Mar 2018 12:09:06 -0600 Subject: [PATCH 20/53] Add documentation for resolver endpoint Closes #1088 --- api/config.py | 1 + swagger/index.yaml | 1 + swagger/paths/resolver.yaml | 26 ++++++++ swagger/schemas/definitions/resolver.json | 75 +++++++++++++++++++++++ swagger/schemas/input/resolver.json | 8 +++ swagger/schemas/output/resolver.json | 75 +++++++++++++++++++++++ 6 files changed, 186 insertions(+) create mode 100644 swagger/paths/resolver.yaml create mode 100644 swagger/schemas/definitions/resolver.json create mode 100644 swagger/schemas/input/resolver.json create mode 100644 swagger/schemas/output/resolver.json diff --git a/api/config.py b/api/config.py index 75c669e42..537a01528 100644 --- a/api/config.py +++ b/api/config.py @@ -173,6 +173,7 @@ def apply_env_variables(config): 'project-template.json', 'project-update.json', 'propose-batch.json', + 'resolver.json', 'rule-new.json', 'rule-update.json', 'search-query.json', diff --git a/swagger/index.yaml b/swagger/index.yaml index e6372f23a..c0c2d0a1f 100644 --- a/swagger/index.yaml +++ b/swagger/index.yaml @@ -69,6 +69,7 @@ paths: - paths/analyses.yaml - paths/site-rules.yaml - paths/dataexplorer.yaml + - paths/resolver.yaml securityDefinitions: diff --git a/swagger/paths/resolver.yaml b/swagger/paths/resolver.yaml new file mode 100644 index 000000000..bced3ed73 --- /dev/null +++ b/swagger/paths/resolver.yaml @@ -0,0 +1,26 @@ 
+/resolve: + post: + summary: Perform path based lookup of nodes in the Flywheel hierarchy + description: | + This will perform a deep lookup of a node (i.e. group/project/session/acquisition) and its children, + including any files. The query path is an array of strings in the following order (by default): + + * group id + * project label + * session label + * acquisition label + + An ID can be used instead of a label by formatting the string as ``. The full path + to the node, and the node's children will be included in the response. + operationId: resolve_path + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/resolver.json + responses: + '200': + description: '' + schema: + $ref: schemas/output/resolver.json diff --git a/swagger/schemas/definitions/resolver.json b/swagger/schemas/definitions/resolver.json new file mode 100644 index 000000000..f32093f41 --- /dev/null +++ b/swagger/schemas/definitions/resolver.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "resolver-input": { + "type": "object", + "properties": { + "path": { + "type": "array", + "minLength": 1, + "items": { + "type": "string" + } + } + }, + "required": ["path"] + }, + "resolver-output": { + "type": "object", + "properties": { + "path": { "$ref": "#/definitions/resolver-node-list" }, + "children": { "$ref": "#/definitions/resolver-node-list" } + }, + "required": ["path"] + }, + "resolver-node": { + "type": "object", + "properties": { + "node_type": { + "type": "string" + } + }, + "discriminator": "node_type", + "required": ["node_type"] + }, + "resolver-node-list": { + "type": "array", + "items": { "$ref": "#/definitions/resolver-node" } + }, + "group-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"group.json#/definitions/group-output"} + ], + "x-discriminator-value": "group" + }, + "project-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + 
{"$ref":"project.json#/definitions/project-output"} + ], + "x-discriminator-value": "project" + }, + "session-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"session.json#/definitions/session-output"} + ], + "x-discriminator-value": "session" + }, + "acquisition-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"acquisition.json#/definitions/acquisition-output"} + ], + "x-discriminator-value": "acquisition" + }, + "file-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"file.json#/definitions/file-output"} + ], + "x-discriminator-value": "file" + } + } +} \ No newline at end of file diff --git a/swagger/schemas/input/resolver.json b/swagger/schemas/input/resolver.json new file mode 100644 index 000000000..11acc2c06 --- /dev/null +++ b/swagger/schemas/input/resolver.json @@ -0,0 +1,8 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "allOf": [{"$ref": "../definitions/resolver.json#/definitions/resolver-input"}], + "example": { + "path": ["scitran", "Neuroscience"] + } +} diff --git a/swagger/schemas/output/resolver.json b/swagger/schemas/output/resolver.json new file mode 100644 index 000000000..fd8979424 --- /dev/null +++ b/swagger/schemas/output/resolver.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf": [{ "$ref": "../definitions/resolver.json#/definitions/resolver-output" }], + "example": { + "path": [ + { + "node_type": "group", + "_id": "scitran", + "label": "Scitran", + "permissions": [ + { + "access": "admin", + "_id": "coltonlw@flywheel.io" + } + ], + "created": "2016-08-19T11:41:15.360000+00:00", + "modified": "2016-08-19T11:41:15.360000+00:00" + }, + { + "node_type": "project", + "_id": "57e452791cff88b85f9f9c97", + "label": "Neuroscience", + "group": "scitran", + "created": "2016-09-22T21:51:53.151000+00:00", + "modified": "2016-09-22T21:51:53.151000+00:00", + "public": false, + 
"permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + } + ], + "children": [ + { + "node_type": "session", + "_id": "57e01cccb1dc04000fb83f03", + "label": "control_1", + "group": "scitran", + "created": "2016-09-19T17:13:48.164000+00:00", + "subject": { + "code": "ex4784", + "_id": "57e01cccb1dc04000fb83f02" + }, + "modified": "2016-09-19T17:13:48.164000+00:00", + "project": "57e01cccf6b5d5edbcb4e1cf", + "public": false, + "permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + }, + { + "node_type": "file", + "origin": { + "method": "importer", + "type": "device", + "id": "importer_Admin_Import", + "name": "Admin Import" + }, + "mimetype": "application/zip", + "measurements": [], + "hash": "v0-sha384-dd3c97bfe0ad1fcba75ae6718c6e81038c59af4f447f5db194d52732efa4f955b28455db02eb64cad3e4e55f11e3679f", + "name": "4784_1_1_localizer_dicom.zip", + "tags": [], + "created": "2016-09-21T14:56:09.943000+00:00", + "modified": "2016-09-21T14:56:09.943000+00:00", + "modality": null, + "info": {}, + "type": "dicom", + "size": 989933 + } + ] + } +} From 7496d0f6d29be58d8b2ad3c9f1700acc7080d6bd Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 2 Jan 2018 08:35:27 -0600 Subject: [PATCH 21/53] Added step to simplify swagger for code generation. 
Fixed a few missing types in schemas, added schemas for login/logout responses --- swagger/Gruntfile.js | 13 ++- swagger/paths/login.yaml | 6 +- swagger/schemas/definitions/auth.json | 19 ++++ swagger/schemas/definitions/info.json | 2 + swagger/schemas/definitions/job.json | 5 +- swagger/schemas/definitions/permission.json | 5 +- swagger/schemas/definitions/session.json | 1 + swagger/schemas/definitions/subject.json | 6 +- swagger/schemas/definitions/tag.json | 1 + swagger/schemas/definitions/user.json | 2 +- swagger/schemas/output/login-output.json | 7 ++ swagger/schemas/output/logout-output.json | 7 ++ swagger/schemas/output/user.json | 12 +-- swagger/support/schema-transpiler.js | 5 - swagger/support/schemas.js | 1 + .../support/spec/schema-transpiler-spec.js | 28 ++---- swagger/support/tasks/simplify-swagger.js | 92 +++++++++++++++++++ 17 files changed, 170 insertions(+), 42 deletions(-) create mode 100644 swagger/schemas/definitions/auth.json create mode 100644 swagger/schemas/output/login-output.json create mode 100644 swagger/schemas/output/logout-output.json create mode 100644 swagger/support/tasks/simplify-swagger.js diff --git a/swagger/Gruntfile.js b/swagger/Gruntfile.js index df8131907..24efc1fd2 100644 --- a/swagger/Gruntfile.js +++ b/swagger/Gruntfile.js @@ -85,6 +85,16 @@ module.exports = function(grunt) { } }, + /** + * Simplify swagger for codegen + */ + simplifySwagger: { + core: { + src: 'build/swagger-ui.json', + dst: 'build/swagger-codegen.json' + } + }, + /** * Validate swagger */ @@ -152,7 +162,8 @@ module.exports = function(grunt) { 'createBuildDir', 'flattenSwagger', 'schemasToDefs', - 'validateSwagger' + 'validateSwagger', + 'simplifySwagger' ]); /** diff --git a/swagger/paths/login.yaml b/swagger/paths/login.yaml index 85ec8f795..cc1955d89 100644 --- a/swagger/paths/login.yaml +++ b/swagger/paths/login.yaml @@ -7,8 +7,7 @@ '200': description: '' schema: - example: - success: true + $ref: schemas/output/login-output.json /logout: post: 
summary: Log Out @@ -18,5 +17,4 @@ '200': description: '' schema: - example: - auth_tokens_removed: 2 \ No newline at end of file + $ref: schemas/output/logout-output.json diff --git a/swagger/schemas/definitions/auth.json b/swagger/schemas/definitions/auth.json new file mode 100644 index 000000000..e5f6ee0f3 --- /dev/null +++ b/swagger/schemas/definitions/auth.json @@ -0,0 +1,19 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "login-output": { + "type": "object", + "properties": { + "token": {"type": "string"} + }, + "required": ["token"] + }, + "logout-output": { + "type": "object", + "properties": { + "tokens_removed": {"type": "integer"} + }, + "required": ["tokens_removed"] + } + } +} \ No newline at end of file diff --git a/swagger/schemas/definitions/info.json b/swagger/schemas/definitions/info.json index fc4d65558..e2e5f9263 100644 --- a/swagger/schemas/definitions/info.json +++ b/swagger/schemas/definitions/info.json @@ -2,6 +2,7 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { "info-add-remove": { + "type": "object", "properties": { "set": {"type": "object", "minProperties": 1}, "delete": { @@ -16,6 +17,7 @@ "additionalProperties": false }, "info-replace": { + "type": "object", "properties": { "replace": {"type": "object"} }, diff --git a/swagger/schemas/definitions/job.json b/swagger/schemas/definitions/job.json index aa3b51c4e..61540f0de 100644 --- a/swagger/schemas/definitions/job.json +++ b/swagger/schemas/definitions/job.json @@ -8,7 +8,10 @@ "inputs-item": { "type":"object", "properties":{ - "type":{"enum":["http", "scitran"]}, + "type":{ + "type": "string", + "enum":["http", "scitran"] + }, "uri":{"type":"string"}, "location":{"type":"string"}, "vu":{"type":"string"} diff --git a/swagger/schemas/definitions/permission.json b/swagger/schemas/definitions/permission.json index 03a48fd55..62630cdfc 100644 --- a/swagger/schemas/definitions/permission.json +++ 
b/swagger/schemas/definitions/permission.json @@ -1,7 +1,10 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "access": { "enum": ["ro", "rw", "admin"] }, + "access": { + "type": "string", + "enum": ["ro", "rw", "admin"] + }, "permission":{ "type":"object", "properties":{ diff --git a/swagger/schemas/definitions/session.json b/swagger/schemas/definitions/session.json index 16702f47e..485778684 100644 --- a/swagger/schemas/definitions/session.json +++ b/swagger/schemas/definitions/session.json @@ -79,6 +79,7 @@ "items":{"$ref": "job.json#/definitions/job-output"} }, "containers":{ + "type": "object", "patternProperties": { "^[a-fA-F0-9]{24}$":{ "type": "object" diff --git a/swagger/schemas/definitions/subject.json b/swagger/schemas/definitions/subject.json index a5733ccdc..408e09e4f 100644 --- a/swagger/schemas/definitions/subject.json +++ b/swagger/schemas/definitions/subject.json @@ -5,9 +5,9 @@ "firstname": { "type": "string", "maxLength": 64 }, "lastname": { "type": "string", "maxLength": 64 }, "age": { "type": ["integer", "null"] }, - "sex": { "enum": ["male", "female", "other", "unknown", null] }, - "race": { "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported", null] }, - "ethnicity": { "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported", null] }, + "sex": { "type": "string", "enum": ["male", "female", "other", "unknown", null] }, + "race": { "type": "string", "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported", null] }, + "ethnicity": { "type": "string", "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported", null] }, "code": { "type": "string", "maxLength": 64 }, "tags": { "type": "array", "items": {"type": 
"string"} }, diff --git a/swagger/schemas/definitions/tag.json b/swagger/schemas/definitions/tag.json index cf1e974cd..8b0a03b57 100644 --- a/swagger/schemas/definitions/tag.json +++ b/swagger/schemas/definitions/tag.json @@ -3,6 +3,7 @@ "definitions": { "value": {"type": "string", "minLength": 1, "maxLength": 32}, "tag":{ + "type": "object", "properties":{ "value":{"$ref":"#/definitions/value"} }, diff --git a/swagger/schemas/definitions/user.json b/swagger/schemas/definitions/user.json index 475bc5024..24f55a3fa 100644 --- a/swagger/schemas/definitions/user.json +++ b/swagger/schemas/definitions/user.json @@ -37,7 +37,7 @@ "properties":{ "key": {"type": "string"}, "created": {"$ref":"created-modified.json#/definitions/created"}, - "last_used": {} + "last_used": {"$ref":"common.json#/definitions/timestamp"} }, "additionalProperties":false }, diff --git a/swagger/schemas/output/login-output.json b/swagger/schemas/output/login-output.json new file mode 100644 index 000000000..e92c297a0 --- /dev/null +++ b/swagger/schemas/output/login-output.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "allOf":[{"$ref":"../definitions/auth.json#/definitions/login-output"}], + "example": { + "token": "MjeuawZcctfRdCOmx_C6oYXK4sLHd2Dhc_oZpkXPPkxHizhNgwFWcrrKGA49BEnK" + } +} diff --git a/swagger/schemas/output/logout-output.json b/swagger/schemas/output/logout-output.json new file mode 100644 index 000000000..8749f3ded --- /dev/null +++ b/swagger/schemas/output/logout-output.json @@ -0,0 +1,7 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "allOf":[{"$ref":"../definitions/auth.json#/definitions/logout-output"}], + "example": { + "tokens_removed": 1 + } +} diff --git a/swagger/schemas/output/user.json b/swagger/schemas/output/user.json index 3e01847c5..36226054b 100644 --- a/swagger/schemas/output/user.json +++ b/swagger/schemas/output/user.json @@ -2,12 +2,10 @@ "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", 
"allOf":[ - {"$ref":"../definitions/user.json#/definitions/user-output"}, - { - "required":[ - "_id", "firstname", "lastname", - "root", "email", "created", "modified" - ] - } + {"$ref":"../definitions/user.json#/definitions/user-output"} + ], + "required":[ + "_id", "firstname", "lastname", + "root", "email", "created", "modified" ] } diff --git a/swagger/support/schema-transpiler.js b/swagger/support/schema-transpiler.js index 2bc4b66ec..dfe6cbae5 100644 --- a/swagger/support/schema-transpiler.js +++ b/swagger/support/schema-transpiler.js @@ -74,11 +74,6 @@ SchemaTranspiler.prototype.draft4ToOpenApi2 = function(schema, defs, id) { schema.type = this._selectTypeFromArray(schema.type, id); } - if( schema.allOf && schema.allOf.length === 1 && !schema.required ) { - // Merge all of object with top-level object - schema = this._flattenAllOf(schema, id); - } - // Check for top-level $ref, allOf, anyOf, oneOf if( schema.$ref && schema.example ) { // Special case, if object has $ref and example, then diff --git a/swagger/support/schemas.js b/swagger/support/schemas.js index c4e1c0e8e..5c883d223 100644 --- a/swagger/support/schemas.js +++ b/swagger/support/schemas.js @@ -199,6 +199,7 @@ Schemas.prototype.isPrimitiveDef = function(name) { return false; }; +Schemas.isPrimitiveType = isPrimitiveType; Schemas.prototype.getComplexDefinitions = function() { return _.pickBy(this.definitions, function(value) { diff --git a/swagger/support/spec/schema-transpiler-spec.js b/swagger/support/spec/schema-transpiler-spec.js index 1a65aafc9..1f60a236a 100644 --- a/swagger/support/spec/schema-transpiler-spec.js +++ b/swagger/support/spec/schema-transpiler-spec.js @@ -72,13 +72,15 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { }); }); - it('should flatten allOf with one element', function() { + it('should not flatten allOf with one element', function() { var schema = { allOf: [{$ref:'#/definitions/Foo'}] }; var result = transpiler.toOpenApi2(schema); - 
expect(result).toEqual({$ref:'#/definitions/Foo'}); + expect(result).toEqual({ + allOf: [{$ref:'#/definitions/Foo'}] + }); }); it('should merge properties for anyOf', function() { @@ -122,28 +124,16 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { expect(result).toEqual({}); }); - it('should flatten array elements', function() { - var defs = { - Foo: { - type: 'object', - properties: { - updated: {type: 'boolean'} - }, - required: ['updated'] - } - }, - schema = { + it('should not flatten array elements', function() { + var schema = { type: 'array', items: { allOf: [{$ref:'#/definitions/Foo'}] } }; - var result = transpiler.toOpenApi2(schema, defs); - expect(result).toEqual({ - type: 'array', - items: {$ref:'#/definitions/Foo'} - }); + var result = transpiler.toOpenApi2(schema); + expect(result).toEqual(schema); }); it('should recurse into properties', function() { @@ -160,7 +150,7 @@ describe('SchemaTranspiler draft4ToOpenApi2', function() { type: 'object', properties: { bar: {type: 'string'}, - foo: {$ref: '#/definitions/Foo'} + foo: {allOf: [{$ref: '#/definitions/Foo'}]} } }); }); diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js new file mode 100644 index 000000000..cb2ae0fc0 --- /dev/null +++ b/swagger/support/tasks/simplify-swagger.js @@ -0,0 +1,92 @@ +'use strict'; + +module.exports = function(grunt) { + var path = require('path'); + var fs = require('fs'); + var _ = require('lodash'); + var yaml = require('js-yaml'); + var walk = require('../walk'); + var Schemas = require('../schemas'); + + /** + * This task simplifies models in a swagger file. 
+ * @param {object} data Task data + * @param {string} data.src The input file (root level swagger file) + * @param {string} data.dst The output file + */ + grunt.registerMultiTask('simplifySwagger', 'Simplify models in swagger API file', function() { + var srcFile = this.data.src||'swagger.yaml'; + var dstFile = this.data.dst; + + if(!fs.existsSync(srcFile)) { + grunt.log.error('Could not find:', srcFile); + return false; + } + + var root = yaml.safeLoad(fs.readFileSync(srcFile).toString()); + + // walk through all schemas + // That's every definition and every response and body schema + root = walk(root, function(obj, path) { + if( isSchema(path) ) { + return simplifySchema(obj, path); + } + return obj; + }); + + var data = JSON.stringify(root, null, 2); + fs.writeFileSync(dstFile, data); + }); + + function formatPath(path) { + path = _.map(path, function(el) { + return el.replace(/\//g, '~1'); + }); + return '#/' + path.join('/'); + } + + function isSchema(path) { + if( path.length === 2 && path[0] === 'definitions' ) { + return true; + } + if( path.length === 4 && path[0] === 'definitions' && path[2] === 'properties' ) { + return true; + } + if( path.length > 1 && path[path.length-1] === 'schema' ) { + return true; + } + return false; + } + + function isValidSchema(schema) { + return( schema.type || schema.$ref || + schema.allOf || schema.oneOf || schema.anyOf || schema.not ); + } + + // Performs all of the simplifying steps, and + // returns a simplified version of schema + function simplifySchema(schema, path) { + schema = _.cloneDeep(schema); + if( !isValidSchema(schema) ) { + grunt.log.writeln('WARNING '.red + 'Invalid schema (no object type specified) at: ' + formatPath(path)); + schema.type = 'object'; + } else if( schema.type === 'array' && schema.items ) { + path = _.concat(path, 'items'); + schema.items = simplifySchema(schema.items, path); + } else if( schema.allOf ) { + if( schema.allOf.length === 1 ) { + if( schema.allOf[0].$ref || 
Schemas.isPrimitiveType(schema.allOf[0].type) ) { + schema = schema.allOf[0]; + } else { + grunt.log.writeln('WARNING: Cannot simplify "allOf" definition at: ' + formatPath(path)); + } + } else { + grunt.log.writeln('WARNING: Cannot simplify "allOf" definition at: ' + formatPath(path)); + } + } + return schema; + } + +}; + + From 452f6177b39e9ea38bb0bf66390dacf30144c3fb Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 8 Jan 2018 11:12:31 -0600 Subject: [PATCH 22/53] Added additional model simplifications --- swagger/support/tasks/simplify-swagger.js | 81 +++++++++++++++++++++-- 1 file changed, 75 insertions(+), 6 deletions(-) diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index cb2ae0fc0..e87ddacc5 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -25,11 +25,18 @@ module.exports = function(grunt) { var root = yaml.safeLoad(fs.readFileSync(srcFile).toString()); + var context = { + aliases: {} + }; + + // Walk through definitions, simplifying models where we can + simplifyDefinitions(root, context); + // walk through all schemas // That's every definition and every response and body schema root = walk(root, function(obj, path) { if( isSchema(path) ) { - return simplifySchema(obj, path); + return simplifySchema(obj, path, context); } return obj; }); @@ -45,6 +52,17 @@ module.exports = function(grunt) { return '#/' + path.join('/'); } + function unformatPath(path) { + if( !path.substr ) { + grunt.log.writeln('Invalid path: ' + JSON.stringify(path)); + return path; + } + var parts = path.substr(2).split('/'); + return _.map(parts, function(el) { + return el.replace(/~1/g, '/'); + }); + } + function isSchema(path) { if( path.length === 2 && path[0] === 'definitions' ) { return true; @@ -63,30 +81,81 @@ module.exports = function(grunt) { schema.allOf || schema.oneOf || schema.anyOf || schema.not ); } + function isDefinition(path) { + return ( path.length === 
2 && path[0] === 'definitions' ); + } + + function simplifyDefinitions(root, context) { + var defs = root.definitions||{}; + var keys = _.keys(defs); + + _.each(keys, function(k) { + var schema = defs[k]; + var path = formatPath(['definitions', k]); + + if( schema.type === 'array' ) { + // Setup an alias for array objects (don't generate a model) + context.aliases[path] = simplifySchema(schema, ['definitions', k], context); + delete defs[k]; + } else if( schema.allOf && schema.allOf.length === 1 && schema.allOf[0].$ref ) { + // For objects that are just aliases for other objects, copy all of the properties + var target = unformatPath(schema.allOf[0].$ref); + var targetObj = resolvePathObj(root, target); + if( targetObj ) { + defs[k] = targetObj; + } else { + grunt.log.writeln('ERROR '.red + 'Cannot find alias for: ' + path + ' (' + schema.allOf[0].$ref + ')'); + } + } + }); + } + // Performs all of the simplifying steps, and // returns a simplified version of schema - function simplifySchema(schema, path) { + function simplifySchema(schema, path, context) { schema = _.cloneDeep(schema); if( !isValidSchema(schema) ) { grunt.log.writeln('WARNING '.red + 'Invalid schema (no object type specified) at: ' + formatPath(path)); schema.type = 'object'; } else if( schema.type === 'array' && schema.items ) { path = _.concat(path, 'items'); - schema.items = simplifySchema(schema.items, path); + schema.items = simplifySchema(schema.items, path, context); } else if( schema.allOf ) { if( schema.allOf.length === 1 ) { - if( schema.allOf[0].$ref || Schemas.isPrimitiveType(schema.allOf[0].type) ) { + if( schema.allOf[0].$ref ) { + var alias = context.aliases[schema.allOf[0].$ref]; + // Replace alias for allOf fields + if( alias ) { + schema = _.cloneDeep(alias); + } else { + schema = schema.allOf[0]; + } + } else if( Schemas.isPrimitiveType(schema.allOf[0].type) ) { schema = schema.allOf[0]; } else { - grunt.log.writeln('WARNING: Cannot simplify "allOf" definition at: ' + 
formatPath(path)); + grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); } } else { - grunt.log.writeln('WARNING: Cannot simplify "allOf" definition at: ' + formatPath(path)); + grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); + } + } else if( schema.$ref ) { + // Replace alias for $ref fields + var alias = context.aliases[schema.$ref]; + if( alias ) { + schema = _.cloneDeep(alias); } } return schema; } + function resolvePathObj(root, path) { + var current = root; + path = path.slice(); + while( current && path.length ) { + current = current[path.shift()]; + } + return current; + } }; From e3eb466620e6c64c84e0f7fe495af32c39aecfde Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 11 Jan 2018 07:20:27 -0600 Subject: [PATCH 23/53] Alias primitive types in definitions --- swagger/support/tasks/simplify-swagger.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index e87ddacc5..3ebd782ee 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -106,6 +106,10 @@ module.exports = function(grunt) { } else { grunt.log.writeln('ERROR '.red + 'Cannot find alias for: ' + path + ' (' + schema.allOf[0].$ref + ')'); } + } else if( Schemas.isPrimitiveType(schema.type) ) { + // For simple types in definitions, alias them + context.aliases[path] = schema; + delete defs[k]; } }); } From 5bda4202f595644d385b5ada26b8be0079961007 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 11 Jan 2018 13:23:04 -0600 Subject: [PATCH 24/53] Added support for conversion from patternProperties to additionalProperties in schema transpiler --- swagger/schemas/definitions/device.json | 18 +++++++++++------- swagger/support/schema-transpiler.js | 6 +++++- swagger/support/schemas.js | 16 ++++++++++++++++ 3 files changed, 32 insertions(+), 8 deletions(-) diff --git 
a/swagger/schemas/definitions/device.json b/swagger/schemas/definitions/device.json index ae5abed6e..4255269de 100644 --- a/swagger/schemas/definitions/device.json +++ b/swagger/schemas/definitions/device.json @@ -12,6 +12,16 @@ "type": "string", "enum": ["ok", "missing", "error", "unknown"] }, + "device-status-entry": { + "type": "object", + "properties":{ + "errors": {"$ref":"#/definitions/errors"}, + "last_seen": {"$ref":"common.json#/definitions/timestamp"}, + "status": {"$ref":"#/definitions/status-value"} + }, + "additionalProperties":false, + "required": ["last_seen", "status"] + }, "device": { "type": "object", "properties": { @@ -43,13 +53,7 @@ "type":"object", "patternProperties": { "^[0-9a-z.@_-]*$":{ - "properties":{ - "errors": {"$ref":"#/definitions/errors"}, - "last_seen": {"$ref":"common.json#/definitions/timestamp"}, - "status": {"$ref":"#/definitions/status-value"} - }, - "additionalProperties":false, - "required": ["last_seen", "status"] + "$ref": "#/definitions/device-status-entry" } } } diff --git a/swagger/support/schema-transpiler.js b/swagger/support/schema-transpiler.js index dfe6cbae5..832b046e4 100644 --- a/swagger/support/schema-transpiler.js +++ b/swagger/support/schema-transpiler.js @@ -97,7 +97,11 @@ SchemaTranspiler.prototype.draft4ToOpenApi2 = function(schema, defs, id) { } if( schema.patternProperties ) { - this.warn(id, '"patternProperties" is not supported in OpenApi 2'); + var keys = _.keys(schema.patternProperties); + if( keys.length > 1 ) { + this.warn(id, 'Can only support one type in additionalProperties (from "patternProperties")'); + } + schema.additionalProperties = this.draft4ToOpenApi2(schema.patternProperties[keys[0]], defs, id); delete schema.patternProperties; } diff --git a/swagger/support/schemas.js b/swagger/support/schemas.js index 5c883d223..45756182c 100644 --- a/swagger/support/schemas.js +++ b/swagger/support/schemas.js @@ -15,10 +15,25 @@ var PRIMITIVE_TYPES = { 'null': true }; +var OBJECT_PROPERTIES = [ 
'allOf', 'anyOf', 'oneOf', 'multipleOf', 'not', + 'if', 'then', 'else', 'properties', 'additionalProperties' ]; + function isPrimitiveType(type) { return !!PRIMITIVE_TYPES[type]; } +function isEmptyObject(schema) { + if( schema.type && schema.type !== 'object' ) { + return false; + } + if( schema.$ref ) { + return false; + } + return !_.some(OBJECT_PROPERTIES, function(key) { + return !!schema[key]; + }); +} + function normalizeName(name) { return name.replace('_', '-'); } @@ -200,6 +215,7 @@ Schemas.prototype.isPrimitiveDef = function(name) { }; Schemas.isPrimitiveType = isPrimitiveType; +Schemas.isEmptyObject = isEmptyObject; Schemas.prototype.getComplexDefinitions = function() { return _.pickBy(this.definitions, function(value) { From 11c709c466e9e1ed33c46b015cb344f2b19b0605 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 11 Jan 2018 16:10:53 -0600 Subject: [PATCH 25/53] Replace pure references in definitions with aliases --- swagger/support/tasks/simplify-swagger.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index 3ebd782ee..0b6a6decc 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -106,6 +106,10 @@ module.exports = function(grunt) { } else { grunt.log.writeln('ERROR '.red + 'Cannot find alias for: ' + path + ' (' + schema.allOf[0].$ref + ')'); } + } else if( schema.$ref ) { + // Replace pure references + context.aliases[path] = schema; + delete defs[k]; } else if( Schemas.isPrimitiveType(schema.type) ) { // For simple types in definitions, alias them context.aliases[path] = schema; From 786a2aa28a712b4aad54fff6271c2d4f0fc6fbb1 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 15 Jan 2018 13:34:15 -0600 Subject: [PATCH 26/53] Checkpoint commit of modifications for codegen. 
--- swagger/paths/groups.yaml | 3 +++ swagger/paths/users.yaml | 2 ++ swagger/schemas/definitions/collection.json | 3 ++- swagger/schemas/definitions/common.json | 3 ++- swagger/schemas/definitions/container.json | 3 ++- swagger/schemas/definitions/group.json | 11 +++++++++-- swagger/schemas/definitions/note.json | 5 ++++- swagger/schemas/definitions/tag.json | 9 +++++++-- swagger/schemas/output/user-new.json | 2 +- swagger/templates/container-item.yaml | 1 + swagger/templates/container.yaml | 3 ++- swagger/templates/file-item.yaml | 4 ++-- swagger/templates/file-list-upload.yaml | 2 +- swagger/templates/notes-note.yaml | 1 + swagger/templates/notes.yaml | 1 + swagger/templates/tags.yaml | 1 + 16 files changed, 41 insertions(+), 13 deletions(-) diff --git a/swagger/paths/groups.yaml b/swagger/paths/groups.yaml index 5576cb6a7..f9817a81b 100644 --- a/swagger/paths/groups.yaml +++ b/swagger/paths/groups.yaml @@ -25,6 +25,7 @@ $template_arguments: parameters: - name: body in: body + required: true schema: $ref: schemas/input/group-new.json responses: @@ -34,6 +35,7 @@ $template_arguments: $ref: schemas/output/group-new.json '400': $ref: '#/responses/400:invalid-body-json' + /groups/{GroupId}: parameters: - required: true @@ -61,6 +63,7 @@ $template_arguments: parameters: - in: body name: body + required: true schema: $ref: schemas/input/group-update.json responses: diff --git a/swagger/paths/users.yaml b/swagger/paths/users.yaml index f2af9b8fa..5b14a3e73 100644 --- a/swagger/paths/users.yaml +++ b/swagger/paths/users.yaml @@ -17,6 +17,7 @@ parameters: - name: body in: body + required: true schema: $ref: schemas/input/user-new.json responses: @@ -79,6 +80,7 @@ parameters: - name: body in: body + required: true schema: $ref: schemas/input/user-update.json description: > diff --git a/swagger/schemas/definitions/collection.json b/swagger/schemas/definitions/collection.json index d391f3987..050b3bacc 100644 --- a/swagger/schemas/definitions/collection.json +++ 
b/swagger/schemas/definitions/collection.json @@ -27,7 +27,8 @@ "properties": { "_id": {"$ref":"common.json#/definitions/objectid"} }, - "required": ["_id"] + "required": ["_id"], + "x-sdk-return": "_id" }, "collection-output":{ "type": "object", diff --git a/swagger/schemas/definitions/common.json b/swagger/schemas/definitions/common.json index 1a52e3dee..7a67f68cb 100644 --- a/swagger/schemas/definitions/common.json +++ b/swagger/schemas/definitions/common.json @@ -52,7 +52,8 @@ "_id": { "type": "string" } - } + }, + "x-sdk-return": "_id" } } } \ No newline at end of file diff --git a/swagger/schemas/definitions/container.json b/swagger/schemas/definitions/container.json index caf1f1699..77d27a36d 100644 --- a/swagger/schemas/definitions/container.json +++ b/swagger/schemas/definitions/container.json @@ -19,7 +19,8 @@ "properties": { "_id": {"$ref":"#/definitions/_id"} }, - "required": ["_id"] + "required": ["_id"], + "x-sdk-return": "_id" }, "container-reference": { "type": "object", diff --git a/swagger/schemas/definitions/group.json b/swagger/schemas/definitions/group.json index e5bab92d3..a9fcd525f 100644 --- a/swagger/schemas/definitions/group.json +++ b/swagger/schemas/definitions/group.json @@ -15,7 +15,13 @@ "label": {"$ref": "#/definitions/label"}, "permissions": {"$ref": "permission.json#/definitions/permission-output-list"}, "created": {"$ref":"created-modified.json#/definitions/created"}, - "modified": {"$ref":"created-modified.json#/definitions/modified"} + "modified": {"$ref":"created-modified.json#/definitions/modified"}, + "tags": { + "type": "array", + "items": { + "type": "string" + } + } }, "additionalProperties":false }, @@ -44,7 +50,8 @@ "properties": { "_id": {"$ref":"common.json#/definitions/string-id"} }, - "required": ["_id"] + "required": ["_id"], + "x-sdk-return": "_id" }, "group-output-list":{ "type":"array", diff --git a/swagger/schemas/definitions/note.json b/swagger/schemas/definitions/note.json index 58298f1a3..87efcdcc7 100644 --- 
a/swagger/schemas/definitions/note.json +++ b/swagger/schemas/definitions/note.json @@ -1,7 +1,10 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "text": {"type": "string"}, + "text": { + "type": "string", + "x-sdk-positional": true + }, "note-input":{ "type":"object", "properties":{ diff --git a/swagger/schemas/definitions/tag.json b/swagger/schemas/definitions/tag.json index 8b0a03b57..88d27a439 100644 --- a/swagger/schemas/definitions/tag.json +++ b/swagger/schemas/definitions/tag.json @@ -1,7 +1,12 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { - "value": {"type": "string", "minLength": 1, "maxLength": 32}, + "value": { + "type": "string", + "minLength": 1, + "maxLength": 32, + "x-sdk-positional": true + }, "tag":{ "type": "object", "properties":{ @@ -13,7 +18,7 @@ "tag-list":{ "type":"array", "items":{ - "allOf":[{"$ref":"#/definitions/tag"}] + "allOf":[{"type":"string"}] } } } diff --git a/swagger/schemas/output/user-new.json b/swagger/schemas/output/user-new.json index b008d72b7..cac0e5f79 100644 --- a/swagger/schemas/output/user-new.json +++ b/swagger/schemas/output/user-new.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", - "$ref": "../definitions/common.json#/definitions/object-created", + "allOf": [{"$ref": "../definitions/common.json#/definitions/object-created"}], "example": { "_id": "jane.doe@gmail.com" } diff --git a/swagger/templates/container-item.yaml b/swagger/templates/container-item.yaml index 77670df1c..d0a4b5b69 100644 --- a/swagger/templates/container-item.yaml +++ b/swagger/templates/container-item.yaml @@ -38,6 +38,7 @@ template: | parameters: - in: body name: body + required: true schema: $ref: {{{update-input-schema}}} responses: diff --git a/swagger/templates/container.yaml b/swagger/templates/container.yaml index c5efe50bc..ad003381e 100644 --- a/swagger/templates/container.yaml +++ b/swagger/templates/container.yaml @@ -24,12 +24,13 @@ template: | 
$ref: {{{list-output-schema}}} post: summary: Create a new {{resource}} - operationId: create_{{resource}} + operationId: add_{{resource}} tags: - '{{tag}}' parameters: - in: body name: body + required: true schema: $ref: {{{create-input-schema}}} responses: diff --git a/swagger/templates/file-item.yaml b/swagger/templates/file-item.yaml index 815b29fe9..331379066 100644 --- a/swagger/templates/file-item.yaml +++ b/swagger/templates/file-item.yaml @@ -28,7 +28,7 @@ template: | - Make another request with the received ticket id in the "ticket" parameter. A valid "Authorization" header is no longer required. When "view" is true, [RFC7233](https://tools.ietf.org/html/rfc7233) range request headers are supported. - operationId: download_{{resource}}_file + operationId: download_file_from_{{resource}} tags: - '{{tag}}' produces: @@ -55,7 +55,7 @@ template: | in: query type: string description: The filename of a zipfile member to download rather than the entire file - + x-sdk-download-ticket: get_{{resource}}_download_ticket responses: '200': description: '' diff --git a/swagger/templates/file-list-upload.yaml b/swagger/templates/file-list-upload.yaml index 3d28a3da9..ebe9d561f 100644 --- a/swagger/templates/file-list-upload.yaml +++ b/swagger/templates/file-list-upload.yaml @@ -17,7 +17,7 @@ template: | post: summary: Upload a file to {{resource}}. 
- operationId: upload_{{resource}}_file + operationId: upload_file_to_{{resource}} tags: - '{{tag}}' consumes: diff --git a/swagger/templates/notes-note.yaml b/swagger/templates/notes-note.yaml index 77636a02d..604dbe7d9 100644 --- a/swagger/templates/notes-note.yaml +++ b/swagger/templates/notes-note.yaml @@ -39,6 +39,7 @@ template: | parameters: - in: body name: body + required: true schema: $ref: schemas/input/note.json responses: diff --git a/swagger/templates/notes.yaml b/swagger/templates/notes.yaml index 60513ca89..e61052f26 100644 --- a/swagger/templates/notes.yaml +++ b/swagger/templates/notes.yaml @@ -22,6 +22,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/note.json responses: diff --git a/swagger/templates/tags.yaml b/swagger/templates/tags.yaml index 3c4ca6204..09a5f634f 100644 --- a/swagger/templates/tags.yaml +++ b/swagger/templates/tags.yaml @@ -23,6 +23,7 @@ template: | parameters: - name: body in: body + required: true schema: $ref: schemas/input/tag.json responses: From 510cc77782a61c93e0d4ee13f4f76e5c0261aed7 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 31 Jan 2018 13:51:46 -0600 Subject: [PATCH 27/53] Added x-sdk-include-empty for Gear model --- swagger/schemas/definitions/gear.json | 1 + 1 file changed, 1 insertion(+) diff --git a/swagger/schemas/definitions/gear.json b/swagger/schemas/definitions/gear.json index 674ac4fb0..05c7e7d1b 100644 --- a/swagger/schemas/definitions/gear.json +++ b/swagger/schemas/definitions/gear.json @@ -159,6 +159,7 @@ "url", "version" ], + "x-sdk-include-empty": [ "config", "inputs" ], "additionalProperties": false }, "gear-category": { From 9be94d1dda96b6d659692093133ddbc02f7ae37a Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 2 Feb 2018 12:10:38 -0600 Subject: [PATCH 28/53] Rename file to file-entry (python codegen had a problem with that name) --- swagger/schemas/definitions/analysis.json | 6 +++--- swagger/schemas/definitions/file.json | 4 ++-- 
2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index 49f7424f7..7cbcf722b 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -3,11 +3,11 @@ "definitions": { "inputs": { "type": ["array", "null"], - "items": {"$ref":"file.json#/definitions/file"} + "items": {"$ref":"file.json#/definitions/file-entry"} }, "outputs": { "type": ["array", "null"], - "items": {"$ref":"file.json#/definitions/file"} + "items": {"$ref":"file.json#/definitions/file-entry"} }, "analysis-input":{ "type":"object", @@ -35,7 +35,7 @@ "_id":{"$ref":"common.json#/definitions/objectid"}, "files":{ "type":"array", - "items":{"$ref":"file.json#/definitions/file"} + "items":{"$ref":"file.json#/definitions/file-entry"} }, "job":{ "oneOf":[ diff --git a/swagger/schemas/definitions/file.json b/swagger/schemas/definitions/file.json index 6fc0f1ad0..a407c33d0 100644 --- a/swagger/schemas/definitions/file.json +++ b/swagger/schemas/definitions/file.json @@ -41,7 +41,7 @@ "maxLength":106 }, "size":{"type":"integer"}, - "file": { + "file-entry": { "type": "object", "properties": { "name": {"$ref":"#/definitions/name"}, @@ -91,7 +91,7 @@ }, "file-output":{ "type": "object", - "allOf": [{"$ref":"#/definitions/file"}], + "allOf": [{"$ref":"#/definitions/file-entry"}], "required":["modified", "size"] }, "file-reference": { From 0a47869ff858b999288dc57cb8d928e23734f85d Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 5 Feb 2018 14:43:32 -0600 Subject: [PATCH 29/53] API Doc changes for codegen --- swagger/paths/collections.yaml | 12 +++++---- swagger/schemas/definitions/collection.json | 27 ++++++++++++++++++++- swagger/schemas/definitions/gear.json | 1 - 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/swagger/paths/collections.yaml b/swagger/paths/collections.yaml index 0c6ca703f..3ad9cb967 100644 --- a/swagger/paths/collections.yaml +++ 
b/swagger/paths/collections.yaml @@ -20,12 +20,13 @@ $template_arguments: $ref: examples/output/collection-list.json post: summary: Create a collection - operationId: create_collection + operationId: add_collection tags: - collections parameters: - - in: body - name: body + - name: body + in: body + required: true schema: $ref: schemas/input/collection.json responses: @@ -79,8 +80,9 @@ $template_arguments: tags: - collections parameters: - - in: body - name: body + - name: body + in: body + required: true schema: $ref: schemas/input/collection-update.json responses: diff --git a/swagger/schemas/definitions/collection.json b/swagger/schemas/definitions/collection.json index 050b3bacc..b96691d94 100644 --- a/swagger/schemas/definitions/collection.json +++ b/swagger/schemas/definitions/collection.json @@ -1,6 +1,31 @@ { "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ + "collection-node": { + "type": "object", + "properties": { + "level": { + "type": "string", + "enum": ["project", "session", "acquisition"] + }, + "_id": { "$ref": "common.json#/definitions/objectid" } + }, + "additionalProperties": false + }, + "collection-operation": { + "type": "object", + "properties": { + "operation": { + "type": "string", + "enum": ["add", "remove"] + }, + "nodes": { + "type": "array", + "items": { "$ref": "#/definitions/collection-node" } + } + }, + "additionalProperties": false + }, "collection-input":{ "type": "object", "properties": { @@ -18,7 +43,7 @@ "label": {"$ref": "common.json#/definitions/label"}, "info": {"$ref": "container.json#/definitions/info"}, "description": {"$ref": "common.json#/definitions/description"}, - "contents": { "type": "object" } + "contents": {"$ref": "#/definitions/collection-operation"} }, "additionalProperties": false }, diff --git a/swagger/schemas/definitions/gear.json b/swagger/schemas/definitions/gear.json index 05c7e7d1b..5c9c9acb2 100644 --- a/swagger/schemas/definitions/gear.json +++ 
b/swagger/schemas/definitions/gear.json @@ -164,7 +164,6 @@ }, "gear-category": { "type": "string", - "enum": [ "utility", "analysis", "converter", "qa" ], "description": "The gear category" }, "gear-doc": { From 3677395b971145b6e5c8ea8dab86471bf8802faa Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 5 Feb 2018 16:43:06 -0600 Subject: [PATCH 30/53] Added date-time format to created/modified timestamps --- swagger/schemas/definitions/created-modified.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/swagger/schemas/definitions/created-modified.json b/swagger/schemas/definitions/created-modified.json index 1d33cac5d..715171615 100644 --- a/swagger/schemas/definitions/created-modified.json +++ b/swagger/schemas/definitions/created-modified.json @@ -2,10 +2,12 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ "created": { - "type": "string" + "type": "string", + "format": "date-time" }, "modified": { - "type": "string" + "type": "string", + "format": "date-time" } } } From 68c8c68b1826d4b0586540afa5613666abe845ce Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 6 Feb 2018 16:35:31 -0600 Subject: [PATCH 31/53] Set default values for info_exists. 
--- swagger/schemas/definitions/acquisition.json | 2 +- swagger/schemas/definitions/container.json | 2 +- swagger/schemas/definitions/file.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/swagger/schemas/definitions/acquisition.json b/swagger/schemas/definitions/acquisition.json index 4c646e5c1..3db1f79d2 100644 --- a/swagger/schemas/definitions/acquisition.json +++ b/swagger/schemas/definitions/acquisition.json @@ -55,7 +55,7 @@ "timezone": {"$ref":"container.json#/definitions/timezone"}, "created": {"$ref":"created-modified.json#/definitions/created"}, "modified": {"$ref":"created-modified.json#/definitions/modified"}, - "info_exists": {"type": "boolean"}, + "info_exists": {"type": "boolean", "default": false}, "permissions":{ "type":"array", "items":{"$ref":"permission.json#/definitions/permission-output-default-required"} diff --git a/swagger/schemas/definitions/container.json b/swagger/schemas/definitions/container.json index 77d27a36d..7440e22b7 100644 --- a/swagger/schemas/definitions/container.json +++ b/swagger/schemas/definitions/container.json @@ -4,7 +4,7 @@ "_id": {"type": "string"}, "public": {"type": "boolean"}, "info": {"$ref": "common.json#/definitions/info"}, - "info_exists": {"type": "boolean"}, + "info_exists": {"type": "boolean", "default": false}, "uid": {"type": "string"}, "timestamp": {"type": ["string", "null"], "format": "date-time"}, "timezone": {"type": "string"}, diff --git a/swagger/schemas/definitions/file.json b/swagger/schemas/definitions/file.json index a407c33d0..9de8d67a1 100644 --- a/swagger/schemas/definitions/file.json +++ b/swagger/schemas/definitions/file.json @@ -61,7 +61,7 @@ "created": {"$ref":"created-modified.json#/definitions/created"}, "modified": {"$ref":"created-modified.json#/definitions/modified"}, "size": {"$ref":"#/definitions/size"}, - "info_exists": {"type": "boolean"}, + "info_exists": {"type": "boolean", "default": false}, "input": {"type":"boolean"}, "output": {"type":"boolean"} }, 
From fee194e3175f4b26cabdd1c778ed777832804ee9 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 7 Feb 2018 12:10:21 -0600 Subject: [PATCH 32/53] Fix failing unit tests due to schema changes --- swagger/schemas/definitions/subject.json | 22 ++++++++++++++++++---- tests/unit_tests/python/test_validators.py | 13 +++++++++++-- 2 files changed, 29 insertions(+), 6 deletions(-) diff --git a/swagger/schemas/definitions/subject.json b/swagger/schemas/definitions/subject.json index 408e09e4f..7ce0ba3ec 100644 --- a/swagger/schemas/definitions/subject.json +++ b/swagger/schemas/definitions/subject.json @@ -5,10 +5,24 @@ "firstname": { "type": "string", "maxLength": 64 }, "lastname": { "type": "string", "maxLength": 64 }, "age": { "type": ["integer", "null"] }, - "sex": { "type": "string", "enum": ["male", "female", "other", "unknown", null] }, - "race": { "type": "string", "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported", null] }, - "ethnicity": { "type": "string", "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported", null] }, - + "sex": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["male", "female", "other", "unknown"] } + ] + }, + "race": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["American Indian or Alaska Native", "Asian", "Native Hawaiian or Other Pacific Islander", "Black or African American", "White", "More Than One Race", "Unknown or Not Reported"] } + ] + }, + "ethnicity": { + "oneOf": [ + { "type": "null"}, + { "type": "string", "enum": ["Not Hispanic or Latino", "Hispanic or Latino", "Unknown or Not Reported"] } + ] + }, "code": { "type": "string", "maxLength": 64 }, "tags": { "type": "array", "items": {"type": "string"} }, "subject-input":{ diff --git a/tests/unit_tests/python/test_validators.py b/tests/unit_tests/python/test_validators.py index 
0dcfb157e..27cced13e 100644 --- a/tests/unit_tests/python/test_validators.py +++ b/tests/unit_tests/python/test_validators.py @@ -62,7 +62,7 @@ def test_payload(): def test_file_output_valid(): payload = [{ - 'modified': 'yesterday', + 'modified': '2018-02-07T17:27:21+00:00', 'size': 10 }] schema_uri = validators.schema_uri("output", "file-list.json") @@ -71,13 +71,22 @@ def test_file_output_valid(): def test_file_output_invalid(): payload = [{ - 'modified': 'yesterday' + 'modified': '2018-02-07T17:27:21+00:00' }] schema_uri = validators.schema_uri("output", "file-list.json") schema, resolver = validators._resolve_schema(schema_uri) with pytest.raises(jsonschema.exceptions.ValidationError): validators._validate_json(payload, schema, resolver) +def test_jsonschema_validate_enum_with_null(): + schema = { + 'oneOf': [ + { 'type': 'null' }, + { 'type': 'string', 'enum': ['true', 'false'] } + ] + } + jsonschema.validate('true', schema) + jsonschema.validate(None, schema) # ===== Automated Tests ===== From e0bddb9cd1b2adbd3282a956172e39b060d7acee Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 09:25:23 -0600 Subject: [PATCH 33/53] Update file to file-entry in analysis list --- swagger/schemas/definitions/analysis.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index 7cbcf722b..b6dd0d2a7 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -58,7 +58,7 @@ "_id":{"$ref":"common.json#/definitions/objectid"}, "files":{ "type":"array", - "items":{"$ref":"file.json#/definitions/file"} + "items":{"$ref":"file.json#/definitions/file-entry"} }, "job": {"$ref":"common.json#/definitions/objectid"}, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, From cb7de2e33323d7abaaeb0da32ae2c108b039f0db Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 09:29:29 -0600 Subject: [PATCH 
34/53] Update schemas for analyses in SDK This update forces us to use the inflated job for single analysis end points. --- swagger/schemas/definitions/analysis.json | 5 ++++- swagger/support/tasks/simplify-swagger.js | 5 +++++ swagger/templates/analyses-list.yaml | 1 + swagger/templates/analysis-item.yaml | 1 + 4 files changed, 11 insertions(+), 1 deletion(-) diff --git a/swagger/schemas/definitions/analysis.json b/swagger/schemas/definitions/analysis.json index b6dd0d2a7..49944a9e5 100644 --- a/swagger/schemas/definitions/analysis.json +++ b/swagger/schemas/definitions/analysis.json @@ -41,7 +41,10 @@ "oneOf":[ {"$ref":"common.json#/definitions/objectid"}, {"$ref": "job.json#/definitions/job-output"} - ] + ], + "x-sdk-schema": { + "$ref": "job.json#/definitions/job-output" + } }, "notes": {"$ref":"note.json#/definitions/notes-list-output"}, "description": {"$ref":"common.json#/definitions/description"}, diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index 0b6a6decc..792013787 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -122,6 +122,11 @@ module.exports = function(grunt) { // returns a simplified version of schema function simplifySchema(schema, path, context) { schema = _.cloneDeep(schema); + // If an x-sdk-schema is specified, use that + if( schema['x-sdk-schema'] ) { + schema = schema['x-sdk-schema']; + } + if( !isValidSchema(schema) ) { grunt.log.writeln('WARNING '.red + 'Invalid schema (no object type specified) at: ' + formatPath(path)); schema.type = 'object'; diff --git a/swagger/templates/analyses-list.yaml b/swagger/templates/analyses-list.yaml index d4ee05254..6a8ed7e2d 100644 --- a/swagger/templates/analyses-list.yaml +++ b/swagger/templates/analyses-list.yaml @@ -52,6 +52,7 @@ template: | in: query type: boolean description: Return job as an object instead of an id + x-sdk-default: 'true' responses: '200': description: Returns the id of the 
analysis that was created. diff --git a/swagger/templates/analysis-item.yaml b/swagger/templates/analysis-item.yaml index 958017691..daf8ccc7a 100644 --- a/swagger/templates/analysis-item.yaml +++ b/swagger/templates/analysis-item.yaml @@ -36,6 +36,7 @@ template: | in: query type: boolean description: Return job as an object instead of an id + x-sdk-default: 'true' responses: '200': description: '' From e467742e5fdde41a5ab0af8636eb5fb3b350f50f Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 09:29:45 -0600 Subject: [PATCH 35/53] Ignore '+' and '-' properties for download filters --- swagger/schemas/definitions/download.json | 1 + 1 file changed, 1 insertion(+) diff --git a/swagger/schemas/definitions/download.json b/swagger/schemas/definitions/download.json index 8e7c00314..8dac67445 100644 --- a/swagger/schemas/definitions/download.json +++ b/swagger/schemas/definitions/download.json @@ -9,6 +9,7 @@ "-": {"$ref": "#/definitions/filter-items"}, "minus": {"$ref": "#/definitions/filter-items"} }, + "x-sdk-ignore-properties": ["+", "-"], "additionalProperties": false }, "filter-items": { From 08bdbd9d8643c13b385176a8cdca42c071934c2c Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Feb 2018 15:41:04 -0600 Subject: [PATCH 36/53] Add default values for search parameters --- swagger/paths/dataexplorer.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/swagger/paths/dataexplorer.yaml b/swagger/paths/dataexplorer.yaml index b17c60e52..1b5b63727 100644 --- a/swagger/paths/dataexplorer.yaml +++ b/swagger/paths/dataexplorer.yaml @@ -6,9 +6,11 @@ - name: simple in: query type: boolean + x-sdk-default: 'true' - name: limit in: query type: integer + x-sdk-default: 100 - name: body in: body required: true From 6086aa0118f0cb54b5c08f40c0e71233ad2085d8 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 13 Feb 2018 16:03:41 -0600 Subject: [PATCH 37/53] Update packfile output definition --- swagger/schemas/definitions/packfile.json | 5 +++-- 1 
file changed, 3 insertions(+), 2 deletions(-) diff --git a/swagger/schemas/definitions/packfile.json b/swagger/schemas/definitions/packfile.json index d40dcffd2..28e6ba53d 100644 --- a/swagger/schemas/definitions/packfile.json +++ b/swagger/schemas/definitions/packfile.json @@ -52,8 +52,9 @@ "packfile-start": { "type":"object", "properties":{ - "token":{"$ref":"common.json#/definitions/objectid"} - } + "token":{"type": "string"} + }, + "x-sdk-return": "token" } } } From 4d35b4dadd3eb162b972938e0863f49c535809e6 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 27 Feb 2018 09:52:50 -0600 Subject: [PATCH 38/53] Add model merge for swagger codegen This commit will allow merging input and output models based on the `x-sdk-model` property in the JSON definitions. --- swagger/schemas/definitions/acquisition.json | 28 ++--- swagger/schemas/definitions/collection.json | 3 + swagger/schemas/definitions/container.json | 2 +- swagger/schemas/definitions/device.json | 19 ++-- swagger/schemas/definitions/file.json | 14 ++- swagger/schemas/definitions/group.json | 12 ++- swagger/schemas/definitions/job.json | 9 +- swagger/schemas/definitions/note.json | 6 +- swagger/schemas/definitions/packfile.json | 3 +- swagger/schemas/definitions/permission.json | 6 +- swagger/schemas/definitions/project.json | 6 +- swagger/schemas/definitions/rule.json | 6 +- swagger/schemas/definitions/session.json | 6 +- swagger/schemas/definitions/subject.json | 9 +- swagger/schemas/definitions/user.json | 9 +- swagger/support/tasks/simplify-swagger.js | 101 +++++++++++++++++++ 16 files changed, 188 insertions(+), 51 deletions(-) diff --git a/swagger/schemas/definitions/acquisition.json b/swagger/schemas/definitions/acquisition.json index 3db1f79d2..d0d794369 100644 --- a/swagger/schemas/definitions/acquisition.json +++ b/swagger/schemas/definitions/acquisition.json @@ -2,17 +2,18 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions":{ "acquisition-input":{ - "type": "object", - 
"properties": { - "public": {"$ref":"container.json#/definitions/public"}, - "label": {"$ref":"common.json#/definitions/label"}, - "info": {"$ref":"container.json#/definitions/info"}, - "session": {"$ref":"common.json#/definitions/objectid"}, - "uid": {"$ref":"container.json#/definitions/uid"}, - "timestamp": {"$ref":"container.json#/definitions/timestamp"}, - "timezone": {"$ref":"container.json#/definitions/timezone"} - }, - "additionalProperties":false + "type": "object", + "properties": { + "public": {"$ref":"container.json#/definitions/public"}, + "label": {"$ref":"common.json#/definitions/label"}, + "info": {"$ref":"container.json#/definitions/info"}, + "session": {"$ref":"common.json#/definitions/objectid"}, + "uid": {"$ref":"container.json#/definitions/uid"}, + "timestamp": {"$ref":"container.json#/definitions/timestamp"}, + "timezone": {"$ref":"container.json#/definitions/timezone"} + }, + "additionalProperties":false, + "x-sdk-model": "acquisition" }, "acquisition-metadata-input": { "type": "object", @@ -55,7 +56,7 @@ "timezone": {"$ref":"container.json#/definitions/timezone"}, "created": {"$ref":"created-modified.json#/definitions/created"}, "modified": {"$ref":"created-modified.json#/definitions/modified"}, - "info_exists": {"type": "boolean", "default": false}, + "info_exists": {"type": "boolean"}, "permissions":{ "type":"array", "items":{"$ref":"permission.json#/definitions/permission-output-default-required"} @@ -67,7 +68,8 @@ "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "acquisition" } } } diff --git a/swagger/schemas/definitions/collection.json b/swagger/schemas/definitions/collection.json index b96691d94..b853885de 100644 --- a/swagger/schemas/definitions/collection.json +++ b/swagger/schemas/definitions/collection.json @@ -34,6 +34,7 @@ "info": {"$ref": "container.json#/definitions/info"}, "description": {"$ref": 
"common.json#/definitions/description"} }, + "x-sdk-model": "collection", "additionalProperties": false }, "collection-input-with-contents":{ @@ -45,6 +46,7 @@ "description": {"$ref": "common.json#/definitions/description"}, "contents": {"$ref": "#/definitions/collection-operation"} }, + "x-sdk-model": "collection", "additionalProperties": false }, "collection-new-output": { @@ -82,6 +84,7 @@ "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, + "x-sdk-model": "collection", "additionalProperties":false } } diff --git a/swagger/schemas/definitions/container.json b/swagger/schemas/definitions/container.json index 7440e22b7..77d27a36d 100644 --- a/swagger/schemas/definitions/container.json +++ b/swagger/schemas/definitions/container.json @@ -4,7 +4,7 @@ "_id": {"type": "string"}, "public": {"type": "boolean"}, "info": {"$ref": "common.json#/definitions/info"}, - "info_exists": {"type": "boolean", "default": false}, + "info_exists": {"type": "boolean"}, "uid": {"type": "string"}, "timestamp": {"type": ["string", "null"], "format": "date-time"}, "timezone": {"type": "string"}, diff --git a/swagger/schemas/definitions/device.json b/swagger/schemas/definitions/device.json index 4255269de..85ac61528 100644 --- a/swagger/schemas/definitions/device.json +++ b/swagger/schemas/definitions/device.json @@ -33,21 +33,24 @@ "interval": {"$ref":"#/definitions/interval"}, "last_seen": {"$ref":"common.json#/definitions/timestamp"} }, + "x-sdk-model": "device", "additionalProperties": false }, "device-input":{ - "type": "object", - "properties": { - "interval": {"$ref":"#/definitions/interval"}, - "errors": {"$ref":"#/definitions/errors"}, - "info": {"$ref":"common.json#/definitions/info"} - }, - "additionalProperties": false + "type": "object", + "properties": { + "interval": {"$ref":"#/definitions/interval"}, + "errors": {"$ref":"#/definitions/errors"}, + "info": {"$ref":"common.json#/definitions/info"} + }, + "x-sdk-model": "device", + "additionalProperties": false 
}, "device-output": { "type": "object", "allOf": [{"$ref":"#/definitions/device"}], - "required": ["_id", "name", "method", "last_seen"] + "required": ["_id", "name", "method", "last_seen"], + "x-sdk-model": "device" }, "device-status": { "type":"object", diff --git a/swagger/schemas/definitions/file.json b/swagger/schemas/definitions/file.json index 9de8d67a1..8f1ce5a06 100644 --- a/swagger/schemas/definitions/file.json +++ b/swagger/schemas/definitions/file.json @@ -61,11 +61,12 @@ "created": {"$ref":"created-modified.json#/definitions/created"}, "modified": {"$ref":"created-modified.json#/definitions/modified"}, "size": {"$ref":"#/definitions/size"}, - "info_exists": {"type": "boolean", "default": false}, + "info_exists": {"type": "boolean"}, "input": {"type":"boolean"}, "output": {"type":"boolean"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-input":{ "type": "object", @@ -78,7 +79,8 @@ "tags": {"$ref":"#/definitions/tags"}, "info": {"$ref":"common.json#/definitions/info"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-update":{ "type": "object", @@ -87,12 +89,14 @@ "modality": {"$ref":"#/definitions/modality"}, "measurements": {"$ref":"#/definitions/measurements"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "file-entry" }, "file-output":{ "type": "object", "allOf": [{"$ref":"#/definitions/file-entry"}], - "required":["modified", "size"] + "required":["modified", "size"], + "x-sdk-model": "file-entry" }, "file-reference": { "type": "object", diff --git a/swagger/schemas/definitions/group.json b/swagger/schemas/definitions/group.json index a9fcd525f..9bbd1bd2f 100644 --- a/swagger/schemas/definitions/group.json +++ b/swagger/schemas/definitions/group.json @@ -23,7 +23,8 @@ } } }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "group" }, "group-input":{ "type": "object", 
@@ -31,7 +32,8 @@ "_id":{"$ref":"common.json#/definitions/string-id"}, "label": {"$ref": "#/definitions/label"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "group" }, "group-metadata-input": { "type": "object", @@ -43,7 +45,8 @@ "group-output":{ "type": "object", "allOf": [{"$ref":"#/definitions/group"}], - "required": ["permissions", "created","modified","_id"] + "required": ["permissions", "created","modified","_id"], + "x-sdk-model": "group" }, "group-new-output": { "type": "object", @@ -60,7 +63,8 @@ "project-group-output":{ "type": "object", "allOf": [{"$ref":"#/definitions/group"}], - "required": ["_id"] + "required": ["_id"], + "x-sdk-model": "group" }, "project-group-output-list":{ "type":"array", diff --git a/swagger/schemas/definitions/job.json b/swagger/schemas/definitions/job.json index 61540f0de..9d5a8db52 100644 --- a/swagger/schemas/definitions/job.json +++ b/swagger/schemas/definitions/job.json @@ -152,7 +152,8 @@ "saved_files":{"$ref":"#/definitions/saved_files"}, "produced_metadata":{"$ref":"#/definitions/produced-metadata"} }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model":"job" }, "job-input": { "type":"object", @@ -164,7 +165,8 @@ "config":{"$ref":"#/definitions/config"} }, "required": ["gear_id"], - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model":"job" }, "job-output": { "type": "object", @@ -172,7 +174,8 @@ "required": [ "id", "gear_id", "inputs", "config", "destination", "tags", "state", "attempt" - ] + ], + "x-sdk-model":"job" } } } diff --git a/swagger/schemas/definitions/note.json b/swagger/schemas/definitions/note.json index 87efcdcc7..46cd5786b 100644 --- a/swagger/schemas/definitions/note.json +++ b/swagger/schemas/definitions/note.json @@ -10,7 +10,8 @@ "properties":{ "text":{"$ref":"#/definitions/text"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "note" }, "notes-list-input": { "type": 
"array", @@ -26,7 +27,8 @@ "user":{"$ref":"common.json#/definitions/user-id"} }, "additionalProperties": false, - "required":["_id", "text", "created", "modified", "user"] + "required":["_id", "text", "created", "modified", "user"], + "x-sdk-model": "note" }, "notes-list-output":{ "type":"array", diff --git a/swagger/schemas/definitions/packfile.json b/swagger/schemas/definitions/packfile.json index 28e6ba53d..a9cf41aad 100644 --- a/swagger/schemas/definitions/packfile.json +++ b/swagger/schemas/definitions/packfile.json @@ -47,7 +47,8 @@ "packfile": {"$ref":"#/definitions/packfile-packfile-input"} }, "required": ["project", "session", "acquisition", "packfile"], - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "packfile" }, "packfile-start": { "type":"object", diff --git a/swagger/schemas/definitions/permission.json b/swagger/schemas/definitions/permission.json index 62630cdfc..c32996e1e 100644 --- a/swagger/schemas/definitions/permission.json +++ b/swagger/schemas/definitions/permission.json @@ -11,11 +11,13 @@ "_id":{"$ref":"common.json#/definitions/user-id"}, "access":{"$ref":"#/definitions/access"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "permission" }, "permission-output-default-required":{ "allOf":[{"$ref":"#/definitions/permission"}], - "required":["_id", "access"] + "required":["_id", "access"], + "x-sdk-model": "permission" }, "permission-output-list": { "type": "array", diff --git a/swagger/schemas/definitions/project.json b/swagger/schemas/definitions/project.json index 366a7642c..84fa920ea 100644 --- a/swagger/schemas/definitions/project.json +++ b/swagger/schemas/definitions/project.json @@ -10,7 +10,8 @@ "description": {"$ref":"common.json#/definitions/description"}, "group": {"$ref":"common.json#/definitions/string-id"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "project" }, "project-metadata-input": { "type": "object", @@ -52,7 
+53,8 @@ "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "project" } } } diff --git a/swagger/schemas/definitions/rule.json b/swagger/schemas/definitions/rule.json index 3e2585774..d8fca6377 100644 --- a/swagger/schemas/definitions/rule.json +++ b/swagger/schemas/definitions/rule.json @@ -35,7 +35,8 @@ "all": { "$ref": "#/definitions/rule-items" }, "disabled": { "type": "boolean" } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "rule" }, "rule-output": { @@ -47,7 +48,8 @@ "any": { "$ref": "#/definitions/rule-items" }, "all": { "$ref": "#/definitions/rule-items" }, "disabled": { "type": "boolean" } - } + }, + "x-sdk-model": "rule" } } } diff --git a/swagger/schemas/definitions/session.json b/swagger/schemas/definitions/session.json index 485778684..668f08382 100644 --- a/swagger/schemas/definitions/session.json +++ b/swagger/schemas/definitions/session.json @@ -18,7 +18,8 @@ "timezone": {"$ref":"container.json#/definitions/timezone"}, "subject": {"$ref": "subject.json#/definitions/subject-input"} }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "session" }, "session-metadata-input": { "type": "object", @@ -69,7 +70,8 @@ "items":{"$ref":"analysis.json#/definitions/analysis-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "session" }, "session-jobs-output": { "type": "object", diff --git a/swagger/schemas/definitions/subject.json b/swagger/schemas/definitions/subject.json index 7ce0ba3ec..b049b82a6 100644 --- a/swagger/schemas/definitions/subject.json +++ b/swagger/schemas/definitions/subject.json @@ -45,7 +45,8 @@ "items":{"$ref":"file.json#/definitions/file-input"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "subject" }, "subject-output":{ "type": "object", @@ -68,11 +69,13 @@ 
"items":{"$ref":"file.json#/definitions/file-output"} } }, - "additionalProperties": false + "additionalProperties": false, + "x-sdk-model": "subject" }, "subject-output-default-required":{ "allOf":[{"$ref":"#/definitions/subject-output"}], - "required":["_id"] + "required":["_id"], + "x-sdk-model": "subject" } } } diff --git a/swagger/schemas/definitions/user.json b/swagger/schemas/definitions/user.json index 24f55a3fa..bc8bb0f65 100644 --- a/swagger/schemas/definitions/user.json +++ b/swagger/schemas/definitions/user.json @@ -57,7 +57,8 @@ "firstlogin":{"$ref":"#/definitions/firstlogin"}, "lastlogin":{"$ref":"#/definitions/lastlogin"} }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "user" }, "user-output":{ "type":"object", @@ -77,7 +78,8 @@ "created":{"$ref":"created-modified.json#/definitions/created"}, "modified":{"$ref":"created-modified.json#/definitions/modified"} }, - "additionalProperties":false + "additionalProperties":false, + "x-sdk-model": "user" }, "user-output-api-key": { "type":"object", @@ -102,7 +104,8 @@ "required":[ "_id", "firstname", "lastname", "root", "email", "created", "modified" - ] + ], + "x-sdk-model": "user" } } } diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index 792013787..31f96c0c3 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -29,6 +29,15 @@ module.exports = function(grunt) { aliases: {} }; + try { + // Merge models + // for example, this will merge group-input and group-output into group based on the + // x-sdk-model property + mergeModels(root, context); + } catch( e ) { + grunt.fail.warn('ERROR: '.red + ' ' + e); + } + // Walk through definitions, simplifying models where we can simplifyDefinitions(root, context); @@ -161,6 +170,98 @@ module.exports = function(grunt) { return schema; } + // Merge all models that have the x-sdk-model property + function mergeModels(root, context) { 
+ var defs = root.definitions||{}; + var keys = _.keys(defs); + var models = {}; + var aliases = {}; + + // First collect all the models to be merged + _.each(keys, function(k) { + var schema = defs[k]; + if( schema['x-sdk-model'] ) { + var modelName = schema['x-sdk-model']; + if( !models[modelName] ) { + models[modelName] = []; + } + models[modelName].push({ + id: k, + schema: schema + }); + + // Create temporary aliases for comparing properties + aliases['#/definitions/' + k] = '#/definitions/' + modelName; + } + }); + + // Then perform the merge + keys = _.keys(models); + _.each(keys, function(modelName) { + var schemas = models[modelName]; + var schema = _.cloneDeep(schemas[0]).schema; + var refSchema = { + $ref: '#/definitions/' + modelName + }; + + for( var i = 1; i < schemas.length; i++ ) { + // Merge each schema into the current + mergeSchema(modelName, schema, schemas[i], aliases); + } + + // Add aliases and delete the original models + for( var i = 0; i < schemas.length; i++ ) { + var id = schemas[i].id; + context.aliases['#/definitions/' + id] = refSchema; + delete defs[id]; + } + + // Remove fields that are no longer relevant + delete schema['x-sdk-model']; + delete schema['required']; + + defs[modelName] = schema; + }); + } + + function mergeSchema(name, schema, src, aliases) { + schema.properties = schema.properties||{}; + var dstProps = schema.properties; + var srcProps = src.schema.properties||{}; + + var keys = _.keys(srcProps); + _.each(keys, function(k) { + // Compare, after resolving aliases + // This way, file-input and file-output resolve to file-entry (for example) + // and are treated as the same for comparison purposes + var srcProp = resolveAlias(srcProps[k], aliases); + var dstProp = resolveAlias(dstProps[k], aliases); + if( dstProp && !_.isEqual(srcProp, dstProp) ) { + throw 'Cannot merge model ' + src.id + ' into ' + name + ': incompatible "' + k + '" property'; + } else { + dstProps[k] = srcProp; + } + }); + } + + function 
resolveAlias(schema, aliases) { + // Simple alias resolution where aliases is a map of: + // #/definition/model1 to #/defintion/model2 + if( !schema ) { + return schema; + } + + return walk(schema, function(obj) { + if( obj.$ref ) { + var alias = aliases[obj.$ref]; + if( alias ) { + return _.extend({}, obj, { $ref: alias }); + } + } + return obj; + }); + } + function resolvePathObj(root, path) { var current = root; path = path.slice(); From 087a3b112543143ae3edd2b78280484e273c19e3 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 2 Mar 2018 12:13:03 -0600 Subject: [PATCH 39/53] Add additional handling for polymorphic models --- swagger/support/tasks/simplify-swagger.js | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/swagger/support/tasks/simplify-swagger.js b/swagger/support/tasks/simplify-swagger.js index 31f96c0c3..698d92f60 100644 --- a/swagger/support/tasks/simplify-swagger.js +++ b/swagger/support/tasks/simplify-swagger.js @@ -158,7 +158,17 @@ module.exports = function(grunt) { grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); } } else { - grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); + // Still replace aliases + for( var i = 0; i < schema.allOf.length; i++ ) { + var alias = context.aliases[schema.allOf[i].$ref]; + if( alias ) { + schema.allOf[i] = _.cloneDeep(alias); + } + } + // It's not an error to not simplify polymorphic types + if( !schema['x-discriminator-value'] ) { + grunt.log.writeln('WARNING Cannot simplify "allOf" definition at: ' + formatPath(path)); + } } } else if( schema.$ref ) { // Replace alias for $ref fields From 49f97ebbe7f67c0e16a50413907c2397b201b257 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 5 Mar 2018 14:18:24 -0600 Subject: [PATCH 40/53] Generalize resolver approach This commit updates resolver to make use of mongo indexes and retrieve fewer nodes from the database. 
Also allows resolving just a single container id without retrieving children. --- api/config.py | 6 +- api/handlers/resolvehandler.py | 4 +- api/resolver.py | 318 ++++++++---------- .../integration_tests/python/test_uploads.py | 21 +- 4 files changed, 170 insertions(+), 179 deletions(-) diff --git a/api/config.py b/api/config.py index 537a01528..beb182257 100644 --- a/api/config.py +++ b/api/config.py @@ -231,11 +231,11 @@ def initialize_db(): log.info('Initializing database, creating indexes') # TODO review all indexes db.users.create_index('api_key.key') - db.projects.create_index([('gid', 1), ('name', 1)]) - db.sessions.create_index('project') + db.projects.create_index([('group', 1), ('label', 1)]) + db.sessions.create_index([('project', 1), ('label', 1)]) db.sessions.create_index('uid') db.sessions.create_index('created') - db.acquisitions.create_index('session') + db.acquisitions.create_index([('session', 1), ('label', 1)]) db.acquisitions.create_index('uid') db.acquisitions.create_index('collections') db.analyses.create_index([('parent.type', 1), ('parent.id', 1)]) diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index 2372fda1e..bd21381ea 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -16,7 +16,9 @@ def resolve(self): self.abort(403, 'Request requires login') doc = self.request.json - result = Resolver.resolve(doc['path']) + + resolver = Resolver() + result = resolver.resolve(doc['path']) # Cancel the request if anything in the path is unauthorized; remove any children that are unauthorized. if not self.superuser_request: diff --git a/api/resolver.py b/api/resolver.py index 0b110ea1c..1c070ebc0 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -4,15 +4,16 @@ from . 
import config from .web.errors import APINotFoundException +from bson.objectid import ObjectId +from collections import deque class Node(object): - # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. # As neither property should ever change, this sort should be consistent sorting = [('created', 1), ('_id', 1)] # Globally disable extraneous properties of unbounded length, along with some PHI fields. - projection = { + default_projection = { 'files': 0, 'info': 0, 'tags': 0, @@ -25,154 +26,93 @@ class Node(object): 'subject.lastname': 0, } - # Add some more fields for debugging purposes. - # projection['roles'] = 0 - # projection['permissions'] = 0 + # In some cases we only want to resolve the id of a container + id_only_projection = { + '_id': 1, + 'label': 1, + 'permissions': 1, + } - @staticmethod - def get_children(parent): - raise NotImplementedError() # pragma: no cover + def __init__(self, collection, node_type, parent, files=True, use_id=False, object_id=True): + self.collection = collection + self.node_type = node_type + self.parent = parent + self.files = files + self.use_id = use_id + self.object_id = object_id + + def find(self, criterion, parent=None, id_only=False, include_files=False, use_id=False, limit=0): + query = { + 'deleted': {'$exists': False} + } - @staticmethod - def filter(children, criterion, _id=False): - raise NotImplementedError() # pragma: no cover + # Setup criterion match + if criterion: + if use_id or self.use_id: + if self.object_id: + query['_id'] = ObjectId(criterion) + else: + query['_id'] = criterion + else: + query['label'] = criterion + + # Add parent to query + if parent and self.parent: + query[self.parent] = parent['_id'] + + # Setup projection + if id_only: + proj = Node.id_only_projection + else: + proj = Node.default_projection.copy() + if include_files: + del proj['files'] -def _get_files(table, match): - """ - Return a consistently-ordered set of files for a 
given container query. - """ + results = list(config.db[self.collection].find(query, proj, sort=Node.sorting, limit=limit)) + for el in results: + el['node_type'] = self.node_type + return results - pipeline = [ - {'$match': match }, - {'$unwind': '$files'}, - {'$sort': {'files.name': 1}}, - {'$group': {'_id':'$_id', 'files': {'$push':'$files'}}} - ] - result = config.mongo_pipeline(table, pipeline) - if len(result) == 0: - return [] +PROJECT_TREE = [ + Node('groups', 'group', None, False, True, False), + Node('projects', 'project', 'group'), + Node('sessions', 'session', 'project'), + Node('acquisitions', 'acquisition', 'session') +] - files = result[0]['files'] - for x in files: - x.update({'node_type': 'file'}) - return files +def parse_criterion(value): + if not value: + return False, None -def _get_docs(table, label, match): - match_nondeleted = match.copy() - match_nondeleted['deleted'] = {'$exists': False} - results = list(config.db[table].find(match, Node.projection, sort=Node.sorting)) - for y in results: - y.update({'node_type': label}) - return results + use_id = False + # Check for syntax + if value.startswith(''): + value = value[4:len(value)-1] + use_id = True + return use_id, value -class FileNode(Node): - @staticmethod - def get_children(parent): +def pop_files(container): + """ + Return a consistently-ordered set of files for a given container. 
+ """ + if not container: return [] - @staticmethod - def filter(children, criterion, _id=False): - raise APINotFoundException("Files have no children") - -class AcquisitionNode(Node): - @staticmethod - def get_children(parent): - files = _get_files('acquisitions', {'_id' : parent['_id'] }) - - return files - - @staticmethod - def filter(children, criterion, _id=False): - for x in children: - if x['node_type'] == "file" and x.get('name') == criterion: - return x, FileNode - raise APINotFoundException('No ' + criterion + ' file found.') + files = container.pop('files', []) -class SessionNode(Node): + files.sort(key=lambda f: f.get('name', '')) + for f in files: + f['node_type'] = 'file' - @staticmethod - def get_children(parent): - acqs = _get_docs('acquisitions', 'acquisition', {'session' : parent['_id']}) - files = _get_files('sessions', {'_id' : parent['_id'] }) - - return list(acqs) + files - - @staticmethod - def filter(children, criterion, _id=False): - if _id: - selectAcq = '_id' - selectFil = '_id' - else: - selectAcq = 'label' - selectFil = 'name' - - for x in children: - if x['node_type'] == "acquisition" and str(x.get(selectAcq)) == criterion: - return x, AcquisitionNode - if x['node_type'] == "file" and str(x.get(selectFil)) == criterion: - return x, FileNode - raise APINotFoundException('No ' + criterion + ' acquisition or file found.') - -class ProjectNode(Node): - - @staticmethod - def get_children(parent): - sessions = _get_docs('sessions', 'session', {'project' : parent['_id']}) - files = _get_files('projects', {'_id' : parent['_id'] }) - - return list(sessions) + files - - @staticmethod - def filter(children, criterion, _id=False): - if _id: - selectSes = '_id' - selectFil = '_id' - else: - selectSes = 'label' - selectFil = 'name' - - for x in children: - if x['node_type'] == "session" and str(x.get(selectSes)) == criterion: - return x, SessionNode - if x['node_type'] == "file" and str(x.get(selectFil)) == criterion: - return x, FileNode - raise 
APINotFoundException('No ' + criterion + ' session or file found.') - -class GroupNode(Node): - - @staticmethod - def get_children(parent): - projects = _get_docs('projects', 'project', {'group' : parent['_id']}) - return projects - - @staticmethod - def filter(children, criterion, _id=False): - if _id: - select = '_id' - else: - select = 'label' - - for x in children: - if str(x.get(select)) == criterion: - return x, ProjectNode - raise APINotFoundException('No ' + criterion + ' project found.') - -class RootNode(Node): - - @staticmethod - def get_children(parent): - groups = _get_docs('groups', 'group', {}) - return groups - - @staticmethod - def filter(children, criterion, _id=False): - for x in children: - if x.get('_id') == criterion: - return x, GroupNode - raise APINotFoundException('No ' + criterion + ' group found.') + return files +def find_file(files, name): + for f in files: + if str(f.get('name')) == name: + return f + return None class Resolver(object): """ @@ -181,46 +121,82 @@ class Resolver(object): Does not tolerate ambiguity at any level of the path except the final node. 
""" - @staticmethod - def resolve(path): + def __init__(self, id_only=False): + self.id_only = id_only + def resolve(self, path): if not isinstance(path, list): raise Exception("Path must be an array of strings") - node, resolved, last = Resolver._resolve(path, RootNode) - children = node.get_children(last) + path = deque(path) + tree = deque(PROJECT_TREE) + resolved_path = [] + resolved_children = [] + last = None + files = [] + + # Short circuit - just return a list of groups + if not path: + resolved_children = tree[0].find(None, id_only=self.id_only) + return { + 'path': resolved_path, + 'children': resolved_children + } + + # Walk down the tree, building path until we get to the last node + # Keeping in mind that path may be empty + while len(path) > 0 and len(tree) > 0: + node = tree.popleft() + current_id, current = parse_criterion(path.popleft()) + + # Find the next child + children = node.find(current, parent=last, id_only=self.id_only, include_files=True, use_id=current_id, limit=1) + + # If children is empty, try to find a match in the last set of files + if not children: + # Check in last set of files + if not current_id: + child = find_file(files, current) + if child: + children = [child] + files = [] + if len(path) > 0: + raise APINotFoundException('Files have no children') + + if not children: + # Not found + or_file = 'or file ' if node.files else '' + raise APINotFoundException('No {0} {1} {2} found.'.format(current, node.node_type, or_file)) + + # Otherwise build up path + resolved_path.append(children[0]) + last = resolved_path[-1] + files = pop_files(last) + + # Resolve children + if not self.id_only: + # If there are path elements left, search in the last set of files + if len(path) > 0: + f = find_file(files, path[0]) + if not f: + raise APINotFoundException('No ' + path[0] + ' file found.') + if len(path) > 1: + raise APINotFoundException('Files have no children') + resolved_path.append(f) + elif last and last.get('node_type') != 'file': + # 
Retrieve any child nodes + if len(tree) > 0: + node = tree[0] + resolved_children = node.find(None, parent=last) + + # Add any files from the last node + resolved_children = resolved_children + files + + elif len(path) > 0: + raise APINotFoundException('Cannot retrieve id for file: {0}'.format(path[0])) return { - 'path': resolved, - 'children': children + 'path': resolved_path, + 'children': resolved_children } - @staticmethod - def _resolve(path, node, parents=None): - - if parents is None: - parents = [] - - last = None - if len(parents) > 0: - last = parents[len(parents) - 1] - - if len(path) == 0: - return node, parents, last - - current = path[0] - current_id = False - - # Check for syntax - if current.startswith(''): - current = current[4:len(current)-1] - current_id = True - print current - - children = node.get_children(last) - selected, next_ = node.filter(children, current, current_id) - - path = path[1:] - parents.append(selected) - - return Resolver._resolve(path, next_, parents) diff --git a/tests/integration_tests/python/test_uploads.py b/tests/integration_tests/python/test_uploads.py index 607ec86e8..80df8d27c 100644 --- a/tests/integration_tests/python/test_uploads.py +++ b/tests/integration_tests/python/test_uploads.py @@ -341,9 +341,16 @@ def test_reaper_project_search(data_builder, file_form, as_root): assert r.ok project_list = r.json() assert len(project_list) == 2 - project = project_list[1] - assert project_list[1]['label'] == expected_project_label_2 + + # Order is not guaranteed + if project_list[0]['_id'] == project_1: + project = project_list[1] + else: + project = project_list[0] + + assert project['label'] == expected_project_label_2 project_2 = project['_id'] + assert len(as_root.get('/projects/' + project_2 + '/sessions').json()) == 1 session = as_root.get('/projects/' + project_2 + '/sessions').json()[0]['_id'] @@ -374,8 +381,14 @@ def test_reaper_project_search(data_builder, file_form, as_root): project_list = r.json() # Ensure 
there are still only 2 projects assert len(project_list) == 2 - project = project_list[1] - assert project_list[1]['label'] == expected_project_label_2 + + # Order is not guaranteed + if project_list[0]['_id'] == project_1: + project = project_list[1] + else: + project = project_list[0] + + assert project['label'] == expected_project_label_2 assert len(as_root.get('/projects/' + project_2 + '/sessions').json()) == 2 session2 = as_root.get('/projects/' + project_2 + '/sessions').json()[1]['_id'] From 8c70ad3153c22e0197613d2807427d31439f92f3 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 6 Mar 2018 09:22:10 -0600 Subject: [PATCH 41/53] Add lookup endpoint that routes to GET for path In order to support polymorphism in clients, this commit also adds a new optional request environment variable `fw_node_type` which, when set should be included as `node_type` in the result. --- api/api.py | 1 + api/handlers/containerhandler.py | 3 + api/handlers/grouphandler.py | 1 + api/handlers/resolvehandler.py | 52 +++++- api/resolver.py | 26 +-- api/util.py | 6 + .../integration_tests/python/test_resolver.py | 155 +++++++++++++++++- 7 files changed, 230 insertions(+), 14 deletions(-) diff --git a/api/api.py b/api/api.py index db7a9dc70..a25dec52d 100644 --- a/api/api.py +++ b/api/api.py @@ -95,6 +95,7 @@ def prefix(path, routes): route('/login', RequestHandler, h='log_in', m=['POST']), route('/logout', RequestHandler, h='log_out', m=['POST']), + route('/lookup', ResolveHandler, h='lookup', m=['POST']), route('/resolve', ResolveHandler, h='resolve', m=['POST']), route('/schemas/', SchemaHandler, m=['GET']), route('/report/', ReportHandler, m=['GET']), diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py index 373016383..03799cdc8 100644 --- a/api/handlers/containerhandler.py +++ b/api/handlers/containerhandler.py @@ -115,6 +115,9 @@ def get(self, cont_name, **kwargs): inflate_job_info = cont_name == 'sessions' result['analyses'] = 
AnalysisStorage().get_analyses(cont_name, _id, inflate_job_info) + + util.add_node_type(self.request, result) + return self.handle_origin(result) def handle_origin(self, result): diff --git a/api/handlers/grouphandler.py b/api/handlers/grouphandler.py index a006b1209..e5d1d5cff 100644 --- a/api/handlers/grouphandler.py +++ b/api/handlers/grouphandler.py @@ -22,6 +22,7 @@ def get(self, _id): self._filter_permissions([result], self.uid) if self.is_true('join_avatars'): ContainerHandler.join_user_info([result]) + util.add_node_type(self.request, result) return result def delete(self, _id): diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index bd21381ea..1bb111c97 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -1,8 +1,10 @@ """ API request handlers for the jobs module """ +from webapp2 import Request from ..web import base +from ..web.errors import APINotFoundException from ..resolver import Resolver class ResolveHandler(base.RequestHandler): @@ -10,14 +12,58 @@ class ResolveHandler(base.RequestHandler): """Provide /resolve API route.""" def resolve(self): - """Resolve a path through the hierarchy.""" + """Resolve a path through the hierarchy, and include node details with children""" + return self._resolve_and_check_permissions(False) + + def lookup(self): + """Locate a node by path, and re-route to the endpoint for that node""" + result = self._resolve_and_check_permissions(True) + + # If we resolved a file, we can just return that file node + path = result.get('path', []) + + if not path: + raise APINotFoundException('No node matched that path') + + # In the event that we resolved a file, just return the file node + dest = path[-1] + if dest.get('node_type') == 'file': + return dest + + # Reroute to the actual path that will log access, resolve analyses, etc + path = self._get_node_path(dest) + + # Create new request instance using destination URI (eg. 
replace containers with cont_name) + destination_environ = self.request.environ + for key in 'PATH_INFO', 'REQUEST_URI': + destination_environ[key] = destination_environ[key].replace('lookup', path, 1) + # We also must update the method, and indicate that we want the node_type included + # The client will depend on node_type being set so that it can map to the correct type + destination_environ['REQUEST_METHOD'] = 'GET' + destination_environ['fw_node_type'] = dest['node_type'] + destination_request = Request(destination_environ) + # Apply SciTranRequest attrs + destination_request.id = self.request.id + destination_request.logger = self.request.logger + + # Dispatch the destination request + self.app.router.dispatch(destination_request, self.response) + + def _get_node_path(self, node): + """Get the actual resource path for node""" + # Right now all containers are just node_type + 's' + cname = node['node_type'] + 's' + return '{0}/{1}'.format(cname, node['_id']) + + def _resolve_and_check_permissions(self, id_only): + """Resolve a path through the hierarchy.""" if self.public_request: self.abort(403, 'Request requires login') doc = self.request.json - resolver = Resolver() + resolver = Resolver(id_only=id_only) result = resolver.resolve(doc['path']) # Cancel the request if anything in the path is unauthorized; remove any children that are unauthorized. @@ -52,3 +98,5 @@ def resolve(self): result["children"] = filtered_children return result + + diff --git a/api/resolver.py b/api/resolver.py index 1c070ebc0..fb8794c62 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -3,7 +3,7 @@ """ from . 
import config -from .web.errors import APINotFoundException +from .web.errors import APINotFoundException, InputValidationException from bson.objectid import ObjectId from collections import deque @@ -31,6 +31,7 @@ class Node(object): '_id': 1, 'label': 1, 'permissions': 1, + 'files': 1, } def __init__(self, collection, node_type, parent, files=True, use_id=False, object_id=True): @@ -126,7 +127,7 @@ def __init__(self, id_only=False): def resolve(self, path): if not isinstance(path, list): - raise Exception("Path must be an array of strings") + raise InputValidationException("Path must be an array of strings") path = deque(path) tree = deque(PROJECT_TREE) @@ -173,17 +174,20 @@ def resolve(self, path): last = resolved_path[-1] files = pop_files(last) + # If there are path elements left, search in the last set of files + if len(path) > 0: + filename = path.popleft() + f = find_file(files, filename) + if not f: + raise APINotFoundException('No ' + filename + ' file found.') + if len(path) > 0: + raise APINotFoundException('Files have no children') + resolved_path.append(f) + files = [] + # Resolve children if not self.id_only: - # If there are path elements left, search in the last set of files - if len(path) > 0: - f = find_file(files, path[0]) - if not f: - raise APINotFoundException('No ' + path[0] + ' file found.') - if len(path) > 1: - raise APINotFoundException('Files have no children') - resolved_path.append(f) - elif last and last.get('node_type') != 'file': + if last and last.get('node_type') != 'file': # Retrieve any child nodes if len(tree) > 0: node = tree[0] diff --git a/api/util.py b/api/util.py index 64d2f85d1..60d42b567 100644 --- a/api/util.py +++ b/api/util.py @@ -340,3 +340,9 @@ def parse_range_header(range_header_val, valid_units=('bytes',)): ranges.append((first, last)) return ranges + +def add_node_type(request, result): + """Adds a 'node_type' property to result if fw_node_type is set in the request environment.""" + if 'fw_node_type' in 
request.environ and isinstance(result, dict): + result['node_type'] = request.environ['fw_node_type'] + diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 37c988226..67679afc7 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -14,7 +14,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): # try resolving invalid (non-list) path r = as_admin.post('/resolve', json={'path': 'test'}) - assert r.status_code == 500 + assert r.status_code == 400 # resolve root (empty) r = as_admin.post('/resolve', json={'path': []}) @@ -189,3 +189,156 @@ def idz(s): # try to resolve non-existent (also invalid) root/group/project/session/acquisition/file/child r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file, 'child']}) assert r.status_code == 404 + +def test_lookup(data_builder, as_admin, as_user, as_public, file_form): + # ROOT + # try accessing lookupr w/o logging in + r = as_public.post('/lookup', json={'path': []}) + assert r.status_code == 403 + + # try resolving invalid (non-list) path + r = as_admin.post('/lookup', json={'path': 'test'}) + assert r.status_code == 400 + + # lookup root (empty) + r = as_admin.post('/lookup', json={'path': []}) + result = r.json() + assert r.status_code == 404 + + # lookup root (1 group) + group = data_builder.create_group() + r = as_admin.post('/lookup', json={'path': []}) + result = r.json() + assert r.status_code == 404 + + # try to lookup non-existent root/child + r = as_admin.post('/lookup', json={'path': ['child']}) + assert r.status_code == 404 + + + # GROUP + # try to lookup root/group as different (and non-root) user + r = as_user.post('/lookup', json={'path': [group]}) + assert r.status_code == 403 + + # lookup root/group (empty) + r = as_admin.post('/lookup', json={'path': [group]}) + result = r.json() + assert r.ok 
+ assert result['node_type'] == 'group' + assert result['_id'] == group + + # try to lookup non-existent root/group/child + r = as_admin.post('/lookup', json={'path': [group, 'child']}) + assert r.status_code == 404 + + + # PROJECT + # lookup root/group/project (empty) + project_label = 'test-lookupr-project-label' + project = data_builder.create_project(label=project_label) + + r = as_admin.post('/lookup', json={'path': [group, project_label]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'project' + assert result['_id'] == project + + # lookup root/group/project/file + project_file = 'project_file' + r = as_admin.post('/projects/' + project + '/files', files=file_form(project_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, project_file]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'file' + assert result['name'] == project_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent root/group/project/child + r = as_admin.post('/lookup', json={'path': [group, project_label, 'child']}) + assert r.status_code == 404 + + + # SESSION + # lookup root/group/project/session (empty) + session_label = 'test-lookupr-session-label' + session = data_builder.create_session(label=session_label) + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'session' + assert result['_id'] == session + + # lookup root/group/project/session/file + session_file = 'session_file' + r = as_admin.post('/sessions/' + session + '/files', files=file_form(session_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, session_file]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'file' + assert result['name'] == session_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent 
root/group/project/session/child + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'child']}) + assert r.status_code == 404 + + # ACQUISITION + # lookup root/group/project/session/acquisition (empty) + acquisition_label = 'test-lookupr-acquisition-label' + acquisition = data_builder.create_acquisition(label=acquisition_label) + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'acquisition' + assert result['_id'] == acquisition + + # lookup root/group/project/session/acquisition/file + acquisition_file = 'acquisition_file' + r = as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form(acquisition_file)) + assert r.ok + + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'file' + assert result['name'] == acquisition_file + assert 'mimetype' in result + assert 'size' in result + + def idz(s): + return '' + + # lookup root/group/project/session/acquisition with id + r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition)]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'acquisition' + assert result['_id'] == acquisition + + # lookup root/group/project/session/acquisition/file with id + r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), acquisition_file]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'file' + assert result['name'] == acquisition_file + assert 'mimetype' in result + assert 'size' in result + + # try to lookup non-existent root/group/project/session/acquisition/child + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'child']}) + assert r.status_code == 404 + + + # FILE + 
# try to lookup non-existent (also invalid) root/group/project/session/acquisition/file/child + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file, 'child']}) + assert r.status_code == 404 + From 9304596237e49c681a1aa41b3c9bc4e53035758b Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Tue, 6 Mar 2018 16:45:00 -0600 Subject: [PATCH 42/53] Move list_projection into ContainerStorage --- api/dao/basecontainerstorage.py | 26 +++++++++++++++++++++----- api/dao/containerstorage.py | 16 +++++++++++----- api/handlers/collectionshandler.py | 16 ++++++++++------ api/handlers/containerhandler.py | 14 +++----------- 4 files changed, 45 insertions(+), 27 deletions(-) diff --git a/api/dao/basecontainerstorage.py b/api/dao/basecontainerstorage.py index 9484833f4..ee73ac6ed 100644 --- a/api/dao/basecontainerstorage.py +++ b/api/dao/basecontainerstorage.py @@ -47,11 +47,12 @@ class ContainerStorage(object): Examples: projects, sessions, acquisitions and collections """ - def __init__(self, cont_name, use_object_id=False, use_delete_tag=False): + def __init__(self, cont_name, use_object_id=False, use_delete_tag=False, list_projection=None): self.cont_name = cont_name self.use_object_id = use_object_id self.use_delete_tag = use_delete_tag self.dbc = config.db[cont_name] + self.list_projection = list_projection @classmethod def factory(cls, cont_name): @@ -210,8 +211,7 @@ def get_el(self, _id, projection=None, fill_defaults=False): self._from_mongo(cont) if fill_defaults: self._fill_default_values(cont) - if cont is not None and cont.get('files', []): - cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + self.filter_deleted_files(cont) return cont def get_all_el(self, query, user, projection, fill_defaults=False): @@ -240,8 +240,7 @@ def get_all_el(self, query, user, projection, fill_defaults=False): results = list(self.dbc.find(query, projection)) for cont in results: - if cont.get('files', []): - 
cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + self.filter_deleted_files(cont) self._from_mongo(cont) if fill_defaults: self._fill_default_values(cont) @@ -304,3 +303,20 @@ def modify_info(self, _id, payload, modify_subject=False): update['$set']['modified'] = datetime.datetime.utcnow() return self.dbc.update_one(query, update) + + def filter_deleted_files(self, cont): + """ + Update container object, removing any files that are marked deleted. + """ + if cont is not None and 'files' in cont: + cont['files'] = [f for f in cont['files'] if 'deleted' not in f] + + + def get_list_projection(self): + """ + Return a copy of the list projection to use with this container, or None. + It is safe to modify the returned copy. + """ + if self.list_projection: + return self.list_projection.copy() + return None diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py index 84f67d20a..a377258e3 100644 --- a/api/dao/containerstorage.py +++ b/api/dao/containerstorage.py @@ -40,9 +40,9 @@ def create_el(self, payload): class ProjectStorage(ContainerStorage): - def __init__(self): - super(ProjectStorage,self).__init__('projects', use_object_id=True, use_delete_tag=True) + super(ProjectStorage,self).__init__('projects', use_object_id=True, use_delete_tag=True, + list_projection={'info': 0, 'files.info': 0}) def create_el(self, payload): result = super(ProjectStorage, self).create_el(payload) @@ -100,7 +100,12 @@ def recalc_sessions_compliance(self, project_id=None): class SessionStorage(ContainerStorage): def __init__(self): - super(SessionStorage,self).__init__('sessions', use_object_id=True, use_delete_tag=True) + super(SessionStorage,self).__init__('sessions', use_object_id=True, use_delete_tag=True, + # Remove subject first/last from list view to better log access to this information + list_projection={'info': 0, 'analyses': 0, 'subject.firstname': 0, + 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, + 'subject.race': 0, 
'subject.ethnicity': 0, 'subject.info': 0, + 'files.info': 0, 'tags': 0}) def _fill_default_values(self, cont): cont = super(SessionStorage,self)._fill_default_values(cont) @@ -225,7 +230,8 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non class AcquisitionStorage(ContainerStorage): def __init__(self): - super(AcquisitionStorage,self).__init__('acquisitions', use_object_id=True, use_delete_tag=True) + super(AcquisitionStorage,self).__init__('acquisitions', use_object_id=True, use_delete_tag=True, + list_projection={'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0}) def create_el(self, payload): result = super(AcquisitionStorage, self).create_el(payload) @@ -291,7 +297,7 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non class CollectionStorage(ContainerStorage): def __init__(self): - super(CollectionStorage, self).__init__('collections', use_object_id=True, use_delete_tag=True) + super(CollectionStorage, self).__init__('collections', use_object_id=True, use_delete_tag=True, list_projection={'info': 0}) class AnalysisStorage(ContainerStorage): diff --git a/api/handlers/collectionshandler.py b/api/handlers/collectionshandler.py index 703b277cc..e6102c2d1 100644 --- a/api/handlers/collectionshandler.py +++ b/api/handlers/collectionshandler.py @@ -19,10 +19,9 @@ class CollectionsHandler(ContainerHandler): container_handler_configurations['collections'] = { 'permchecker': containerauth.collection_permissions, - 'storage': containerstorage.ContainerStorage('collections', use_object_id=True, use_delete_tag=True), + 'storage': containerstorage.CollectionStorage(), 'storage_schema_file': 'collection.json', - 'payload_schema_file': 'collection.json', - 'list_projection': {'info': 0} + 'payload_schema_file': 'collection.json' } def __init__(self, request=None, response=None): @@ -116,7 +115,7 @@ def delete(self, **kwargs): self.abort(404, 'Element not removed from container {} 
{}'.format(self.storage.cont_name, _id)) def get_all(self): - projection = self.container_handler_configurations['collections']['list_projection'] + projection = self.get_list_projection('collections') if self.superuser_request: permchecker = always_ok elif self.public_request: @@ -163,7 +162,7 @@ def get_sessions(self, cid): if not self.superuser_request: query['permissions._id'] = self.uid - projection = self.container_handler_configurations['sessions']['list_projection'] + projection = self.get_list_projection('sessions') sessions = list(containerstorage.SessionStorage().get_all_el(query, None, projection)) @@ -193,7 +192,7 @@ def get_acquisitions(self, cid): if not self.superuser_request: query['permissions._id'] = self.uid - projection = self.container_handler_configurations['acquisitions']['list_projection'] + projection = self.get_list_projection('acquisitions') acquisitions = list(containerstorage.AcquisitionStorage().get_all_el(query, None, projection)) @@ -202,3 +201,8 @@ def get_acquisitions(self, cid): for acquisition in acquisitions: acquisition = self.handle_origin(acquisition) return acquisitions + + def get_list_projection(self, container): + """Return the list_projection for container.""" + cfg = self.container_handler_configurations[container] + return cfg['storage'].get_list_projection() diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py index 03799cdc8..f6940fa1f 100644 --- a/api/handlers/containerhandler.py +++ b/api/handlers/containerhandler.py @@ -42,7 +42,6 @@ class ContainerHandler(base.RequestHandler): 'sessions': True, 'acquisitions': True } - default_list_projection = ['files', 'notes', 'timestamp', 'timezone', 'public'] # This configurations are used by the ContainerHandler class to load the storage, # the permissions checker and the json schema validators used to handle a request. 
@@ -57,7 +56,6 @@ class ContainerHandler(base.RequestHandler): 'parent_storage': containerstorage.GroupStorage(), 'storage_schema_file': 'project.json', 'payload_schema_file': 'project.json', - 'list_projection': {'info': 0, 'files.info': 0}, 'propagated_properties': ['public'], 'children_cont': 'sessions' }, @@ -67,11 +65,6 @@ class ContainerHandler(base.RequestHandler): 'parent_storage': containerstorage.ProjectStorage(), 'storage_schema_file': 'session.json', 'payload_schema_file': 'session.json', - # Remove subject first/last from list view to better log access to this information - 'list_projection': {'info': 0, 'analyses': 0, 'subject.firstname': 0, - 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, - 'subject.race': 0, 'subject.ethnicity': 0, 'subject.info': 0, - 'files.info': 0, 'tags': 0}, 'children_cont': 'acquisitions' }, 'acquisitions': { @@ -79,8 +72,7 @@ class ContainerHandler(base.RequestHandler): 'permchecker': containerauth.default_container, 'parent_storage': containerstorage.SessionStorage(), 'storage_schema_file': 'acquisition.json', - 'payload_schema_file': 'acquisition.json', - 'list_projection': {'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0} + 'payload_schema_file': 'acquisition.json' } } @@ -314,7 +306,7 @@ def get_all(self, cont_name, par_cont_name=None, par_id=None): self.config = self.container_handler_configurations[cont_name] self.storage = self.config['storage'] - projection = self.config['list_projection'].copy() + projection = self.storage.get_list_projection() if self.is_true('permissions'): if not projection: @@ -387,7 +379,7 @@ def _add_results_counts(self, results, cont_name): def get_all_for_user(self, cont_name, uid): self.config = self.container_handler_configurations[cont_name] self.storage = self.config['storage'] - projection = self.config['list_projection'] + projection = self.storage.get_list_projection() # select which permission filter will be applied to the list of results. 
if self.superuser_request or self.user_is_admin: permchecker = always_ok From 007304134e7baeeea598e980183e2655bdc63c27 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 7 Mar 2018 10:21:49 -0600 Subject: [PATCH 43/53] Use ContainerStorage for resolver --- api/dao/basecontainerstorage.py | 15 ++++- api/resolver.py | 67 ++++++++----------- .../integration_tests/python/test_resolver.py | 4 ++ 3 files changed, 46 insertions(+), 40 deletions(-) diff --git a/api/dao/basecontainerstorage.py b/api/dao/basecontainerstorage.py index ee73ac6ed..6a9a64071 100644 --- a/api/dao/basecontainerstorage.py +++ b/api/dao/basecontainerstorage.py @@ -214,7 +214,18 @@ def get_el(self, _id, projection=None, fill_defaults=False): self.filter_deleted_files(cont) return cont - def get_all_el(self, query, user, projection, fill_defaults=False): + def get_all_el(self, query, user, projection, fill_defaults=False, **kwargs): + """ + Get all elements matching query for this container. + + Args: + query (dict): The query object, or None for all elements + user (dict): The user object, if filtering on permissions is desired, otherwise None + projection (dict): The optional projection to use for returned elements + fill_defaults (bool): Whether or not to populate the default values for returned elements. Default is False. + **kwargs: Additional arguments to pass to the underlying find function + + """ if query is None: query = {} if user: @@ -238,7 +249,7 @@ def get_all_el(self, query, user, projection, fill_defaults=False): else: replace_info_with_bool = False - results = list(self.dbc.find(query, projection)) + results = list(self.dbc.find(query, projection, **kwargs)) for cont in results: self.filter_deleted_files(cont) self._from_mongo(cont) diff --git a/api/resolver.py b/api/resolver.py index fb8794c62..47b447f7b 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -1,10 +1,9 @@ """ Resolve an ambiguous path through the data hierarchy. """ - -from . 
import config +from .dao import containerstorage, containerutil from .web.errors import APINotFoundException, InputValidationException -from bson.objectid import ObjectId +import bson from collections import deque class Node(object): @@ -12,46 +11,31 @@ class Node(object): # As neither property should ever change, this sort should be consistent sorting = [('created', 1), ('_id', 1)] - # Globally disable extraneous properties of unbounded length, along with some PHI fields. - default_projection = { - 'files': 0, - 'info': 0, - 'tags': 0, - 'subject.sex': 0, - 'subject.age': 0, - 'subject.race': 0, - 'subject.ethnicity': 0, - 'subject.info': 0, - 'subject.firstname': 0, - 'subject.lastname': 0, - } - # In some cases we only want to resolve the id of a container id_only_projection = { - '_id': 1, 'label': 1, 'permissions': 1, 'files': 1, } - def __init__(self, collection, node_type, parent, files=True, use_id=False, object_id=True): - self.collection = collection - self.node_type = node_type + def __init__(self, storage, parent, files=True, use_id=False): + self.storage = storage + self.node_type = containerutil.singularize(storage.cont_name) self.parent = parent self.files = files self.use_id = use_id - self.object_id = object_id - def find(self, criterion, parent=None, id_only=False, include_files=False, use_id=False, limit=0): - query = { - 'deleted': {'$exists': False} - } + def find(self, criterion, parent=None, id_only=False, include_files=True, use_id=False, limit=0): + query = {} # Setup criterion match if criterion: if use_id or self.use_id: - if self.object_id: - query['_id'] = ObjectId(criterion) + if self.storage.use_object_id: + try: + query['_id'] = bson.ObjectId(criterion) + except bson.errors.InvalidId as e: + raise InputValidationException(e.message) else: query['_id'] = criterion else: @@ -63,23 +47,27 @@ def find(self, criterion, parent=None, id_only=False, include_files=False, use_i # Setup projection if id_only: - proj = Node.id_only_projection + 
proj = Node.id_only_projection.copy() else: - proj = Node.default_projection.copy() - if include_files: - del proj['files'] + proj = self.storage.get_list_projection() + if not include_files: + proj['files'] = 0 - results = list(config.db[self.collection].find(query, proj, sort=Node.sorting, limit=limit)) + # We don't use the user field here because we want to return a 403 if + # they try to resolve something they don't have access to + results = self.storage.get_all_el(query, None, proj, sort=Node.sorting, limit=limit) for el in results: + self.storage.filter_deleted_files(el) el['node_type'] = self.node_type + return results PROJECT_TREE = [ - Node('groups', 'group', None, False, True, False), - Node('projects', 'project', 'group'), - Node('sessions', 'session', 'project'), - Node('acquisitions', 'acquisition', 'session') + Node(containerstorage.GroupStorage(), None, files=False, use_id=True), + Node(containerstorage.ProjectStorage(), 'group'), + Node(containerstorage.SessionStorage(), 'project'), + Node(containerstorage.AcquisitionStorage(), 'session') ] def parse_criterion(value): @@ -110,6 +98,9 @@ def pop_files(container): return files def find_file(files, name): + """ + Find a file by name + """ for f in files: if str(f.get('name')) == name: return f @@ -191,7 +182,7 @@ def resolve(self, path): # Retrieve any child nodes if len(tree) > 0: node = tree[0] - resolved_children = node.find(None, parent=last) + resolved_children = node.find(None, parent=last, include_files=False) # Add any files from the last node resolved_children = resolved_children + files diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 67679afc7..0e71f5075 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -180,6 +180,10 @@ def idz(s): assert path_in_result([group, project, session, acquisition, acquisition_file], result) assert result['children'] == [] + # 
resolve root/group/project/session/acquisition/file with invalid id + r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz('not-valid'), idz(acquisition), acquisition_file]}) + assert r.status_code == 400 + # try to resolve non-existent root/group/project/session/acquisition/child r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'child']}) assert r.status_code == 404 From 3049c32cad2a41bcabbeb32ad24b7544699eae59 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 8 Mar 2018 08:43:28 -0600 Subject: [PATCH 44/53] Add swagger documentation for lookup endpoint --- swagger/paths/resolver.yaml | 18 ++++++++++++++++++ swagger/schemas/output/lookup.json | 18 ++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 swagger/schemas/output/lookup.json diff --git a/swagger/paths/resolver.yaml b/swagger/paths/resolver.yaml index bced3ed73..d200caee5 100644 --- a/swagger/paths/resolver.yaml +++ b/swagger/paths/resolver.yaml @@ -24,3 +24,21 @@ description: '' schema: $ref: schemas/output/resolver.json + +/lookup: + post: + summary: Perform path based lookup of a single node in the Flywheel hierarchy + description: | + This will perform a deep lookup of a node. See /resolve for more details. 
+ operationId: lookup_path + parameters: + - name: body + in: body + required: true + schema: + $ref: schemas/input/resolver.json + responses: + '200': + description: '' + schema: + $ref: schemas/output/lookup.json diff --git a/swagger/schemas/output/lookup.json b/swagger/schemas/output/lookup.json new file mode 100644 index 000000000..6ef67983a --- /dev/null +++ b/swagger/schemas/output/lookup.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type":"object", + "allOf": [{ "$ref": "../definitions/resolver.json#/definitions/resolver-node" }], + "example": { + "node_type": "project", + "_id": "57e452791cff88b85f9f9c97", + "label": "Neuroscience", + "group": "scitran", + "created": "2016-09-22T21:51:53.151000+00:00", + "modified": "2016-09-22T21:51:53.151000+00:00", + "public": false, + "permissions": [{ + "access": "admin", + "_id": "coltonlw@flywheel.io" + }] + } +} From 22712cb39df6e98c7d48d372b86a4f1c88515998 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Thu, 8 Mar 2018 09:18:10 -0600 Subject: [PATCH 45/53] Fix test failing due to indeterminate ordering --- tests/integration_tests/python/test_batch.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/tests/integration_tests/python/test_batch.py b/tests/integration_tests/python/test_batch.py index 2bdceeac2..9baa980a5 100644 --- a/tests/integration_tests/python/test_batch.py +++ b/tests/integration_tests/python/test_batch.py @@ -280,8 +280,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo batch1 = r.json() assert len(batch1['matched']) == 2 - assert batch1['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch1['matched'] ] + assert session in matched_ids + assert session2 in matched_ids # create a batch w/o inputs targeting acquisitions r = as_admin.post('/batch', json={ @@ -291,8 +292,9 @@ def test_no_input_batch(data_builder, 
default_payload, randstr, as_admin, as_roo assert r.ok batch2 = r.json() assert len(batch2['matched']) == 2 - assert batch2['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch2['matched'] ] + assert session in matched_ids + assert session2 in matched_ids # create a batch w/o inputs targeting project r = as_admin.post('/batch', json={ @@ -302,8 +304,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo assert r.ok batch3 = r.json() assert len(batch3['matched']) == 2 - assert batch3['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch3['matched'] ] + assert session in matched_ids + assert session2 in matched_ids batch_id = batch1['_id'] @@ -353,8 +356,9 @@ def test_no_input_batch(data_builder, default_payload, randstr, as_admin, as_roo batch4 = r.json() assert len(batch4['matched']) == 2 - assert batch4['matched'][0]['id'] == session - assert batch1['matched'][1]['id'] == session2 + matched_ids = [ x['id'] for x in batch4['matched'] ] + assert session in matched_ids + assert session2 in matched_ids batch_id = batch4['_id'] # run batch From 277deed3ab52aba043b0f8bcae86d98fe84db03a Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Mar 2018 11:34:09 -0600 Subject: [PATCH 46/53] Refactor resolver to support virtual nodes --- api/dao/basecontainerstorage.py | 3 + api/resolver.py | 353 ++++++++++++------ .../integration_tests/python/test_resolver.py | 33 +- 3 files changed, 255 insertions(+), 134 deletions(-) diff --git a/api/dao/basecontainerstorage.py b/api/dao/basecontainerstorage.py index 6a9a64071..7ea80580b 100644 --- a/api/dao/basecontainerstorage.py +++ b/api/dao/basecontainerstorage.py @@ -109,6 +109,9 @@ def get_container(self, _id, projection=None, get_children=False): cont[CHILD_MAP[self.cont_name]] = children return cont + def get_child_container_name(self): + return 
CHILD_MAP.get(self.cont_name) + def get_children(self, _id, projection=None, uid=None): try: child_name = CHILD_MAP[self.cont_name] diff --git a/api/resolver.py b/api/resolver.py index 47b447f7b..aa10cb63d 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -1,12 +1,147 @@ """ Resolve an ambiguous path through the data hierarchy. + +The goal of the resolver is to provide a virtual graph that can be navigated using +path notation. Below is how the graph will ultimately be represented. Currently +subjects are not formalized and are excluded from the implementation. + +Quoted strings represent literal nodes in the graph. For example, to find the gear +called dicom-mr-classifier, you would use the path: ["gears", "dicom-mr-classifier"] + ++----+ +-------+ +-----+ +-------+ +|Root+---+"gears"+---+Gears+---+Version| ++-+--+ +-------+ +-----+ +-------+ + | ++-+----+ +|Groups| ++-+----+ + | ++-+------+ +|Projects+---+ ++-+------+ | + | | ++-+------+ | +----------+ +--------+ +|Subjects+---+---+"analyses"+---+Analyses| ++-+------+ | +----------+ +---+----+ + | | | ++-+------+ | | +|Sessions+---+-------+ | ++-+------+ | +---+---+ +-----+ + | +----------+"files"+---+Files| ++-+----------+ | +-------+ +-----+ +|Acquisitions+-------+ ++------------+ """ -from .dao import containerstorage, containerutil -from .web.errors import APINotFoundException, InputValidationException import bson + from collections import deque -class Node(object): +from .dao import containerutil +from .dao.basecontainerstorage import ContainerStorage +from .web.errors import APINotFoundException, InputValidationException + +def path_peek(path): + """Return the next path element or None""" + if len(path) > 0: + return path[0] + return None + +def parse_criterion(path_in): + """Parse criterion, returning true if we got an id""" + if not path_in: + return False, None + + value = path_in.popleft() + use_id = False + # Check for syntax + if value.startswith(''): + value = value[4:len(value)-1] + use_id = 
True + + return use_id, value + +def get_parent(path_out): + """Return the last parent element or None""" + if path_out: + return path_out[-1] + return None + +def apply_node_type(lst, node_type): + """Apply node_type to each item in in the list""" + if lst: + for item in lst: + item['node_type'] = node_type + +def pop_files(container): + """Return a consistently-ordered set of files for a given container.""" + if not container: + return [] + + files = container.pop('files', []) + + files.sort(key=lambda f: f.get('name', '')) + apply_node_type(files, 'file') + + return files + +def find_file(files, name): + """Find a file by name""" + for f in files: + if str(f.get('name')) == name: + return f + return None + +class BaseNode(object): + """Base class for all nodes in the resolver tree""" + def next(self, path_in, path_out, id_only): + # pylint: disable=W0613 + pass + + def get_children(self, path_out): + # pylint: disable=W0613 + return [] + +class RootNode(BaseNode): + """The root node of the resolver tree""" + def __init__(self): + self.groups_node = ContainerNode('groups', files=False, use_id=True) + + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy""" + path_el = path_peek(path_in) + + if path_el == 'gears': + path_in.popleft() + return GearsNode() + + if path_el: + return self.groups_node + + # TODO: Gears + return None + + def get_children(self, path_out): + """Get the children of the current node in the hierarchy""" + return ContainerNode.get_container_children('groups') + +class FilesNode(BaseNode): + """Node that represents filename resolution""" + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy""" + filename = path_in.popleft() + + parent = get_parent(path_out) + if not parent: + raise APINotFoundException('No ' + filename + ' file found.') + + f = find_file(pop_files(parent), filename) + if f is not None: + path_out.append(f) + return None + + raise APINotFoundException('No ' + 
filename + ' file found.') + +class ContainerNode(BaseNode): # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. # As neither property should ever change, this sort should be consistent sorting = [('created', 1), ('_id', 1)] @@ -18,17 +153,28 @@ class Node(object): 'files': 1, } - def __init__(self, storage, parent, files=True, use_id=False): - self.storage = storage - self.node_type = containerutil.singularize(storage.cont_name) - self.parent = parent + def __init__(self, cont_name, files=True, use_id=False): + self.cont_name = cont_name + self.storage = ContainerStorage.factory(cont_name) + # node_type is also the parent id field name + self.node_type = containerutil.singularize(cont_name) self.files = files - self.use_id = use_id + self.use_id = use_id + self.child_name = self.storage.get_child_container_name() - def find(self, criterion, parent=None, id_only=False, include_files=True, use_id=False, limit=0): - query = {} + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy, adding any value found to path_out""" + # If there is no path in, don't try to resolve + if not path_in: + return None + + use_id, criterion = parse_criterion(path_in) + parent = get_parent(path_out) + # Peek to see if we need files for the next path element + fetch_files = (path_peek(path_in) in ['files', None]) # Setup criterion match + query = {} if criterion: if use_id or self.use_id: if self.storage.use_object_id: @@ -42,69 +188,91 @@ def find(self, criterion, parent=None, id_only=False, include_files=True, use_id query['label'] = criterion # Add parent to query - if parent and self.parent: - query[self.parent] = parent['_id'] + if parent: + query[parent['node_type']] = parent['_id'] # Setup projection if id_only: - proj = Node.id_only_projection.copy() + proj = ContainerNode.id_only_projection.copy() + if fetch_files: + proj['files'] = 1 else: proj = self.storage.get_list_projection() - if not 
include_files: + if proj and not fetch_files: proj['files'] = 0 # We don't use the user field here because we want to return a 403 if # they try to resolve something they don't have access to - results = self.storage.get_all_el(query, None, proj, sort=Node.sorting, limit=limit) - for el in results: - self.storage.filter_deleted_files(el) - el['node_type'] = self.node_type + results = self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) + if not results: + raise APINotFoundException('No {0} {1} found.'.format(criterion, self.node_type)) + + child = results[0] - return results + self.storage.filter_deleted_files(child) + child['node_type'] = self.node_type + path_out.append(child) + # Get the next node + if not path_in: + return None -PROJECT_TREE = [ - Node(containerstorage.GroupStorage(), None, files=False, use_id=True), - Node(containerstorage.ProjectStorage(), 'group'), - Node(containerstorage.SessionStorage(), 'project'), - Node(containerstorage.AcquisitionStorage(), 'session') -] + if fetch_files: + path_in.popleft() + return FilesNode() -def parse_criterion(value): - if not value: - return False, None + # TODO: Check for analyses - use_id = False - # Check for syntax - if value.startswith(''): - value = value[4:len(value)-1] - use_id = True + if self.child_name: + return ContainerNode(self.child_name) - return use_id, value + return None -def pop_files(container): - """ - Return a consistently-ordered set of files for a given container. 
- """ - if not container: - return [] + def get_children(self, path_out): + """Get all children of the last node""" + parent = get_parent(path_out) - files = container.pop('files', []) + # Get container chilren + if self.child_name: + query = {} + if parent: + query[parent['node_type']] = parent['_id'] - files.sort(key=lambda f: f.get('name', '')) - for f in files: - f['node_type'] = 'file' + children = ContainerNode.get_container_children(self.child_name, query) + else: + children = [] - return files + # TODO: Add analyses? -def find_file(files, name): - """ - Find a file by name - """ - for f in files: - if str(f.get('name')) == name: - return f - return None + # Add files + return children + pop_files(parent) + + @classmethod + def get_container_children(cls, cont_name, query=None): + """Get all children of container named cont_name, using query""" + storage = ContainerStorage.factory(cont_name) + + proj = storage.get_list_projection() + if proj: + proj['files'] = 0 + + children = storage.get_all_el(query, None, proj, sort=ContainerNode.sorting) + apply_node_type(children, containerutil.singularize(cont_name)) + + return children + +class GearsNode(BaseNode): + """The top level "gears" node""" + def __init__(self): + pass + + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy, adding any value found to path_out""" + return None + + def get_children(self, path_out): + """Get a list of all gears""" + return [] class Resolver(object): """ @@ -112,7 +280,6 @@ class Resolver(object): Does not tolerate ambiguity at any level of the path except the final node. 
""" - def __init__(self, id_only=False): self.id_only = id_only @@ -121,74 +288,24 @@ def resolve(self, path): raise InputValidationException("Path must be an array of strings") path = deque(path) - tree = deque(PROJECT_TREE) + node = None + next_node = RootNode() + resolved_path = [] resolved_children = [] - last = None - files = [] - - # Short circuit - just return a list of groups - if not path: - resolved_children = tree[0].find(None, id_only=self.id_only) - return { - 'path': resolved_path, - 'children': resolved_children - } - - # Walk down the tree, building path until we get to the last node + + # Walk down the tree, building path until we get to a leaf node # Keeping in mind that path may be empty - while len(path) > 0 and len(tree) > 0: - node = tree.popleft() - current_id, current = parse_criterion(path.popleft()) - - # Find the next child - children = node.find(current, parent=last, id_only=self.id_only, include_files=True, use_id=current_id, limit=1) - - # If children is empty, try to find a match in the last set of files - if not children: - # Check in last set of files - if not current_id: - child = find_file(files, current) - if child: - children = [child] - files = [] - if len(path) > 0: - raise APINotFoundException('Files have no children') - - if not children: - # Not found - or_file = 'or file ' if node.files else '' - raise APINotFoundException('No {0} {1} {2} found.'.format(current, node.node_type, or_file)) - - # Otherwise build up path - resolved_path.append(children[0]) - last = resolved_path[-1] - files = pop_files(last) - - # If there are path elements left, search in the last set of files + while next_node: + node = next_node + next_node = node.next(path, resolved_path, self.id_only) + + # If we haven't consumed path, then we didn't find what we were looking for if len(path) > 0: - filename = path.popleft() - f = find_file(files, filename) - if not f: - raise APINotFoundException('No ' + filename + ' file found.') - if len(path) > 0: - 
raise APINotFoundException('Files have no children') - resolved_path.append(f) - files = [] - - # Resolve children - if not self.id_only: - if last and last.get('node_type') != 'file': - # Retrieve any child nodes - if len(tree) > 0: - node = tree[0] - resolved_children = node.find(None, parent=last, include_files=False) - - # Add any files from the last node - resolved_children = resolved_children + files - - elif len(path) > 0: - raise APINotFoundException('Cannot retrieve id for file: {0}'.format(path[0])) + raise APINotFoundException('Could not resolve node for: ' + '/'.join(path)) + + if hasattr(node, 'get_children'): + resolved_children = node.get_children(resolved_path) return { 'path': resolved_path, diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 0e71f5075..26fe6fe9d 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -5,6 +5,8 @@ def path_in_result(path, result): def child_in_result(child, result): return sum(all((k in c and c[k]==v) for k,v in child.iteritems()) for c in result['children']) == 1 +def idz(s): + return '' def test_resolver(data_builder, as_admin, as_user, as_public, file_form): # ROOT @@ -91,9 +93,14 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): assert child_in_result({'_id': session, 'node_type': 'session'}, result) assert len(result['children']) == 2 - # resolve root/group/project/file + # resolve root/group/project/file (old way) r = as_admin.post('/resolve', json={'path': [group, project_label, project_file]}) result = r.json() + assert r.status_code == 404 + + # resolve root/group/project/file + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files', project_file]}) + result = r.json() assert r.ok assert path_in_result([group, project, project_file], result) assert result['children'] == [] @@ -133,7 +140,7 @@ def test_resolver(data_builder, as_admin, 
as_user, as_public, file_form): assert len(result['children']) == 2 # resolve root/group/project/session/file - r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, session_file]}) + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'files', session_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, session_file], result) @@ -164,24 +171,21 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): assert len(result['children']) == 1 # resolve root/group/project/session/acquisition/file - r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file]}) + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acquisition_file], result) assert result['children'] == [] - def idz(s): - return '' - # resolve root/group/project/session/acquisition/file with id - r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), acquisition_file]}) + r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), 'files', acquisition_file]}) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acquisition_file], result) assert result['children'] == [] # resolve root/group/project/session/acquisition/file with invalid id - r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz('not-valid'), idz(acquisition), acquisition_file]}) + r = as_admin.post('/resolve', json={'path': [idz(group), idz(project), idz('not-valid'), idz(acquisition), 'files', acquisition_file]}) assert r.status_code == 400 # try to resolve non-existent root/group/project/session/acquisition/child @@ -253,7 +257,7 @@ def test_lookup(data_builder, 
as_admin, as_user, as_public, file_form): r = as_admin.post('/projects/' + project + '/files', files=file_form(project_file)) assert r.ok - r = as_admin.post('/lookup', json={'path': [group, project_label, project_file]}) + r = as_admin.post('/lookup', json={'path': [group, project_label, 'files', project_file]}) result = r.json() assert r.ok assert result['node_type'] == 'file' @@ -282,7 +286,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/sessions/' + session + '/files', files=file_form(session_file)) assert r.ok - r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, session_file]}) + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'files', session_file]}) result = r.json() assert r.ok assert result['node_type'] == 'file' @@ -309,7 +313,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form(acquisition_file)) assert r.ok - r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file]}) + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file]}) result = r.json() assert r.ok assert result['node_type'] == 'file' @@ -317,9 +321,6 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): assert 'mimetype' in result assert 'size' in result - def idz(s): - return '' - # lookup root/group/project/session/acquisition with id r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition)]}) result = r.json() @@ -328,7 +329,7 @@ def idz(s): assert result['_id'] == acquisition # lookup root/group/project/session/acquisition/file with id - r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), acquisition_file]}) + r = as_admin.post('/lookup', 
json={'path': [idz(group), idz(project), idz(session), idz(acquisition), 'files', acquisition_file]}) result = r.json() assert r.ok assert result['node_type'] == 'file' @@ -343,6 +344,6 @@ def idz(s): # FILE # try to lookup non-existent (also invalid) root/group/project/session/acquisition/file/child - r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, acquisition_file, 'child']}) + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file, 'child']}) assert r.status_code == 404 From 31f0a5b7a12a9866757331fcf1011135ce848bc1 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Mar 2018 14:27:20 -0600 Subject: [PATCH 47/53] Add gears to resolver This does not yet support gear versioning. --- api/jobs/handlers.py | 5 +- api/resolver.py | 32 ++++++++-- .../integration_tests/python/test_resolver.py | 62 ++++++++++++++++++- 3 files changed, 92 insertions(+), 7 deletions(-) diff --git a/api/jobs/handlers.py b/api/jobs/handlers.py index ebef965d6..d5e4a5672 100644 --- a/api/jobs/handlers.py +++ b/api/jobs/handlers.py @@ -59,7 +59,10 @@ class GearHandler(base.RequestHandler): @require_login def get(self, _id): - return get_gear(_id) + result = get_gear(_id) + util.add_node_type(self.request, result) + return result + @require_login def get_invocation(self, _id): diff --git a/api/resolver.py b/api/resolver.py index aa10cb63d..6b131ee8d 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -38,6 +38,7 @@ from .dao import containerutil from .dao.basecontainerstorage import ContainerStorage +from .jobs import gears from .web.errors import APINotFoundException, InputValidationException def path_peek(path): @@ -263,16 +264,39 @@ def get_container_children(cls, cont_name, query=None): class GearsNode(BaseNode): """The top level "gears" node""" - def __init__(self): - pass - def next(self, path_in, path_out, id_only): """Get the next node in the hierarchy, 
adding any value found to path_out""" + if not path_in: + return None + + use_id, criterion = parse_criterion(path_in) + if use_id: + gear = gears.get_gear(criterion) + else: + gear = gears.get_gear_by_name(criterion) + + if not gear: + raise APINotFoundException('No gear {0} found.'.format(criterion)) + + gear['node_type'] = 'gear' + path_out.append(gear) + return None def get_children(self, path_out): """Get a list of all gears""" - return [] + + # No children for gears yet + if path_out: + return [] + + results = gears.get_gears() + + for gear in results: + gear['node_type'] = 'gear' + + return list(results) + class Resolver(object): """ diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 26fe6fe9d..35f518349 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -1,10 +1,15 @@ def path_in_result(path, result): return [node.get('_id', node.get('name')) for node in result['path']] == path - def child_in_result(child, result): return sum(all((k in c and c[k]==v) for k,v in child.iteritems()) for c in result['children']) == 1 +def gear_in_path(name, id, result): + for g in result['path']: + if g['gear']['name'] == name and g['_id'] == id: + return True + return False + def idz(s): return '' @@ -216,7 +221,6 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): # lookup root (1 group) group = data_builder.create_group() r = as_admin.post('/lookup', json={'path': []}) - result = r.json() assert r.status_code == 404 # try to lookup non-existent root/child @@ -347,3 +351,57 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file, 'child']}) assert r.status_code == 404 +def test_resolve_gears(data_builder, default_payload, as_admin, as_user, as_public, file_form): + # ROOT + # try 
accessing resolver w/o logging in + r = as_public.post('/resolve', json={'path': ['gears']}) + assert r.status_code == 403 + + # resolve root (1 gear) + gear_id = data_builder.create_gear() + gear = as_admin.get('/gears/' + gear_id).json() + gear_name = gear['gear']['name'] + + r = as_admin.post('/resolve', json={'path': ['gears']}) + result = r.json() + assert r.ok + assert result['path'] == [] + assert child_in_result({'_id': gear_id, 'node_type': 'gear'}, result) + + # resolve gear (empty) + r = as_admin.post('/resolve', json={'path': ['gears', gear_name]}) + result = r.json() + assert r.ok + assert gear_in_path(gear_name, gear_id, result) + assert result['children'] == [] + + # resolve gear by id + r = as_admin.post('/resolve', json={'path': ['gears', idz(gear_id)]}) + result = r.json() + assert r.ok + assert gear_in_path(gear_name, gear_id, result) + assert result['children'] == [] + + # Lookup (empty) + r = as_admin.post('/lookup', json={'path': ['gears']}) + result = r.json() + assert r.status_code == 404 + + # Lookup by name + r = as_admin.post('/lookup', json={'path': ['gears', gear_name]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'gear' + assert result['_id'] == gear_id + assert result['gear']['name'] == gear_name + + # Lookup by id + r = as_admin.post('/lookup', json={'path': ['gears', idz(gear_id)]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'gear' + assert result['_id'] == gear_id + assert result['gear']['name'] == gear_name + + + From 94d2fd64ba7fcb9dda8bd659cc09e45aca2b9fe3 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Mar 2018 15:06:37 -0600 Subject: [PATCH 48/53] Add optional query to get_analyses --- api/dao/containerstorage.py | 11 +++++++---- api/handlers/containerhandler.py | 4 ++-- api/jobs/gears.py | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py index a377258e3..20a060ed0 100644 --- 
a/api/dao/containerstorage.py +++ b/api/dao/containerstorage.py @@ -311,10 +311,13 @@ def get_parent(self, parent_type, parent_id): return parent_storage.get_container(parent_id) - def get_analyses(self, parent_type, parent_id, inflate_job_info=False): - parent_type = containerutil.singularize(parent_type) - parent_id = bson.ObjectId(parent_id) - analyses = self.get_all_el({'parent.type': parent_type, 'parent.id': parent_id}, None, None) + def get_analyses(self, query, parent_type, parent_id, inflate_job_info=False, **kwargs): + if query is None: + query = {} + query['parent.type'] = containerutil.singularize(parent_type) + query['parent.id'] = bson.ObjectId(parent_id) + + analyses = self.get_all_el(query, None, None, **kwargs) if inflate_job_info: for analysis in analyses: self.inflate_job_info(analysis) diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py index f6940fa1f..e50d07d1f 100644 --- a/api/handlers/containerhandler.py +++ b/api/handlers/containerhandler.py @@ -106,7 +106,7 @@ def get(self, cont_name, **kwargs): fileinfo['path'] = util.path_from_hash(fileinfo['hash']) inflate_job_info = cont_name == 'sessions' - result['analyses'] = AnalysisStorage().get_analyses(cont_name, _id, inflate_job_info) + result['analyses'] = AnalysisStorage().get_analyses(None, cont_name, _id, inflate_job_info) util.add_node_type(self.request, result) @@ -235,7 +235,7 @@ def get_jobs(self, cid): permchecker(noop)('GET', cid) - analyses = AnalysisStorage().get_analyses('session', cont['_id']) + analyses = AnalysisStorage().get_analyses(None, 'session', cont['_id']) acquisitions = cont.get('acquisitions', []) results = [] diff --git a/api/jobs/gears.py b/api/jobs/gears.py index 908e08034..411ed1314 100644 --- a/api/jobs/gears.py +++ b/api/jobs/gears.py @@ -60,7 +60,7 @@ def suggest_container(gear, cont_name, cid): """ root = ContainerStorage.factory(cont_name).get_container(cid, projection={'permissions':0}, get_children=True) - root['analyses'] = 
ContainerStorage.factory('analyses').get_analyses(cont_name, cid, False) + root['analyses'] = ContainerStorage.factory('analyses').get_analyses(None, cont_name, cid, False) invocation_schema = get_invocation_schema(gear) From 234274ad1bdcd67abd7868cf1405a934f3b3de31 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Fri, 9 Mar 2018 16:33:28 -0600 Subject: [PATCH 49/53] Add analyses to resolver --- api/dao/containerstorage.py | 7 +- api/handlers/refererhandler.py | 2 + api/handlers/resolvehandler.py | 13 +- api/resolver.py | 83 ++++++-- .../integration_tests/python/test_resolver.py | 190 +++++++++++++++++- 5 files changed, 271 insertions(+), 24 deletions(-) diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py index 20a060ed0..6eb54fbe9 100644 --- a/api/dao/containerstorage.py +++ b/api/dao/containerstorage.py @@ -303,7 +303,8 @@ def __init__(self): class AnalysisStorage(ContainerStorage): def __init__(self): - super(AnalysisStorage, self).__init__('analyses', use_object_id=True, use_delete_tag=True) + super(AnalysisStorage, self).__init__('analyses', use_object_id=True, use_delete_tag=True, + list_projection={'info': 0, 'files.info': 0, 'tags': 0}) def get_parent(self, parent_type, parent_id): @@ -311,13 +312,13 @@ def get_parent(self, parent_type, parent_id): return parent_storage.get_container(parent_id) - def get_analyses(self, query, parent_type, parent_id, inflate_job_info=False, **kwargs): + def get_analyses(self, query, parent_type, parent_id, inflate_job_info=False, projection=None, **kwargs): if query is None: query = {} query['parent.type'] = containerutil.singularize(parent_type) query['parent.id'] = bson.ObjectId(parent_id) - analyses = self.get_all_el(query, None, None, **kwargs) + analyses = self.get_all_el(query, None, projection, **kwargs) if inflate_job_info: for analysis in analyses: self.inflate_job_info(analysis) diff --git a/api/handlers/refererhandler.py b/api/handlers/refererhandler.py index a04440187..351758ddc 100644 --- 
a/api/handlers/refererhandler.py +++ b/api/handlers/refererhandler.py @@ -150,6 +150,8 @@ def get(self, **kwargs): if self.is_true('inflate_job'): self.storage.inflate_job_info(analysis) + util.add_node_type(self.request, analysis) + self.log_user_access(AccessType.view_container, cont_name=analysis['parent']['type'], cont_id=analysis['parent']['id']) return analysis diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index 1bb111c97..ca29c215a 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -3,6 +3,7 @@ """ from webapp2 import Request +from ..dao import containerutil from ..web import base from ..web.errors import APINotFoundException from ..resolver import Resolver @@ -52,8 +53,12 @@ def lookup(self): def _get_node_path(self, node): """Get the actual resource path for node""" - # Right now all containers are just node_type + 's' - cname = node['node_type'] + 's' + try: + cname = containerutil.pluralize(node['node_type']) + except ValueError: + # Handle everything else... 
+ cname = node['node_type'] + 's' + return '{0}/{1}'.format(cname, node['_id']) def _resolve_and_check_permissions(self, id_only): @@ -70,7 +75,7 @@ def _resolve_and_check_permissions(self, id_only): if not self.superuser_request: for x in result["path"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group']: + if x['node_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: @@ -83,7 +88,7 @@ def _resolve_and_check_permissions(self, id_only): filtered_children = [] for x in result["children"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group']: + if x['node_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: diff --git a/api/resolver.py b/api/resolver.py index 6b131ee8d..abff415c1 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -8,6 +8,8 @@ Quoted strings represent literal nodes in the graph. For example, to find the gear called dicom-mr-classifier, you would use the path: ["gears", "dicom-mr-classifier"] +NOTE: Currently subjects and gear versions are not supported! 
+ +----+ +-------+ +-----+ +-------+ |Root+---+"gears"+---+Gears+---+Version| +-+--+ +-------+ +-----+ +-------+ @@ -105,7 +107,7 @@ def get_children(self, path_out): class RootNode(BaseNode): """The root node of the resolver tree""" def __init__(self): - self.groups_node = ContainerNode('groups', files=False, use_id=True) + self.groups_node = ContainerNode('groups', files=False, use_id=True, analyses=False) def next(self, path_in, path_out, id_only): """Get the next node in the hierarchy""" @@ -118,7 +120,6 @@ def next(self, path_in, path_out, id_only): if path_el: return self.groups_node - # TODO: Gears return None def get_children(self, path_out): @@ -126,9 +127,12 @@ def get_children(self, path_out): return ContainerNode.get_container_children('groups') class FilesNode(BaseNode): - """Node that represents filename resolution""" + """Node that manages filename resolution""" def next(self, path_in, path_out, id_only): """Get the next node in the hierarchy""" + if not path_in: + return None + filename = path_in.popleft() parent = get_parent(path_out) @@ -142,6 +146,13 @@ def next(self, path_in, path_out, id_only): raise APINotFoundException('No ' + filename + ' file found.') + def get_children(self, path_out): + """Get the children of the current node in the hierarchy""" + parent = get_parent(path_out) + if parent: + return pop_files(parent) + return [] + class ContainerNode(BaseNode): # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. 
# As neither property should ever change, this sort should be consistent @@ -154,13 +165,14 @@ class ContainerNode(BaseNode): 'files': 1, } - def __init__(self, cont_name, files=True, use_id=False): + def __init__(self, cont_name, files=True, use_id=False, analyses=True): self.cont_name = cont_name self.storage = ContainerStorage.factory(cont_name) # node_type is also the parent id field name self.node_type = containerutil.singularize(cont_name) self.files = files self.use_id = use_id + self.analyses = analyses self.child_name = self.storage.get_child_container_name() def next(self, path_in, path_out, id_only): @@ -188,10 +200,6 @@ def next(self, path_in, path_out, id_only): else: query['label'] = criterion - # Add parent to query - if parent: - query[parent['node_type']] = parent['_id'] - # Setup projection if id_only: proj = ContainerNode.id_only_projection.copy() @@ -204,7 +212,7 @@ def next(self, path_in, path_out, id_only): # We don't use the user field here because we want to return a 403 if # they try to resolve something they don't have access to - results = self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) + results = self.find(query, parent, proj) if not results: raise APINotFoundException('No {0} {1} found.'.format(criterion, self.node_type)) @@ -218,11 +226,18 @@ def next(self, path_in, path_out, id_only): if not path_in: return None + # Files if fetch_files: path_in.popleft() return FilesNode() - # TODO: Check for analyses + # Check for analyses + if path_peek(path_in) == 'analyses': + if self.analyses: + path_in.popleft() + return AnalysesNode() + + raise APINotFoundException('No analyses at the {0} level'.format(self.node_type)) if self.child_name: return ContainerNode(self.child_name) @@ -243,11 +258,29 @@ def get_children(self, path_out): else: children = [] - # TODO: Add analyses? 
+ # Add analyses + if self.analyses: + analyses_node = AnalysesNode() + + proj = analyses_node.storage.get_list_projection() + if proj: + proj['files'] = 0 + + analyses = analyses_node.list_analyses(parent, proj=proj) + apply_node_type(analyses, analyses_node.node_type) + children = children + analyses # Add files return children + pop_files(parent) + def find(self, query, parent, proj): + """ Find the one child of this container that matches query """ + # Add parent to query + if parent: + query[parent['node_type']] = parent['_id'] + + return self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) + @classmethod def get_container_children(cls, cont_name, query=None): """Get all children of container named cont_name, using query""" @@ -297,6 +330,33 @@ def get_children(self, path_out): return list(results) +class AnalysesNode(ContainerNode): + """The analyses node""" + def __init__(self): + super(AnalysesNode, self).__init__('analyses', files=True, use_id=False, analyses=False) + + def find(self, query, parent, proj): + """Find the one child of this container that matches query""" + return self.list_analyses(parent, query, proj, limit=1) + + def get_children(self, path_out): + """Get a list of all gears""" + parent = get_parent(path_out) + if not parent: + raise APINotFoundException('No analyses at that level') + + # Only children of an analyses is files + if parent.get('node_type') == 'analysis': + return pop_files(parent) + + results = self.list_analyses(parent) + apply_node_type(results, self.node_type) + return results + + def list_analyses(self, parent, query=None, proj=None, **kwargs): + """Get a list of all analyses that match query, using the given projection""" + return self.storage.get_analyses(query, parent['node_type'], parent['_id'], projection=proj, sort=ContainerNode.sorting, **kwargs) + class Resolver(object): """ @@ -335,4 +395,3 @@ def resolve(self, path): 'path': resolved_path, 'children': resolved_children } - diff --git 
a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 35f518349..54e4cb37d 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -13,6 +13,14 @@ def gear_in_path(name, id, result): def idz(s): return '' +def create_analysis(as_admin, file_form, container, c_id, label): + r = as_admin.post('/' + container + '/' + c_id + '/analyses', files=file_form( + 'one.csv', meta={'label': label, 'inputs': [{'name': 'one.csv'}]} + )) + assert r.ok + return r.json()['_id'] + + def test_resolver(data_builder, as_admin, as_user, as_public, file_form): # ROOT # try accessing resolver w/o logging in @@ -98,6 +106,14 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): assert child_in_result({'_id': session, 'node_type': 'session'}, result) assert len(result['children']) == 2 + # resolve root/group/project/files (1 file, 1 session) + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files']}) + result = r.json() + assert r.ok + assert path_in_result([group, project], result) + assert child_in_result({'name': project_file, 'node_type': 'file'}, result) + assert len(result['children']) == 1 + # resolve root/group/project/file (old way) r = as_admin.post('/resolve', json={'path': [group, project_label, project_file]}) result = r.json() @@ -205,7 +221,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): def test_lookup(data_builder, as_admin, as_user, as_public, file_form): # ROOT - # try accessing lookupr w/o logging in + # try accessing lookup w/o logging in r = as_public.post('/lookup', json={'path': []}) assert r.status_code == 403 @@ -247,7 +263,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): # PROJECT # lookup root/group/project (empty) - project_label = 'test-lookupr-project-label' + project_label = 'test-lookup-project-label' project = 
data_builder.create_project(label=project_label) r = as_admin.post('/lookup', json={'path': [group, project_label]}) @@ -276,7 +292,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): # SESSION # lookup root/group/project/session (empty) - session_label = 'test-lookupr-session-label' + session_label = 'test-lookup-session-label' session = data_builder.create_session(label=session_label) r = as_admin.post('/lookup', json={'path': [group, project_label, session_label]}) @@ -304,7 +320,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): # ACQUISITION # lookup root/group/project/session/acquisition (empty) - acquisition_label = 'test-lookupr-acquisition-label' + acquisition_label = 'test-lookup-acquisition-label' acquisition = data_builder.create_acquisition(label=acquisition_label) r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label]}) result = r.json() @@ -351,7 +367,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file, 'child']}) assert r.status_code == 404 -def test_resolve_gears(data_builder, default_payload, as_admin, as_user, as_public, file_form): +def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): # ROOT # try accessing resolver w/o logging in r = as_public.post('/resolve', json={'path': ['gears']}) @@ -403,5 +419,169 @@ def test_resolve_gears(data_builder, default_payload, as_admin, as_user, as_publ assert result['_id'] == gear_id assert result['gear']['name'] == gear_name +def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form): + analysis_file = 'one.csv' + + # Create group + group = data_builder.create_group() + + # Create project + project_label = 'test-resolve-analyses-project-label' + project = data_builder.create_project(label=project_label) + + project_file 
= 'project_file' + r = as_admin.post('/projects/' + project + '/files', files=file_form(project_file)) + assert r.ok + + project_analysis_name = 'test-project-analysis' + project_analysis = create_analysis(as_admin, file_form, 'projects', project, project_analysis_name) + + # Create session + session_label = 'test-resolve-analyses-session-label' + session = data_builder.create_session(label=session_label) + + session_file = 'session_file' + r = as_admin.post('/sessions/' + session + '/files', files=file_form(session_file)) + assert r.ok + + session_analysis_name = 'test-session-analysis' + session_analysis = create_analysis(as_admin, file_form, 'sessions', session, session_analysis_name) + + # Create acquisition + acquisition_label = 'test-resolve-analyses-acquisition-label' + acquisition = data_builder.create_acquisition(label=acquisition_label) + + acquisition_file = 'acquisition_file' + r = as_admin.post('/acquisitions/' + acquisition + '/files', files=file_form(acquisition_file)) + assert r.ok + + acq_analysis_name = 'test-acquisition-analysis' + acq_analysis = create_analysis(as_admin, file_form, 'acquisitions', acquisition, acq_analysis_name) + + # PROJECT + # resolve root/group/project (1 file, 1 session) + r = as_admin.post('/resolve', json={'path': [group, project_label]}) + result = r.json() + assert r.ok + assert path_in_result([group, project], result) + assert child_in_result({'name': project_file, 'node_type': 'file'}, result) + assert child_in_result({'_id': session, 'node_type': 'session'}, result) + assert child_in_result({'_id': project_analysis, 'node_type': 'analysis'}, result) + assert len(result['children']) == 3 + + # resolve root/group/project/analysis + r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses']}) + result = r.json() + assert r.ok + assert path_in_result([group, project], result) + assert child_in_result({'_id': project_analysis, 'node_type': 'analysis'}, result) + assert len(result['children']) == 1 + + 
# resolve root/group/project/analysis/name + r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses', project_analysis_name]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, project_analysis], result) + assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert len(result['children']) == 1 + + # lookup root/group/project/analysis/name + r = as_admin.post('/lookup', json={'path': [group, project_label, 'analyses', project_analysis_name]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'analysis' + assert result['_id'] == project_analysis + assert len(result['files']) == 1 + + # resolve root/group/project/analysis/files + r = as_admin.post('/resolve', json={'path': [group, project_label, 'analyses', project_analysis_name, 'files', analysis_file]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, project_analysis, analysis_file], result) + assert result['children'] == [] + + # SESSION + # resolve root/group/project/session + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session], result) + assert child_in_result({'name': session_file, 'node_type': 'file'}, result) + assert child_in_result({'_id': acquisition, 'node_type': 'acquisition'}, result) + assert child_in_result({'_id': session_analysis, 'node_type': 'analysis'}, result) + assert len(result['children']) == 3 + + # resolve root/group/project/analysis/name + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'analyses', session_analysis_name]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, session_analysis], result) + assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert len(result['children']) == 1 + + # lookup root/group/project/analysis/name + r = as_admin.post('/lookup', 
json={'path': [group, project_label, session_label, 'analyses', session_analysis_name]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'analysis' + assert result['_id'] == session_analysis + assert len(result['files']) == 1 + + # resolve root/group/project/analysis/files + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, 'analyses', session_analysis_name, 'files', analysis_file]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, session_analysis, analysis_file], result) + assert result['children'] == [] + + # ACQUISITION + # resolve root/group/project/session/acquisition + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, acquisition], result) + assert child_in_result({'name': acquisition_file, 'node_type': 'file'}, result) + assert child_in_result({'_id': acq_analysis, 'node_type': 'analysis'}, result) + assert len(result['children']) == 2 + + # resolve root/group/project/analysis/name + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, acquisition, acq_analysis], result) + assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert len(result['children']) == 1 + + # lookup root/group/project/analysis/name + r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'analysis' + assert result['_id'] == acq_analysis + assert len(result['files']) == 1 + + # resolve root/group/project/analysis/id + r = as_admin.post('/resolve', json={'path': [group, project_label, idz(session), acquisition_label, 'analyses', 
idz(acq_analysis)]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, acquisition, acq_analysis], result) + assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert len(result['children']) == 1 + + # lookup root/group/project/analysis/name + r = as_admin.post('/lookup', json={'path': [group, project_label, idz(session), acquisition_label, 'analyses', idz(acq_analysis)]}) + result = r.json() + assert r.ok + assert result['node_type'] == 'analysis' + assert result['_id'] == acq_analysis + assert len(result['files']) == 1 + + # resolve root/group/project/analysis/files + r = as_admin.post('/resolve', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name, 'files', analysis_file]}) + result = r.json() + assert r.ok + assert path_in_result([group, project, session, acquisition, acq_analysis, analysis_file], result) + assert result['children'] == [] + From 4791196a4a7a7d64d220fd4e2290e5d00dab0097 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 12 Mar 2018 08:47:51 -0500 Subject: [PATCH 50/53] Add resolver definitions for gears and analyses --- swagger/paths/resolver.yaml | 6 ++++++ swagger/schemas/definitions/resolver.json | 16 +++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/swagger/paths/resolver.yaml b/swagger/paths/resolver.yaml index d200caee5..3ad449456 100644 --- a/swagger/paths/resolver.yaml +++ b/swagger/paths/resolver.yaml @@ -10,6 +10,12 @@ * session label * acquisition label + Additionally, analyses for project/session/acquisition nodes can be resolved by inserting the literal + string `"analyses"`. e.g. `['scitran', 'MyProject', 'analyses']`. + + Files for projects, sessions, acquisitions and analyses can be resolved by inserting the literal string + `"files"`. e.g. `['scitran', 'MyProject', 'files']`. + An ID can be used instead of a label by formatting the string as ``. 
The full path to the node, and the node's children will be included in the response. operationId: resolve_path diff --git a/swagger/schemas/definitions/resolver.json b/swagger/schemas/definitions/resolver.json index f32093f41..699a0a6d7 100644 --- a/swagger/schemas/definitions/resolver.json +++ b/swagger/schemas/definitions/resolver.json @@ -70,6 +70,20 @@ {"$ref":"file.json#/definitions/file-output"} ], "x-discriminator-value": "file" - } + }, + "analysis-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"analysis.json#/definitions/analysis-output"} + ], + "x-discriminator-value": "analysis" + }, + "gear-node": { + "allOf": [ + {"$ref":"#/definitions/resolver-node"}, + {"$ref":"gear.json#/definitions/gear-doc"} + ], + "x-discriminator-value": "gear" + } } } \ No newline at end of file From 21e34fcfb5a93e20ed991e44c04ed2ebacf64e6d Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 12 Mar 2018 09:16:56 -0500 Subject: [PATCH 51/53] Improve resolver test coverage --- api/resolver.py | 6 +++--- tests/integration_tests/python/test_resolver.py | 15 +++++++++++++-- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/api/resolver.py b/api/resolver.py index abff415c1..a9139dd50 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -98,11 +98,11 @@ class BaseNode(object): """Base class for all nodes in the resolver tree""" def next(self, path_in, path_out, id_only): # pylint: disable=W0613 - pass + pass # pragma: no cover def get_children(self, path_out): # pylint: disable=W0613 - return [] + return [] # pragma: no cover class RootNode(BaseNode): """The root node of the resolver tree""" @@ -340,7 +340,7 @@ def find(self, query, parent, proj): return self.list_analyses(parent, query, proj, limit=1) def get_children(self, path_out): - """Get a list of all gears""" + """Get a list of all analyses""" parent = get_parent(path_out) if not parent: raise APINotFoundException('No analyses at that level') diff --git 
a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index 54e4cb37d..d49fd89cc 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -50,7 +50,6 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/resolve', json={'path': ['child']}) assert r.status_code == 404 - # GROUP # try to resolve root/group as different (and non-root) user r = as_user.post('/resolve', json={'path': [group]}) @@ -76,7 +75,6 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/resolve', json={'path': [group, 'child']}) assert r.status_code == 404 - # PROJECT # resolve root/group/project (empty) r = as_admin.post('/resolve', json={'path': [group, project_label]}) @@ -126,6 +124,10 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): assert path_in_result([group, project, project_file], result) assert result['children'] == [] + # resolve non-existent root/group/project/file + r = as_admin.post('/resolve', json={'path': [group, project_label, 'files', 'NON-EXISTENT-FILE.dat']}) + assert r.status_code == 404 + # try to resolve non-existent root/group/project/child r = as_admin.post('/resolve', json={'path': [group, project_label, 'child']}) assert r.status_code == 404 @@ -419,6 +421,11 @@ def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): assert result['_id'] == gear_id assert result['gear']['name'] == gear_name + # Lookup (not-found) + r = as_admin.post('/lookup', json={'path': ['gears', 'NON-EXISTENT-GEAR']}) + assert r.status_code == 404 + + def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form): analysis_file = 'one.csv' @@ -458,6 +465,10 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) acq_analysis_name = 'test-acquisition-analysis' acq_analysis = create_analysis(as_admin, file_form, 
'acquisitions', acquisition, acq_analysis_name) + # GROUP + r = as_admin.post('/resolve', json={'path': [group, 'analyses']}) + assert r.status_code == 404 + # PROJECT # resolve root/group/project (1 file, 1 session) r = as_admin.post('/resolve', json={'path': [group, project_label]}) From 7c737984b112020696242d94106e95e7c137d3bb Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Mon, 12 Mar 2018 10:39:02 -0500 Subject: [PATCH 52/53] Rename resolver's node_type to container_type --- api/handlers/containerhandler.py | 2 +- api/handlers/grouphandler.py | 2 +- api/handlers/refererhandler.py | 2 +- api/handlers/resolvehandler.py | 16 ++-- api/jobs/handlers.py | 2 +- api/resolver.py | 36 ++++----- api/util.py | 8 +- swagger/schemas/definitions/resolver.json | 6 +- swagger/schemas/output/lookup.json | 2 +- swagger/schemas/output/resolver.json | 8 +- .../integration_tests/python/test_resolver.py | 74 +++++++++---------- 11 files changed, 79 insertions(+), 79 deletions(-) diff --git a/api/handlers/containerhandler.py b/api/handlers/containerhandler.py index e50d07d1f..e5fc14c51 100644 --- a/api/handlers/containerhandler.py +++ b/api/handlers/containerhandler.py @@ -108,7 +108,7 @@ def get(self, cont_name, **kwargs): inflate_job_info = cont_name == 'sessions' result['analyses'] = AnalysisStorage().get_analyses(None, cont_name, _id, inflate_job_info) - util.add_node_type(self.request, result) + util.add_container_type(self.request, result) return self.handle_origin(result) diff --git a/api/handlers/grouphandler.py b/api/handlers/grouphandler.py index e5d1d5cff..c54f3430d 100644 --- a/api/handlers/grouphandler.py +++ b/api/handlers/grouphandler.py @@ -22,7 +22,7 @@ def get(self, _id): self._filter_permissions([result], self.uid) if self.is_true('join_avatars'): ContainerHandler.join_user_info([result]) - util.add_node_type(self.request, result) + util.add_container_type(self.request, result) return result def delete(self, _id): diff --git a/api/handlers/refererhandler.py 
b/api/handlers/refererhandler.py index 351758ddc..84c6b44d9 100644 --- a/api/handlers/refererhandler.py +++ b/api/handlers/refererhandler.py @@ -150,7 +150,7 @@ def get(self, **kwargs): if self.is_true('inflate_job'): self.storage.inflate_job_info(analysis) - util.add_node_type(self.request, analysis) + util.add_container_type(self.request, analysis) self.log_user_access(AccessType.view_container, cont_name=analysis['parent']['type'], cont_id=analysis['parent']['id']) return analysis diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index ca29c215a..0a2866dec 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -28,7 +28,7 @@ def lookup(self): # In the event that we resolved a file, just return the file node dest = path[-1] - if dest.get('node_type') == 'file': + if dest.get('container_type') == 'file': return dest # Reroute to the actual path that will log access, resolve analyses, etc @@ -38,10 +38,10 @@ def lookup(self): destination_environ = self.request.environ for key in 'PATH_INFO', 'REQUEST_URI': destination_environ[key] = destination_environ[key].replace('lookup', path, 1) - # We also must update the method, and indicate that we want the node_type included - # The client will depend on node_type being set so that it can map to the correct type + # We also must update the method, and indicate that we want the container_type included + # The client will depend on container_type being set so that it can map to the correct type destination_environ['REQUEST_METHOD'] = 'GET' - destination_environ['fw_node_type'] = dest['node_type'] + destination_environ['fw_container_type'] = dest['container_type'] destination_request = Request(destination_environ) # Apply SciTranRequest attrs @@ -54,10 +54,10 @@ def lookup(self): def _get_node_path(self, node): """Get the actual resource path for node""" try: - cname = containerutil.pluralize(node['node_type']) + cname = containerutil.pluralize(node['container_type']) 
except ValueError: # Handle everything else... - cname = node['node_type'] + 's' + cname = node['container_type'] + 's' return '{0}/{1}'.format(cname, node['_id']) @@ -75,7 +75,7 @@ def _resolve_and_check_permissions(self, id_only): if not self.superuser_request: for x in result["path"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: + if x['container_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: @@ -88,7 +88,7 @@ def _resolve_and_check_permissions(self, id_only): filtered_children = [] for x in result["children"]: ok = False - if x['node_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: + if x['container_type'] in ['acquisition', 'session', 'project', 'group', 'analysis']: perms = x.get('permissions', []) for y in perms: if y.get('_id') == self.uid: diff --git a/api/jobs/handlers.py b/api/jobs/handlers.py index d5e4a5672..26e66232f 100644 --- a/api/jobs/handlers.py +++ b/api/jobs/handlers.py @@ -60,7 +60,7 @@ class GearHandler(base.RequestHandler): @require_login def get(self, _id): result = get_gear(_id) - util.add_node_type(self.request, result) + util.add_container_type(self.request, result) return result diff --git a/api/resolver.py b/api/resolver.py index a9139dd50..75ab0f98e 100644 --- a/api/resolver.py +++ b/api/resolver.py @@ -69,11 +69,11 @@ def get_parent(path_out): return path_out[-1] return None -def apply_node_type(lst, node_type): - """Apply node_type to each item in in the list""" +def apply_container_type(lst, container_type): + """Apply container_type to each item in in the list""" if lst: for item in lst: - item['node_type'] = node_type + item['container_type'] = container_type def pop_files(container): """Return a consistently-ordered set of files for a given container.""" @@ -83,7 +83,7 @@ def pop_files(container): files = container.pop('files', []) files.sort(key=lambda f: 
f.get('name', '')) - apply_node_type(files, 'file') + apply_container_type(files, 'file') return files @@ -168,8 +168,8 @@ class ContainerNode(BaseNode): def __init__(self, cont_name, files=True, use_id=False, analyses=True): self.cont_name = cont_name self.storage = ContainerStorage.factory(cont_name) - # node_type is also the parent id field name - self.node_type = containerutil.singularize(cont_name) + # container_type is also the parent id field name + self.container_type = containerutil.singularize(cont_name) self.files = files self.use_id = use_id self.analyses = analyses @@ -214,12 +214,12 @@ def next(self, path_in, path_out, id_only): # they try to resolve something they don't have access to results = self.find(query, parent, proj) if not results: - raise APINotFoundException('No {0} {1} found.'.format(criterion, self.node_type)) + raise APINotFoundException('No {0} {1} found.'.format(criterion, self.container_type)) child = results[0] self.storage.filter_deleted_files(child) - child['node_type'] = self.node_type + child['container_type'] = self.container_type path_out.append(child) # Get the next node @@ -237,7 +237,7 @@ def next(self, path_in, path_out, id_only): path_in.popleft() return AnalysesNode() - raise APINotFoundException('No analyses at the {0} level'.format(self.node_type)) + raise APINotFoundException('No analyses at the {0} level'.format(self.container_type)) if self.child_name: return ContainerNode(self.child_name) @@ -252,7 +252,7 @@ def get_children(self, path_out): if self.child_name: query = {} if parent: - query[parent['node_type']] = parent['_id'] + query[parent['container_type']] = parent['_id'] children = ContainerNode.get_container_children(self.child_name, query) else: @@ -267,7 +267,7 @@ def get_children(self, path_out): proj['files'] = 0 analyses = analyses_node.list_analyses(parent, proj=proj) - apply_node_type(analyses, analyses_node.node_type) + apply_container_type(analyses, analyses_node.container_type) children = children + 
analyses # Add files @@ -277,7 +277,7 @@ def find(self, query, parent, proj): """ Find the one child of this container that matches query """ # Add parent to query if parent: - query[parent['node_type']] = parent['_id'] + query[parent['container_type']] = parent['_id'] return self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) @@ -291,7 +291,7 @@ def get_container_children(cls, cont_name, query=None): proj['files'] = 0 children = storage.get_all_el(query, None, proj, sort=ContainerNode.sorting) - apply_node_type(children, containerutil.singularize(cont_name)) + apply_container_type(children, containerutil.singularize(cont_name)) return children @@ -311,7 +311,7 @@ def next(self, path_in, path_out, id_only): if not gear: raise APINotFoundException('No gear {0} found.'.format(criterion)) - gear['node_type'] = 'gear' + gear['container_type'] = 'gear' path_out.append(gear) return None @@ -326,7 +326,7 @@ def get_children(self, path_out): results = gears.get_gears() for gear in results: - gear['node_type'] = 'gear' + gear['container_type'] = 'gear' return list(results) @@ -346,16 +346,16 @@ def get_children(self, path_out): raise APINotFoundException('No analyses at that level') # Only children of an analyses is files - if parent.get('node_type') == 'analysis': + if parent.get('container_type') == 'analysis': return pop_files(parent) results = self.list_analyses(parent) - apply_node_type(results, self.node_type) + apply_container_type(results, self.container_type) return results def list_analyses(self, parent, query=None, proj=None, **kwargs): """Get a list of all analyses that match query, using the given projection""" - return self.storage.get_analyses(query, parent['node_type'], parent['_id'], projection=proj, sort=ContainerNode.sorting, **kwargs) + return self.storage.get_analyses(query, parent['container_type'], parent['_id'], projection=proj, sort=ContainerNode.sorting, **kwargs) class Resolver(object): diff --git a/api/util.py 
b/api/util.py index 60d42b567..657b3b976 100644 --- a/api/util.py +++ b/api/util.py @@ -341,8 +341,8 @@ def parse_range_header(range_header_val, valid_units=('bytes',)): return ranges -def add_node_type(request, result): - """Adds a 'node_type' property to result if fw_node_type is set in the request environment.""" - if 'fw_node_type' in request.environ and isinstance(result, dict): - result['node_type'] = request.environ['fw_node_type'] +def add_container_type(request, result): + """Adds a 'container_type' property to result if fw_container_type is set in the request environment.""" + if 'fw_container_type' in request.environ and isinstance(result, dict): + result['container_type'] = request.environ['fw_container_type'] diff --git a/swagger/schemas/definitions/resolver.json b/swagger/schemas/definitions/resolver.json index 699a0a6d7..2af1470d8 100644 --- a/swagger/schemas/definitions/resolver.json +++ b/swagger/schemas/definitions/resolver.json @@ -25,12 +25,12 @@ "resolver-node": { "type": "object", "properties": { - "node_type": { + "container_type": { "type": "string" } }, - "discriminator": "node_type", - "required": ["node_type"] + "discriminator": "container_type", + "required": ["container_type"] }, "resolver-node-list": { "type": "array", diff --git a/swagger/schemas/output/lookup.json b/swagger/schemas/output/lookup.json index 6ef67983a..490cfb672 100644 --- a/swagger/schemas/output/lookup.json +++ b/swagger/schemas/output/lookup.json @@ -3,7 +3,7 @@ "type":"object", "allOf": [{ "$ref": "../definitions/resolver.json#/definitions/resolver-node" }], "example": { - "node_type": "project", + "container_type": "project", "_id": "57e452791cff88b85f9f9c97", "label": "Neuroscience", "group": "scitran", diff --git a/swagger/schemas/output/resolver.json b/swagger/schemas/output/resolver.json index fd8979424..c52161a7d 100644 --- a/swagger/schemas/output/resolver.json +++ b/swagger/schemas/output/resolver.json @@ -5,7 +5,7 @@ "example": { "path": [ { - "node_type": 
"group", + "container_type": "group", "_id": "scitran", "label": "Scitran", "permissions": [ @@ -18,7 +18,7 @@ "modified": "2016-08-19T11:41:15.360000+00:00" }, { - "node_type": "project", + "container_type": "project", "_id": "57e452791cff88b85f9f9c97", "label": "Neuroscience", "group": "scitran", @@ -33,7 +33,7 @@ ], "children": [ { - "node_type": "session", + "container_type": "session", "_id": "57e01cccb1dc04000fb83f03", "label": "control_1", "group": "scitran", @@ -51,7 +51,7 @@ }] }, { - "node_type": "file", + "container_type": "file", "origin": { "method": "importer", "type": "device", diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index d49fd89cc..d6ad3fbdf 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -44,7 +44,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert result['path'] == [] - assert child_in_result({'_id': group, 'node_type': 'group'}, result) + assert child_in_result({'_id': group, 'container_type': 'group'}, result) # try to resolve non-existent root/child r = as_admin.post('/resolve', json={'path': ['child']}) @@ -69,7 +69,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group], result) - assert child_in_result({'_id': project, 'node_type': 'project'}, result) + assert child_in_result({'_id': project, 'container_type': 'project'}, result) # try to resolve non-existent root/group/child r = as_admin.post('/resolve', json={'path': [group, 'child']}) @@ -91,7 +91,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'name': project_file, 'node_type': 'file'}, result) + assert child_in_result({'name': project_file, 'container_type': 'file'}, 
result) assert len(result['children']) == 1 # resolve root/group/project (1 file, 1 session) @@ -101,7 +101,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'_id': session, 'node_type': 'session'}, result) + assert child_in_result({'_id': session, 'container_type': 'session'}, result) assert len(result['children']) == 2 # resolve root/group/project/files (1 file, 1 session) @@ -109,7 +109,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'name': project_file, 'node_type': 'file'}, result) + assert child_in_result({'name': project_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project/file (old way) @@ -149,7 +149,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session], result) - assert child_in_result({'name': session_file, 'node_type': 'file'}, result) + assert child_in_result({'name': session_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project/session (1 file, 1 acquisition) @@ -159,7 +159,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session], result) - assert child_in_result({'_id': acquisition, 'node_type': 'acquisition'}, result) + assert child_in_result({'_id': acquisition, 'container_type': 'acquisition'}, result) assert len(result['children']) == 2 # resolve root/group/project/session/file @@ -190,7 +190,7 @@ def test_resolver(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition], result) - assert 
child_in_result({'name': acquisition_file, 'node_type': 'file'}, result) + assert child_in_result({'name': acquisition_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # resolve root/group/project/session/acquisition/file @@ -255,7 +255,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group]}) result = r.json() assert r.ok - assert result['node_type'] == 'group' + assert result['container_type'] == 'group' assert result['_id'] == group # try to lookup non-existent root/group/child @@ -271,7 +271,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label]}) result = r.json() assert r.ok - assert result['node_type'] == 'project' + assert result['container_type'] == 'project' assert result['_id'] == project # lookup root/group/project/file @@ -282,7 +282,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, 'files', project_file]}) result = r.json() assert r.ok - assert result['node_type'] == 'file' + assert result['container_type'] == 'file' assert result['name'] == project_file assert 'mimetype' in result assert 'size' in result @@ -300,7 +300,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label]}) result = r.json() assert r.ok - assert result['node_type'] == 'session' + assert result['container_type'] == 'session' assert result['_id'] == session # lookup root/group/project/session/file @@ -311,7 +311,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, 'files', session_file]}) result = r.json() assert r.ok - assert result['node_type'] == 'file' + assert result['container_type'] == 'file' 
assert result['name'] == session_file assert 'mimetype' in result assert 'size' in result @@ -327,7 +327,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label]}) result = r.json() assert r.ok - assert result['node_type'] == 'acquisition' + assert result['container_type'] == 'acquisition' assert result['_id'] == acquisition # lookup root/group/project/session/acquisition/file @@ -338,7 +338,7 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'files', acquisition_file]}) result = r.json() assert r.ok - assert result['node_type'] == 'file' + assert result['container_type'] == 'file' assert result['name'] == acquisition_file assert 'mimetype' in result assert 'size' in result @@ -347,14 +347,14 @@ def test_lookup(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition)]}) result = r.json() assert r.ok - assert result['node_type'] == 'acquisition' + assert result['container_type'] == 'acquisition' assert result['_id'] == acquisition # lookup root/group/project/session/acquisition/file with id r = as_admin.post('/lookup', json={'path': [idz(group), idz(project), idz(session), idz(acquisition), 'files', acquisition_file]}) result = r.json() assert r.ok - assert result['node_type'] == 'file' + assert result['container_type'] == 'file' assert result['name'] == acquisition_file assert 'mimetype' in result assert 'size' in result @@ -384,7 +384,7 @@ def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): result = r.json() assert r.ok assert result['path'] == [] - assert child_in_result({'_id': gear_id, 'node_type': 'gear'}, result) + assert child_in_result({'_id': gear_id, 'container_type': 'gear'}, result) # 
resolve gear (empty) r = as_admin.post('/resolve', json={'path': ['gears', gear_name]}) @@ -409,7 +409,7 @@ def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': ['gears', gear_name]}) result = r.json() assert r.ok - assert result['node_type'] == 'gear' + assert result['container_type'] == 'gear' assert result['_id'] == gear_id assert result['gear']['name'] == gear_name @@ -417,7 +417,7 @@ def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': ['gears', idz(gear_id)]}) result = r.json() assert r.ok - assert result['node_type'] == 'gear' + assert result['container_type'] == 'gear' assert result['_id'] == gear_id assert result['gear']['name'] == gear_name @@ -475,9 +475,9 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'name': project_file, 'node_type': 'file'}, result) - assert child_in_result({'_id': session, 'node_type': 'session'}, result) - assert child_in_result({'_id': project_analysis, 'node_type': 'analysis'}, result) + assert child_in_result({'name': project_file, 'container_type': 'file'}, result) + assert child_in_result({'_id': session, 'container_type': 'session'}, result) + assert child_in_result({'_id': project_analysis, 'container_type': 'analysis'}, result) assert len(result['children']) == 3 # resolve root/group/project/analysis @@ -485,7 +485,7 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project], result) - assert child_in_result({'_id': project_analysis, 'node_type': 'analysis'}, result) + assert child_in_result({'_id': project_analysis, 'container_type': 'analysis'}, result) assert len(result['children']) == 1 # resolve root/group/project/analysis/name @@ -493,14 +493,14 @@ def 
test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, project_analysis], result) - assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # lookup root/group/project/analysis/name r = as_admin.post('/lookup', json={'path': [group, project_label, 'analyses', project_analysis_name]}) result = r.json() assert r.ok - assert result['node_type'] == 'analysis' + assert result['container_type'] == 'analysis' assert result['_id'] == project_analysis assert len(result['files']) == 1 @@ -517,9 +517,9 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, session], result) - assert child_in_result({'name': session_file, 'node_type': 'file'}, result) - assert child_in_result({'_id': acquisition, 'node_type': 'acquisition'}, result) - assert child_in_result({'_id': session_analysis, 'node_type': 'analysis'}, result) + assert child_in_result({'name': session_file, 'container_type': 'file'}, result) + assert child_in_result({'_id': acquisition, 'container_type': 'acquisition'}, result) + assert child_in_result({'_id': session_analysis, 'container_type': 'analysis'}, result) assert len(result['children']) == 3 # resolve root/group/project/analysis/name @@ -527,14 +527,14 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, session, session_analysis], result) - assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # lookup root/group/project/analysis/name r = as_admin.post('/lookup', json={'path': [group, 
project_label, session_label, 'analyses', session_analysis_name]}) result = r.json() assert r.ok - assert result['node_type'] == 'analysis' + assert result['container_type'] == 'analysis' assert result['_id'] == session_analysis assert len(result['files']) == 1 @@ -551,8 +551,8 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition], result) - assert child_in_result({'name': acquisition_file, 'node_type': 'file'}, result) - assert child_in_result({'_id': acq_analysis, 'node_type': 'analysis'}, result) + assert child_in_result({'name': acquisition_file, 'container_type': 'file'}, result) + assert child_in_result({'_id': acq_analysis, 'container_type': 'analysis'}, result) assert len(result['children']) == 2 # resolve root/group/project/analysis/name @@ -560,14 +560,14 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acq_analysis], result) - assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert child_in_result({'name': analysis_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # lookup root/group/project/analysis/name r = as_admin.post('/lookup', json={'path': [group, project_label, session_label, acquisition_label, 'analyses', acq_analysis_name]}) result = r.json() assert r.ok - assert result['node_type'] == 'analysis' + assert result['container_type'] == 'analysis' assert result['_id'] == acq_analysis assert len(result['files']) == 1 @@ -576,14 +576,14 @@ def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form) result = r.json() assert r.ok assert path_in_result([group, project, session, acquisition, acq_analysis], result) - assert child_in_result({'name': analysis_file, 'node_type': 'file'}, result) + assert child_in_result({'name': 
analysis_file, 'container_type': 'file'}, result) assert len(result['children']) == 1 # lookup root/group/project/analysis/name r = as_admin.post('/lookup', json={'path': [group, project_label, idz(session), acquisition_label, 'analyses', idz(acq_analysis)]}) result = r.json() assert r.ok - assert result['node_type'] == 'analysis' + assert result['container_type'] == 'analysis' assert result['_id'] == acq_analysis assert len(result['files']) == 1 From 230a39e4d14af208244411d83842dd6b9065f4b9 Mon Sep 17 00:00:00 2001 From: Justin Ehlert Date: Wed, 14 Mar 2018 10:27:52 -0500 Subject: [PATCH 53/53] Resolve review comments Overall, increase documentation and attempt to reduce complexity in resolver.py. Also changed the paradigm for ContainerStorage list projections to use function overrides. --- api/dao/basecontainerstorage.py | 5 +- api/dao/containerstorage.py | 38 ++- api/handlers/resolvehandler.py | 1 - api/resolver.py | 299 +++++++++--------- .../integration_tests/python/test_resolver.py | 4 + 5 files changed, 177 insertions(+), 170 deletions(-) diff --git a/api/dao/basecontainerstorage.py b/api/dao/basecontainerstorage.py index 7ea80580b..85190e578 100644 --- a/api/dao/basecontainerstorage.py +++ b/api/dao/basecontainerstorage.py @@ -47,12 +47,11 @@ class ContainerStorage(object): Examples: projects, sessions, acquisitions and collections """ - def __init__(self, cont_name, use_object_id=False, use_delete_tag=False, list_projection=None): + def __init__(self, cont_name, use_object_id=False, use_delete_tag=False): self.cont_name = cont_name self.use_object_id = use_object_id self.use_delete_tag = use_delete_tag self.dbc = config.db[cont_name] - self.list_projection = list_projection @classmethod def factory(cls, cont_name): @@ -331,6 +330,4 @@ def get_list_projection(self): Return a copy of the list projection to use with this container, or None. It is safe to modify the returned copy. 
""" - if self.list_projection: - return self.list_projection.copy() return None diff --git a/api/dao/containerstorage.py b/api/dao/containerstorage.py index 6eb54fbe9..d3fda6aba 100644 --- a/api/dao/containerstorage.py +++ b/api/dao/containerstorage.py @@ -41,8 +41,7 @@ def create_el(self, payload): class ProjectStorage(ContainerStorage): def __init__(self): - super(ProjectStorage,self).__init__('projects', use_object_id=True, use_delete_tag=True, - list_projection={'info': 0, 'files.info': 0}) + super(ProjectStorage,self).__init__('projects', use_object_id=True, use_delete_tag=True) def create_el(self, payload): result = super(ProjectStorage, self).create_el(payload) @@ -96,16 +95,14 @@ def recalc_sessions_compliance(self, project_id=None): changed_sessions.append(s['_id']) return changed_sessions + def get_list_projection(self): + return {'info': 0, 'files.info': 0} + class SessionStorage(ContainerStorage): def __init__(self): - super(SessionStorage,self).__init__('sessions', use_object_id=True, use_delete_tag=True, - # Remove subject first/last from list view to better log access to this information - list_projection={'info': 0, 'analyses': 0, 'subject.firstname': 0, - 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, - 'subject.race': 0, 'subject.ethnicity': 0, 'subject.info': 0, - 'files.info': 0, 'tags': 0}) + super(SessionStorage,self).__init__('sessions', use_object_id=True, use_delete_tag=True) def _fill_default_values(self, cont): cont = super(SessionStorage,self)._fill_default_values(cont) @@ -225,13 +222,20 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non return self.get_all_el(query, user, projection) + def get_list_projection(self): + # Remove subject first/last from list view to better log access to this information + return {'info': 0, 'analyses': 0, 'subject.firstname': 0, + 'subject.lastname': 0, 'subject.sex': 0, 'subject.age': 0, + 'subject.race': 0, 'subject.ethnicity': 0, 'subject.info': 0, + 
'files.info': 0, 'tags': 0} + + class AcquisitionStorage(ContainerStorage): def __init__(self): - super(AcquisitionStorage,self).__init__('acquisitions', use_object_id=True, use_delete_tag=True, - list_projection={'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0}) + super(AcquisitionStorage,self).__init__('acquisitions', use_object_id=True, use_delete_tag=True) def create_el(self, payload): result = super(AcquisitionStorage, self).create_el(payload) @@ -293,18 +297,23 @@ def get_all_for_targets(self, target_type, target_ids, user=None, projection=Non query['collections'] = collection_id return self.get_all_el(query, user, projection) + def get_list_projection(self): + return {'info': 0, 'collections': 0, 'files.info': 0, 'tags': 0} + class CollectionStorage(ContainerStorage): def __init__(self): - super(CollectionStorage, self).__init__('collections', use_object_id=True, use_delete_tag=True, list_projection={'info': 0}) + super(CollectionStorage, self).__init__('collections', use_object_id=True, use_delete_tag=True) + + def get_list_projection(self): + return {'info': 0} class AnalysisStorage(ContainerStorage): def __init__(self): - super(AnalysisStorage, self).__init__('analyses', use_object_id=True, use_delete_tag=True, - list_projection={'info': 0, 'files.info': 0, 'tags': 0}) + super(AnalysisStorage, self).__init__('analyses', use_object_id=True, use_delete_tag=True) def get_parent(self, parent_type, parent_id): @@ -420,3 +429,6 @@ def inflate_job_info(self, analysis): analysis['job'] = job return analysis + + def get_list_projection(self): + return {'info': 0, 'files.info': 0, 'tags': 0} diff --git a/api/handlers/resolvehandler.py b/api/handlers/resolvehandler.py index 0a2866dec..1e926474b 100644 --- a/api/handlers/resolvehandler.py +++ b/api/handlers/resolvehandler.py @@ -104,4 +104,3 @@ def _resolve_and_check_permissions(self, id_only): return result - diff --git a/api/resolver.py b/api/resolver.py index 75ab0f98e..6e87bdea2 100644 --- 
a/api/resolver.py +++ b/api/resolver.py @@ -43,128 +43,114 @@ from .jobs import gears from .web.errors import APINotFoundException, InputValidationException -def path_peek(path): - """Return the next path element or None""" - if len(path) > 0: - return path[0] - return None - -def parse_criterion(path_in): - """Parse criterion, returning true if we got an id""" - if not path_in: - return False, None - - value = path_in.popleft() - use_id = False - # Check for syntax - if value.startswith(''): - value = value[4:len(value)-1] - use_id = True - - return use_id, value - -def get_parent(path_out): - """Return the last parent element or None""" - if path_out: - return path_out[-1] - return None - def apply_container_type(lst, container_type): """Apply container_type to each item in in the list""" - if lst: - for item in lst: - item['container_type'] = container_type - -def pop_files(container): - """Return a consistently-ordered set of files for a given container.""" - if not container: - return [] - - files = container.pop('files', []) - - files.sort(key=lambda f: f.get('name', '')) - apply_container_type(files, 'file') + for item in lst: + item['container_type'] = container_type - return files -def find_file(files, name): - """Find a file by name""" - for f in files: - if str(f.get('name')) == name: - return f - return None - -class BaseNode(object): +class Node(object): """Base class for all nodes in the resolver tree""" - def next(self, path_in, path_out, id_only): - # pylint: disable=W0613 - pass # pragma: no cover - - def get_children(self, path_out): - # pylint: disable=W0613 - return [] # pragma: no cover + def next(self, path_in, path_out, id_only): # pylint: disable=W0613 + """ + Find the next node in the hierarchy that matches the next item in path_in. + Places the found node in path_out and return the next Node in the tree. + + Args: + path_in (deque): The remaining path elements to search in left-to-right order. 
+ path_out (list): The currently resolved path, in left-to-right order. + id_only (bool): Whether to resolve just ids for path elements, or full nodes. + + Returns: + Node: The next node in the hierarchy, or None + """ + raise NotImplementedError() + + def get_children(self, path_out): # pylint: disable=W0613 + """ + Get all children of the last path element. + + Args: + path_out (list): The currently resolved path. + + Returns: + list: A list of child elements for the last path element. + """ + raise NotImplementedError() + + def get_parent(self, path_out): + """Return the last element in path_out or None""" + if path_out: + return path_out[-1] + return None -class RootNode(BaseNode): - """The root node of the resolver tree""" - def __init__(self): - self.groups_node = ContainerNode('groups', files=False, use_id=True, analyses=False) + def parse_criterion(self, path_in): + """ + Parse criterion, returning true if we got an id. - def next(self, path_in, path_out, id_only): - """Get the next node in the hierarchy""" - path_el = path_peek(path_in) + Args: + path_in (deque): The path in, must not be empty. - if path_el == 'gears': - path_in.popleft() - return GearsNode() + Returns: + bool, str: A boolean value indicating whether or not we parsed an id, and the parsed value. 
+        """
+        value = path_in.popleft()
+        use_id = False
 
-    if path_el:
-        return self.groups_node
+        # Check for syntax
+        if value.startswith('<id:'):
+            value = value[4:len(value)-1]
+            use_id = True
 
-    return None
+        return use_id, value
 
-    def get_children(self, path_out):
-        """Get the children of the current node in the hierarchy"""
-        return ContainerNode.get_container_children('groups')
 
-class FilesNode(BaseNode):
+
+class FilesNode(Node):
     """Node that manages filename resolution"""
 
     def next(self, path_in, path_out, id_only):
-        """Get the next node in the hierarchy"""
-        if not path_in:
-            return None
-
         filename = path_in.popleft()
+        parent = self.get_parent(path_out)
 
-        parent = get_parent(path_out)
-        if not parent:
-            raise APINotFoundException('No ' + filename + ' file found.')
-
-        f = find_file(pop_files(parent), filename)
-        if f is not None:
-            path_out.append(f)
-            return None
+        # Find the matching file
+        for f in FilesNode.pop_files(parent):
+            if str(f.get('name', '')) == filename:
+                path_out.append(f)
+                return None
 
         raise APINotFoundException('No ' + filename + ' file found.')
 
     def get_children(self, path_out):
-        """Get the children of the current node in the hierarchy"""
-        parent = get_parent(path_out)
-        if parent:
-            return pop_files(parent)
-        return []
+        parent = self.get_parent(path_out)
+        return FilesNode.pop_files(parent)
+
+    @staticmethod
+    def pop_files(container):
+        """
+        Return a consistently-ordered set of files for a given container.
+        This will remove the 'files' attribute from the container.
+
+        Args:
+            container (dict): The container, or None if there is no parent.
+ + Returns: + list: The list of files, or an empty list + """ + if not container: + return [] + + files = container.pop('files', []) + + files.sort(key=lambda f: f.get('name', '')) + apply_container_type(files, 'file') -class ContainerNode(BaseNode): + return files + + +class ContainerNode(Node): # All lists obtained by the Resolver are sorted by the created timestamp, then the database ID as a fallback. # As neither property should ever change, this sort should be consistent sorting = [('created', 1), ('_id', 1)] - # In some cases we only want to resolve the id of a container - id_only_projection = { - 'label': 1, - 'permissions': 1, - 'files': 1, - } - def __init__(self, cont_name, files=True, use_id=False, analyses=True): self.cont_name = cont_name self.storage = ContainerStorage.factory(cont_name) @@ -176,33 +162,27 @@ def __init__(self, cont_name, files=True, use_id=False, analyses=True): self.child_name = self.storage.get_child_container_name() def next(self, path_in, path_out, id_only): - """Get the next node in the hierarchy, adding any value found to path_out""" - # If there is no path in, don't try to resolve - if not path_in: - return None - - use_id, criterion = parse_criterion(path_in) - parent = get_parent(path_out) + use_id, criterion = self.parse_criterion(path_in) + parent = self.get_parent(path_out) # Peek to see if we need files for the next path element - fetch_files = (path_peek(path_in) in ['files', None]) + fetch_files = (not path_in or path_in[0] == 'files') # Setup criterion match query = {} - if criterion: - if use_id or self.use_id: - if self.storage.use_object_id: - try: - query['_id'] = bson.ObjectId(criterion) - except bson.errors.InvalidId as e: - raise InputValidationException(e.message) - else: - query['_id'] = criterion + if use_id or self.use_id: + if self.storage.use_object_id: + try: + query['_id'] = bson.ObjectId(criterion) + except bson.errors.InvalidId as e: + raise InputValidationException(e.message) else: - query['label'] 
= criterion + query['_id'] = criterion + else: + query['label'] = criterion # Setup projection if id_only: - proj = ContainerNode.id_only_projection.copy() + proj = ContainerNode.get_id_only_projection() if fetch_files: proj['files'] = 1 else: @@ -210,8 +190,6 @@ def next(self, path_in, path_out, id_only): if proj and not fetch_files: proj['files'] = 0 - # We don't use the user field here because we want to return a 403 if - # they try to resolve something they don't have access to results = self.find(query, parent, proj) if not results: raise APINotFoundException('No {0} {1} found.'.format(criterion, self.container_type)) @@ -223,30 +201,25 @@ def next(self, path_in, path_out, id_only): path_out.append(child) # Get the next node - if not path_in: - return None - - # Files - if fetch_files: - path_in.popleft() - return FilesNode() + if path_in: + # Files + if fetch_files: + path_in.popleft() + return FilesNode() - # Check for analyses - if path_peek(path_in) == 'analyses': - if self.analyses: + # Check for analyses + if path_in[0] == 'analyses' and self.analyses: path_in.popleft() return AnalysesNode() - raise APINotFoundException('No analyses at the {0} level'.format(self.container_type)) - - if self.child_name: - return ContainerNode(self.child_name) + # Otherwise, the next node is our child container + if self.child_name: + return ContainerNode(self.child_name) return None def get_children(self, path_out): - """Get all children of the last node""" - parent = get_parent(path_out) + parent = self.get_parent(path_out) # Get container chilren if self.child_name: @@ -271,7 +244,7 @@ def get_children(self, path_out): children = children + analyses # Add files - return children + pop_files(parent) + return children + FilesNode.pop_files(parent) def find(self, query, parent, proj): """ Find the one child of this container that matches query """ @@ -279,10 +252,21 @@ def find(self, query, parent, proj): if parent: query[parent['container_type']] = parent['_id'] + # We 
don't use the user field here because we want to return a 403 if + # they try to resolve something they don't have access to return self.storage.get_all_el(query, None, proj, sort=ContainerNode.sorting, limit=1) - @classmethod - def get_container_children(cls, cont_name, query=None): + @staticmethod + def get_id_only_projection(): + """Return a projection that will return the minimal values required for id-only resolution.""" + return { + 'label': 1, + 'permissions': 1, + 'files': 1, + } + + @staticmethod + def get_container_children(cont_name, query=None): """Get all children of container named cont_name, using query""" storage = ContainerStorage.factory(cont_name) @@ -295,14 +279,11 @@ def get_container_children(cls, cont_name, query=None): return children -class GearsNode(BaseNode): + +class GearsNode(Node): """The top level "gears" node""" def next(self, path_in, path_out, id_only): - """Get the next node in the hierarchy, adding any value found to path_out""" - if not path_in: - return None - - use_id, criterion = parse_criterion(path_in) + use_id, criterion = self.parse_criterion(path_in) if use_id: gear = gears.get_gear(criterion) else: @@ -317,8 +298,6 @@ def next(self, path_in, path_out, id_only): return None def get_children(self, path_out): - """Get a list of all gears""" - # No children for gears yet if path_out: return [] @@ -330,24 +309,20 @@ def get_children(self, path_out): return list(results) + class AnalysesNode(ContainerNode): - """The analyses node""" def __init__(self): super(AnalysesNode, self).__init__('analyses', files=True, use_id=False, analyses=False) def find(self, query, parent, proj): - """Find the one child of this container that matches query""" return self.list_analyses(parent, query, proj, limit=1) def get_children(self, path_out): - """Get a list of all analyses""" - parent = get_parent(path_out) - if not parent: - raise APINotFoundException('No analyses at that level') + parent = self.get_parent(path_out) # Only children of an 
analyses is files if parent.get('container_type') == 'analysis': - return pop_files(parent) + return FilesNode.pop_files(parent) results = self.list_analyses(parent) apply_container_type(results, self.container_type) @@ -358,6 +333,21 @@ def list_analyses(self, parent, query=None, proj=None, **kwargs): return self.storage.get_analyses(query, parent['container_type'], parent['_id'], projection=proj, sort=ContainerNode.sorting, **kwargs) +class RootNode(Node): + """The root node of the resolver tree""" + def next(self, path_in, path_out, id_only): + """Get the next node in the hierarchy""" + if path_in[0] == 'gears': + path_in.popleft() + return GearsNode() + + return ContainerNode('groups', files=False, use_id=True, analyses=False) + + def get_children(self, path_out): + """Get the children of the current node in the hierarchy""" + return ContainerNode.get_container_children('groups') + + class Resolver(object): """ Given an array of human-meaningful, possibly-ambiguous strings, resolve it as a path through the hierarchy. 
@@ -382,6 +372,11 @@ def resolve(self, path): # Keeping in mind that path may be empty while next_node: node = next_node + + # Don't attempt to resolve the next node if path is empty + if not path: + break + next_node = node.next(path, resolved_path, self.id_only) # If we haven't consumed path, then we didn't find what we were looking for diff --git a/tests/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py index d6ad3fbdf..7ddc2389a 100644 --- a/tests/integration_tests/python/test_resolver.py +++ b/tests/integration_tests/python/test_resolver.py @@ -425,6 +425,10 @@ def test_resolve_gears(data_builder, as_admin, as_user, as_public, file_form): r = as_admin.post('/lookup', json={'path': ['gears', 'NON-EXISTENT-GEAR']}) assert r.status_code == 404 + # Lookup by id (not-found) + r = as_admin.post('/lookup', json={'path': ['gears', idz('ffffffffffffffffffffffff')]}) + assert r.status_code == 404 + def test_resolve_analyses(data_builder, as_admin, as_user, as_public, file_form): analysis_file = 'one.csv'