diff --git a/.circleci/participant.sh b/.circleci/participant.sh
index 4d897729c..d2ee147f3 100644
--- a/.circleci/participant.sh
+++ b/.circleci/participant.sh
@@ -32,7 +32,8 @@ DOCKER_RUN="docker run -i -v $HOME/data:/data:ro \
     -v $SCRATCH:/scratch -w /scratch \
     ${DOCKER_IMAGE}:${DOCKER_TAG} \
     /data/${TEST_DATA_NAME} out/ participant \
-    --verbose-reports --profile"
+    --verbose-reports --profile \
+    --webapi-addr $( hostname -I | awk '{print $1}' ) --webapi-port ${MRIQC_API_PORT} --upload-strict"
 
 case $CIRCLE_NODE_INDEX in
     0)
diff --git a/.circleci/webapi.sh b/.circleci/webapi.sh
new file mode 100644
index 000000000..a76f75b67
--- /dev/null
+++ b/.circleci/webapi.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+# Check that the expected number of records was registered in the MRIQC Web API
+#
+
+# Setting      # $ help set
+set -e      # Exit immediately if a command exits with a non-zero status.
+set -u      # Treat unset variables as an error when substituting.
+set -x      # Print command traces before executing command.
+
+# Exit if build_only tag is found
+if [ "$(grep -qiP 'build[ _]?only' <<< "$GIT_COMMIT_MSG"; echo $? )" == "0" ]; then
+    exit 0
+fi
+
+# Exit if docs_only tag is found
+if [ "$(grep -qiP 'docs[ _]?only' <<< "$GIT_COMMIT_MSG"; echo $? )" == "0" ]; then
+    echo "Building [docs_only], nothing to do."
+    exit 0
+fi
+
+MODALITY=T1w
+NRECORDS=4
+if [ "$CIRCLE_NODE_INDEX" == "1" ]; then
+    MODALITY=bold
+    NRECORDS=9
+fi
+
+echo "Checking records in MRIQC Web API"
+docker run -i --entrypoint="/usr/local/miniconda/bin/python" \
+    ${DOCKER_IMAGE}:${DOCKER_TAG} \
+    /root/src/mriqc/mriqc/bin/mriqcwebapi_test.py \
+    ${MODALITY} ${NRECORDS} \
+    --webapi-addr $( hostname -I | awk '{print $1}' ) --webapi-port ${MRIQC_API_PORT}
\ No newline at end of file
diff --git a/circle.yml b/circle.yml
index ec8b200dd..febb5589e 100644
--- a/circle.yml
+++ b/circle.yml
@@ -8,6 +8,9 @@ machine:
     DOCKER_TAG: "latest"
     TEST_DATA_NAME: "circle-tests"
     TEST_DATA_URL: "https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/590ce4a96c613b025147c568"
+    SECRET_KEY: CI
+    MRIQC_API_PORT: 80
+    MRIQC_API_TAG: 0.3.0
   services:
     - docker
 
@@ -15,6 +18,7 @@ dependencies:
   cache_directories:
    - "~/docker"
    - "~/data"
+   - "~/mriqcwebapi"
 
  pre:
    # Download test data
@@ -23,6 +27,14 @@ dependencies:
    - mkdir -p $SCRATCH && sudo setfacl -d -m group:ubuntu:rwx $SCRATCH && sudo setfacl -m group:ubuntu:rwx $SCRATCH
    - if [[ ! -d ~/data/${TEST_DATA_NAME} ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O ${TEST_DATA_NAME}.tar.gz "${TEST_DATA_URL}" && tar xzf ${TEST_DATA_NAME}.tar.gz -C ~/data/; fi
    - docker load --input $HOME/docker/cache.tar || true
+    # Prepare MRIQCWebAPI
+    - pip install docker-compose
+    - if [[ ! -d $HOME/mriqcwebapi ]]; then cd; git clone https://github.com/poldracklab/mriqcwebapi.git; fi;
+    - cd $HOME/mriqcwebapi && git fetch --tags && git checkout ${MRIQC_API_TAG}
+    - docker-compose -f $HOME/mriqcwebapi/dockereve-master/docker-compose.yml pull
+    - docker-compose -f $HOME/mriqcwebapi/dockereve-master/docker-compose.yml build
+    - nohup bash -c "docker-compose -f $HOME/mriqcwebapi/dockereve-master/docker-compose.yml --verbose up -d" && sleep 10
+    - docker run -it --entrypoint=/usr/bin/curl ${DOCKER_IMAGE}:${DOCKER_TAG} --retry 10 --retry-delay 15 -vkf http://$( hostname -I | awk '{print $1}' )
  override:
    - echo "${CIRCLE_TAG:-$CIRCLE_SHA1}" > mriqc/VERSION
    - ? |
@@ -31,7 +43,7 @@ dependencies:
        done && [ "$e" -eq "0" ]
      :
        timeout: 3200
-    - docker save -o $HOME/docker/cache.tar ubuntu:xenial-20161213 ${DOCKER_IMAGE}:${DOCKER_TAG} :
+    - docker save -o $HOME/docker/cache.tar ubuntu:xenial-20161213 ${DOCKER_IMAGE}:${DOCKER_TAG} python:3.4-onbuild tutum/nginx:latest mongo:latest :
        timeout: 3200
 
 test:
   override:
@@ -53,6 +65,10 @@ test:
       parallel: true
       environment:
         GIT_COMMIT_MSG: $( git log --format=oneline -n 1 $CIRCLE_SHA1 )
+    - bash .circleci/webapi.sh :
+        parallel: true
+        environment:
+          GIT_COMMIT_MSG: $( git log --format=oneline -n 1 $CIRCLE_SHA1 )
 
 general:
   artifacts:
@@ -77,7 +93,7 @@ deployment:
           echo "This is not a release candidate, pushing ${DOCKER_IMAGE}:${DOCKER_TAG}"
           docker push ${DOCKER_IMAGE}:${DOCKER_TAG}
         fi
-      fi
+      fi :
        timeout: 21600
    - |
diff --git a/mriqc/bin/mriqc_run.py b/mriqc/bin/mriqc_run.py
index 76ab28a97..88a8a2e8f 100644
--- a/mriqc/bin/mriqc_run.py
+++ b/mriqc/bin/mriqc_run.py
@@ -85,6 +85,15 @@ def get_parser():
     g_outputs.add_argument('--email', action='store', default='', type=str,
                            help='Email address to include with quality metric submission.')
+    g_outputs.add_argument(
+        '--webapi-addr', action='store', default='34.201.213.252', type=str,
+        help='IP address where the MRIQC WebAPI is listening')
+    g_outputs.add_argument(
+        '--webapi-port', action='store', default=80, type=int,
+        help='port where the MRIQC WebAPI is listening')
+
+    g_outputs.add_argument('--upload-strict', action='store_true', default=False,
+                           help='fail the run if the metrics cannot be uploaded')
 
     # General performance
     g_perfm = parser.add_argument_group('Options to handle performance')
     g_perfm.add_argument('--n_procs', '--nprocs', '--n_cpus', '--nprocs',
@@ -98,7 +107,6 @@ def get_parser():
                          help="Cast the input data to float32 if it's represented in higher precision "
                               "(saves space and improves perfomance)")
 
-
     # Workflow settings
     g_conf = parser.add_argument_group('Workflow configuration')
     g_conf.add_argument('--ica', action='store_true', default=False,
@@ -170,9 +178,12 @@ def main():
         'verbose_reports': opts.verbose_reports or opts.testing,
         'float32': opts.float32,
         'ica': opts.ica,
-        'no_sub': opts.no_sub or opts.testing,
+        'no_sub': opts.no_sub,
         'email': opts.email,
         'fd_thres': opts.fd_thres,
+        'webapi_addr' : opts.webapi_addr,
+        'webapi_port' : opts.webapi_port,
+        'upload_strict' : opts.upload_strict,
     }
 
     if opts.hmc_afni:
diff --git a/mriqc/bin/mriqcwebapi_test.py b/mriqc/bin/mriqcwebapi_test.py
new file mode 100644
index 000000000..89af67430
--- /dev/null
+++ b/mriqc/bin/mriqcwebapi_test.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Author: oesteban
+# @Date:   2015-11-19 16:44:27
+from __future__ import print_function, division, absolute_import, unicode_literals
+
+def get_parser():
+    """Build parser object"""
+    from argparse import ArgumentParser
+    from argparse import RawTextHelpFormatter
+
+    parser = ArgumentParser(description='MRIQCWebAPI: Check entries',
+                            formatter_class=RawTextHelpFormatter)
+    parser.add_argument('modality', action='store', choices=['T1w', 'bold'],
+                        help='imaging modality of the records to check')
+    parser.add_argument('expected', action='store', type=int,
+                        help='number of expected items in the database')
+    parser.add_argument(
+        '--webapi-addr', action='store', default='34.201.213.252', type=str,
+        help='IP address where the MRIQC WebAPI is listening')
+    parser.add_argument(
+        '--webapi-port', action='store', default=80, type=int,
+        help='port where the MRIQC WebAPI is listening')
+    return parser
+
+
+def main():
+    """Entry point"""
+    from requests import get
+    from mriqc import MRIQC_LOG
+
+    # Run parser
+    opts = get_parser().parse_args()
+
+    endpoint = 'http://{}:{}/{}'.format(opts.webapi_addr,
+                                        opts.webapi_port,
+                                        opts.modality)
+    MRIQC_LOG.info('Sending GET: %s', endpoint)
+    resp = get(endpoint).json()
+    MRIQC_LOG.info('There are %d records in database', resp['_meta']['total'])
+    assert opts.expected == resp['_meta']['total']
+
+
+if __name__ == '__main__':
+    main()
diff --git a/mriqc/interfaces/__init__.py b/mriqc/interfaces/__init__.py
index 7888517e4..e2fea0b61 100644
--- a/mriqc/interfaces/__init__.py
+++ b/mriqc/interfaces/__init__.py
@@ -14,3 +14,4 @@ from mriqc.interfaces.bids import ReadSidecarJSON, IQMFileSink
 from mriqc.interfaces.viz import PlotMosaic, PlotContours, PlotSpikes
 from mriqc.interfaces.common import ConformImage, EnsureSize
+from mriqc.interfaces.webapi import UploadIQMs
diff --git a/mriqc/interfaces/webapi.py b/mriqc/interfaces/webapi.py
new file mode 100644
index 000000000..ec68717fa
--- /dev/null
+++ b/mriqc/interfaces/webapi.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+from __future__ import print_function, division, absolute_import, unicode_literals
+
+from nipype import logging
+from nipype.interfaces.base import (Bunch, traits, isdefined, TraitedSpec,
+                                    BaseInterfaceInputSpec, File, Str)
+from niworkflows.interfaces.base import SimpleInterface
+
+
+IFLOGGER = logging.getLogger('interface')
+
+SECRET_KEY = """\
+ZUsBaabr6PEbav5DKAHIODEnwpwC58oQTJF7KWvDBPUmBIVFFtw\
+Od7lQBdz9r9ulJTR1BtxBDqDuY0owxK6LbLB1u1b64ZkIMd46\
+"""
+
+# metadata whitelist
+META_WHITELIST = [
+    'AccelNumReferenceLines',
+    'AccelerationFactorPE',
+    'AcquisitionMatrix',
+    'CogAtlasID',
+    'CogPOID',
+    'CoilCombinationMethod',
+    'ContrastBolusIngredient',
+    'ConversionSoftware',
+    'ConversionSoftwareVersion',
+    'DelayTime',
+    'DeviceSerialNumber',
+    'EchoTime',
+    'EchoTrainLength',
+    'EffectiveEchoSpacing',
+    'FlipAngle',
+    'GradientSetType',
+    'HardcopyDeviceSoftwareVersion',
+    'ImageType',
+    'ImagingFrequency',
+    'InPlanePhaseEncodingDirection',
+    'InstitutionAddress',
+    'InstitutionName',
+    'Instructions',
+    'InversionTime',
+    'MRAcquisitionType',
+    'MRTransmitCoilSequence',
+    'MagneticFieldStrength',
+    'Manufacturer',
+    'ManufacturersModelName',
+    'MatrixCoilMode',
+    'MultibandAccelerationFactor',
+    'NumberOfAverages',
+    'NumberOfPhaseEncodingSteps',
+    'NumberOfVolumesDiscardedByScanner',
+    'NumberOfVolumesDiscardedByUser',
+    'NumberShots',
+    'ParallelAcquisitionTechnique',
+    'ParallelReductionFactorInPlane',
+    'PartialFourier',
+    'PartialFourierDirection',
+    'PatientPosition',
+    'PercentPhaseFieldOfView',
+    'PercentSampling',
+    'PhaseEncodingDirection',
+    'PixelBandwidth',
+    'ProtocolName',
+    'PulseSequenceDetails',
+    'PulseSequenceType',
+    'ReceiveCoilName',
+    'RepetitionTime',
+    'ScanOptions',
+    'ScanningSequence',
+    'SequenceName',
+    'SequenceVariant',
+    'SliceEncodingDirection',
+    'SoftwareVersions',
+    'TaskDescription',
+    'TaskName',
+    'TotalReadoutTime',
+    'TotalScanTimeSec',
+    'TransmitCoilName',
+    'VariableFlipAngleFlag',
+    'acq_id',
+    'modality',
+    'run_id',
+    'subject_id',
+    'task_id',
+]
+
+PROV_WHITELIST = [
+    'version',
+    'md5sum',
+    'software',
+    'settings'
+]
+
+
+class UploadIQMsInputSpec(BaseInterfaceInputSpec):
+    in_iqms = File(exists=True, mandatory=True, desc='the input IQMs-JSON file')
+    address = Str(mandatory=True, desc='IP address where the MRIQC WebAPI is listening')
+    port = traits.Int(mandatory=True, desc='MRIQCWebAPI service port')
+    email = Str(desc='set sender email')
+    strict = traits.Bool(False, usedefault=True,
+                         desc='crash if upload was not successful')
+
+
+class UploadIQMs(SimpleInterface):
+    """
+    Upload features to MRIQCWebAPI
+    """
+
+    input_spec = UploadIQMsInputSpec
+    output_spec = TraitedSpec
+
+    def _run_interface(self, runtime):
+        email = None
+        if isdefined(self.inputs.email):
+            email = self.inputs.email
+
+        response = upload_qc_metrics(
+            self.inputs.in_iqms,
+            self.inputs.address,
+            self.inputs.port,
+            email
+        )
+
+        if response.status_code == 201:
+            IFLOGGER.info('QC metrics successfully uploaded.')
+            return runtime
+
+        errmsg = 'QC metrics failed to upload. Status %d: %s' % (
+            response.status_code, response.text)
+        IFLOGGER.warn(errmsg)
+        if self.inputs.strict:
+            raise RuntimeError(response.text)
+
+        return runtime
+
+
+def upload_qc_metrics(in_iqms, addr, port, email=None):
+    """
+    Upload qc metrics to remote repository.
+
+    :param str in_iqms: path to the IQMs JSON file
+    :param str addr: IP address where the MRIQC WebAPI is listening
+    :param int port: port where the MRIQC WebAPI is listening
+    :param str email: email address to be included with the metric submission
+
+    :return: the requests response object if the submission was posted, or a
+        Bunch with ``status_code`` and ``text`` describing the error otherwise
+    :rtype: object
+
+    """
+    from json import load, dumps
+    import requests
+    from io import open
+    from copy import deepcopy
+
+    with open(in_iqms, 'r') as input_json:
+        in_data = load(input_json)
+
+    # Extract metadata and provenance
+    meta = in_data.pop('bids_meta')
+    prov = in_data.pop('provenance')
+
+    # At this point, data should contain only IQMs
+    data = deepcopy(in_data)
+
+    # Check modality
+    modality = meta.get('modality', 'None')
+    if modality not in ('T1w', 'bold'):
+        errmsg = ('Submitting to MRIQCWebAPI: image modality should be "bold" or "T1w", '
+                  '(found "%s")' % modality)
+        return Bunch(status_code=1, text=errmsg)
+
+    # Filter metadata values that aren't in whitelist
+    data['bids_meta'] = {k: meta[k] for k in META_WHITELIST if k in meta}
+    # Filter provenance values that aren't in whitelist
+    data['provenance'] = {k: prov[k] for k in PROV_WHITELIST if k in prov}
+
+    if email:
+        data['email'] = email
+
+    headers = {'token': SECRET_KEY, "Content-Type": "application/json"}
+    try:
+        # Post to the modality-specific endpoint ("T1w" or "bold")
+        response = requests.post(
+            'http://{}:{}/{}'.format(addr, port, modality),
+            headers=headers, data=dumps(data))
+    except requests.ConnectionError as err:
+        errmsg = 'QC metrics failed to upload due to connection error shown below:\n%s' % err
+        return Bunch(status_code=1, text=errmsg)
+
+    return response
diff --git a/mriqc/reports/__init__.py b/mriqc/reports/__init__.py
index 8ac1747a1..75e6181ca 100644
--- a/mriqc/reports/__init__.py
+++ b/mriqc/reports/__init__.py
@@ -51,4 +51,3 @@ from __future__ import unicode_literals
 
 from mriqc.reports.individual import individual_html
 from mriqc.reports.group import gen_html as group_html
-from mriqc.reports.utils import upload_qc_metrics
diff --git a/mriqc/reports/utils.py b/mriqc/reports/utils.py
index c0ae79883..e40b9e9d7 100644
--- a/mriqc/reports/utils.py
+++ b/mriqc/reports/utils.py
@@ -7,7 +7,7 @@
 # @Date: 2016-01-05 11:33:39
 # @Email: code@oscaresteban.es
 # @Last modified by:   oesteban
-# @Last Modified time: 2017-05-23 09:00:47
+# @Last Modified time: 2017-05-25 13:41:58
 """ Helpers in report generation"""
 from __future__ import print_function, division, absolute_import, unicode_literals
 
@@ -36,6 +36,7 @@ def iqms2html(indict, table_id):
     result_str += '</table>\n'
     return result_str
 
+
 def unfold_columns(indict, prefix=None):
     """Converts an input dict with flattened keys to an array of columns"""
     if prefix is None:
@@ -72,7 +73,7 @@ def read_report_snippet(in_file):
     """Add a snippet into the report"""
     import os.path as op
     import re
-    from io import open #pylint: disable=W0622
+    from io import open  # pylint: disable=W0622
 
     is_svg = (op.splitext(op.basename(in_file))[1] == '.svg')
 
@@ -92,77 +93,6 @@ def read_report_snippet(in_file):
             corrected.append(line)
     return '\n'.join(corrected[svg_tag_line:])
 
-def upload_qc_metrics(in_iqms, email='', no_sub=False):
-    """Upload qc metrics to remote repository.
-
-    Arguments:
-    in_iqms -- Path to the qc metric json file as a string
-
-    Keyword arguments:
-    email -- email address to be included with the metric submission, defaults to empty string
-    no_sub -- Flag from settings indicating whether or not metrics should be submitted. If False, metrics will be submitted. If True, metrics will not be submitted. Defaults to False.
-
-    Returns:
-    either returns response object if a response was successfully sent
-    or it returns the string "No Response"
-    """
-    from json import load, dumps
-    import requests
-    from mriqc import logging
-
-    report_log = logging.getLogger('mriqc.report')
-    report_log.setLevel(logging.INFO)
-
-    if no_sub is True:
-        report_log.info('QC metrics were not uploaded because --no_sub or --testing options were set.')
-        r = "No Response"
-    else:
-        with open(in_iqms, 'r') as h:
-            in_data = load(h)
-
-        # metadata whitelist
-        whitelist = ["ContrastBolusIngredient", "RepetitionTime", "TaskName", "Manufacturer",
-                     "ManufacturersModelName", "MagneticFieldStrength", "DeviceSerialNumber",
-                     "SoftwareVersions", "HardcopyDeviceSoftwareVersion", "ReceiveCoilName",
-                     "GradientSetType", "MRTransmitCoilSequence", "MatrixCoilMode",
-                     "CoilCombinationMethod", "PulseSequenceType", "PulseSequenceDetails",
-                     "NumberShots", "ParallelReductionFactorInPlane", "ParallelAcquisitionTechnique",
-                     "PartialFourier", "PartialFourierDirection", "PhaseEncodingDirection",
-                     "EffectiveEchoSpacing", "TotalReadoutTime",
-                     "EchoTime", "InversionTime", "SliceTiming", "SliceEncodingDirection",
-                     "NumberOfVolumesDiscardedByScanner", "NumberOfVolumesDiscardedByUser",
-                     "DelayTime", "FlipAngle", "MultibandAccelerationFactor", "Instructions",
-                     "TaskDescription", "CogAtlasID", "CogPOID", "InstitutionName",
-                     "InstitutionAddress", "ConversionSoftware", "ConversionSoftwareVersion",
-                     "md5sum", "modality", "mriqc_pred", "software", "subject_id", "version"]
-
-        # flatten data
-        data = {k: v for k, v in list(in_data.items()) if k != 'metadata'}
-
-        # Filter Metadata values that aren't in whitelist
-        try:
-            data.update({k: v for k, v in list(in_data['metadata'].items()) if k in whitelist})
-        except KeyError:
-            pass
-
-        # Preemptively adding code to handle settings
-        try:
-            data.update({k: v for k, v in list(in_data['settings'].items()) if k in whitelist})
-        except KeyError:
-            pass
-
-        if email != '':
-            data['email'] = email
-
-        secret_key = 'ZUsBaabr6PEbav5DKAHIODEnwpwC58oQTJF7KWvDBPUmBIVFFtwOd7lQBdz9r9ulJTR1BtxBDqDuY0owxK6LbLB1u1b64ZkIMd46'
-        headers = {'token': secret_key, "Content-Type": "application/json"}
-        try:
-            r = requests.put("http://34.201.213.252:5000/measurements/upload",
-                             headers=headers, data=dumps(data))
-            if r.status_code == 201:
-                report_log.info('QC metrics successfully uploaded.')
-            else:
-                report_log.warn('QC metrics failed to upload. Status %d: %s' % (r.status_code, r.text))
-        except requests.ConnectionError as e:
-            report_log.warn('QC metrics failed to upload due to connection error shown below:\n%s' % e)
-            r = "No Response"
-
-    return r
-
 
 # def check_reports(dataset, settings, save_failed=True):
 #     """Check if reports have been created"""
@@ -184,7 +114,7 @@ def upload_qc_metrics(in_iqms, email='', no_sub=False):
 #             components.insert(0, qctype)
 
 #             report_fname = op.join(
-#                 settings['report_dir'], '_'.join(components) + '_report.html')
+#                settings['report_dir'], '_'.join(components) + '_report.html')
 
 #             if not op.isfile(report_fname):
 #                 missing[mod].append(
diff --git a/mriqc/workflows/anatomical.py b/mriqc/workflows/anatomical.py
index 6b60b4ce2..67e880308 100644
--- a/mriqc/workflows/anatomical.py
+++ b/mriqc/workflows/anatomical.py
@@ -55,7 +55,6 @@
 from niworkflows.interfaces.registration import RobustMNINormalizationRPT as RobustMNINormalization
 
 from mriqc import DEFAULTS
-from mriqc.workflows.utils import upload_wf
 from mriqc.interfaces import (StructuralQC, ArtifactMask, ReadSidecarJSON,
                               ConformImage, ComputeQI2, IQMFileSink,
                               RotationMask)
@@ -75,7 +74,8 @@ def anat_qc_workflow(dataset, settings, mod='T1w', name='anatMRIQC'):
         wf = anat_qc_workflow([op.join(datadir, 'sub-001/anat/sub-001_T1w.nii.gz')],
                               settings={'bids_dir': datadir,
                                         'output_dir': op.abspath('out'),
-                                        'ants_nthreads': 1})
+                                        'ants_nthreads': 1,
+                                        'no_sub': True})
 
     """
 
@@ -107,8 +107,6 @@ def anat_qc_workflow(dataset, settings, mod='T1w', name='anatMRIQC'):
     iqmswf = compute_iqms(settings, modality=mod)
     # Reports
     repwf = individual_reports(settings)
-    # Upload metrics
-    upldwf = upload_wf(settings)
 
     # Connect all nodes
     workflow.connect([
@@ -149,10 +147,22 @@ def anat_qc_workflow(dataset, settings, mod='T1w', name='anatMRIQC'):
         (segment, repwf, [('tissue_class_map', 'inputnode.segmentation')]),
         (iqmswf, repwf, [('outputnode.out_noisefit', 'inputnode.noisefit')]),
         (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms')]),
-        (iqmswf, upldwf, [('outputnode.out_file', 'inputnode.in_iqms')]),
         (iqmswf, outputnode, [('outputnode.out_file', 'out_json')])
     ])
 
+    # Upload metrics
+    if not settings.get('no_sub', False):
+        from mriqc.interfaces.webapi import UploadIQMs
+        upldwf = pe.Node(UploadIQMs(), name='UploadMetrics')
+        upldwf.inputs.email = settings.get('email', '')
+        upldwf.inputs.address = settings.get('webapi_addr')
+        upldwf.inputs.port = settings.get('webapi_port', 5000)
+        upldwf.inputs.strict = settings.get('upload_strict', False)
+
+        workflow.connect([
+            (iqmswf, upldwf, [('outputnode.out_file', 'in_iqms')]),
+        ])
+
     return workflow
 
 
 def spatial_normalization(settings, mod='T1w', name='SpatialNormalization',
diff --git a/mriqc/workflows/functional.py b/mriqc/workflows/functional.py
index d7509b00d..f8aad869e 100644
--- a/mriqc/workflows/functional.py
+++ b/mriqc/workflows/functional.py
@@ -31,7 +31,6 @@
 """
 from __future__ import print_function, division, absolute_import, unicode_literals
 
-import os
 import os.path as op
 
 from nipype import logging
@@ -43,7 +42,6 @@ from nipype.interfaces import afni
 
 from mriqc import DEFAULTS
-from mriqc.workflows.utils import slice_wise_fft, upload_wf
 from mriqc.interfaces import ReadSidecarJSON, FunctionalQC, Spikes, IQMFileSink
 from mriqc.utils.misc import check_folder, reorient_and_discard_non_steady
 
 from niworkflows.interfaces import segmentation as nws
@@ -64,7 +62,8 @@ def fmri_qc_workflow(dataset, settings, name='funcMRIQC'):
         datadir = op.abspath('data')
         wf = fmri_qc_workflow([op.join(datadir, 'sub-001/func/sub-001_task-rest_bold.nii.gz')],
                               settings={'bids_dir': datadir,
-                                        'output_dir': op.abspath('out')})
+                                        'output_dir': op.abspath('out'),
+                                        'no_sub': True})
 
     """
 
@@ -127,8 +126,6 @@ def fmri_qc_workflow(dataset, settings, name='funcMRIQC'):
     iqmswf = compute_iqms(settings)
     # Reports
     repwf = individual_reports(settings)
-    # Upload metrics
-    upldwf = upload_wf(settings)
 
     workflow.connect([
         (inputnode, iqmswf, [('in_file', 'inputnode.in_file')]),
@@ -157,7 +154,6 @@
         (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms'),
                          ('outputnode.out_dvars', 'inputnode.in_dvars'),
                          ('outputnode.outliers', 'inputnode.outliers')]),
-        (iqmswf, upldwf, [('outputnode.out_file', 'inputnode.in_iqms')]),
         (hmcwf, outputnode, [('outputnode.out_fd', 'out_fd')]),
     ])
 
@@ -179,6 +175,19 @@
             (melodic, repwf, [('out_report', 'inputnode.ica_report')])
         ])
 
+    # Upload metrics
+    if not settings.get('no_sub', False):
+        from mriqc.interfaces.webapi import UploadIQMs
+        upldwf = pe.Node(UploadIQMs(), name='UploadMetrics')
+        upldwf.inputs.address = settings.get('webapi_addr')
+        upldwf.inputs.port = settings.get('webapi_port')
+        upldwf.inputs.email = settings.get('email')
+        upldwf.inputs.strict = settings.get('upload_strict', False)
+
+        workflow.connect([
+            (iqmswf, upldwf, [('outputnode.out_file', 'in_iqms')]),
+        ])
+
     return workflow
 
 
 def compute_iqms(settings, name='ComputeIQMs'):
@@ -283,8 +292,9 @@
         (datasink, outputnode, [('out_file', 'out_file')])
     ])
 
+    # FFT spikes finder
     if settings.get('fft_spikes_detector', False):
-        # FFT spikes finder
+        from mriqc.workflows.utils import slice_wise_fft
         spikes_fft = pe.Node(niu.Function(
             input_names=['in_file'],
             output_names=['n_spikes', 'out_spikes', 'out_fft'],
diff --git a/mriqc/workflows/utils.py b/mriqc/workflows/utils.py
index 0b4e79d73..39f3cd64c 100644
--- a/mriqc/workflows/utils.py
+++ b/mriqc/workflows/utils.py
@@ -7,7 +7,6 @@
 # @Date: 2016-01-05 17:15:12
 # @Email: code@oscaresteban.es
 # @Last modified by:   oesteban
-# @Last Modified time: 2017-05-30 16:35:52
 """Helper functions for the workflows"""
 from __future__ import print_function, division, absolute_import, unicode_literals
 from builtins import range
@@ -163,35 +162,3 @@ def slice_wise_fft(in_file, ftmask=None, spike_thres=3., out_prefix=None):
     np.savetxt(out_spikes, spikes_list, fmt=b'%d', delimiter=b'\t', header='TR\tZ')
 
     return len(spikes_list), out_spikes, out_fft
-
-
-def upload_wf(settings, name='UploadWorkflow'):
-    """Workflow wrapping the upload_qc_metrics function.
-
-    Arguments:
-    settings -- dictionary containing mriqc settings
-
-    Keyword arguments:
-    name -- workflow name, defaults to UploadWorkflow
-
-    Returns:
-    workflow with inputnode and UploadMetrics node.
-    """
-    from mriqc.reports import upload_qc_metrics
-
-    no_sub = settings.get('no_sub', False)
-    email = settings.get('email', '')
-
-    workflow = pe.Workflow(name=name)
-    inputnode = pe.Node(niu.IdentityInterface(fields=['in_iqms']),
-                        name='inputnode')
-    upld = pe.Node(niu.Function(input_names=['in_iqms', 'no_sub', 'email'],
-                                output_names=['response'],
-                                function=upload_qc_metrics),
-                   name='UploadMetrics')
-    upld.inputs.email = email
-    upld.inputs.no_sub = no_sub
-
-    workflow.connect([(inputnode, upld, [('in_iqms', 'in_iqms')])])
-
-    return workflow
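
Note for reviewers, not part of the changeset: a minimal sketch of how the new upload path can be exercised by hand, assuming a locally running MRIQCWebAPI (for instance the docker-compose stack that circle.yml brings up) and an IQMs JSON file written by IQMFileSink. The file path and address below are placeholders, not values defined by this PR:

    # Hypothetical example: run mriqc.interfaces.webapi.UploadIQMs outside a workflow.
    # The JSON file must contain 'bids_meta' and 'provenance' keys, as produced by IQMFileSink.
    from mriqc.interfaces.webapi import UploadIQMs

    upld = UploadIQMs()
    upld.inputs.in_iqms = 'out/sub-001_T1w.json'  # placeholder path to an IQMs JSON file
    upld.inputs.address = '127.0.0.1'             # placeholder: a local MRIQCWebAPI instance
    upld.inputs.port = 80
    upld.inputs.strict = True                     # raise instead of only warning if the POST fails
    upld.run()

With strict left at its default (False) a failed upload is only logged as a warning, which is why .circleci/participant.sh passes --upload-strict so that CI fails loudly.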