Make project a Workflow attribute.
FredLoney committed May 1, 2015
1 parent 0696900 commit 70dc7e7
Showing 23 changed files with 251 additions and 264 deletions.
2 changes: 0 additions & 2 deletions qipipe/__init__.py
@@ -6,5 +6,3 @@
`Fast and Loose Versioning <https://gist.github.com/FredLoney/6d946112e0b0f2fc4b57>`_
scheme. Minor and patch version numbers begin at 1.
"""

from helpers.project import project
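
For orientation: before this commit the XNAT project name came from a module-level helper (the project() call imported here and in the pipeline modules); after it, the name is passed into each workflow constructor and read back as self.project. The sketch below illustrates that pattern only. It assumes WorkflowBase (revised elsewhere in this commit but not shown in this excerpt) simply stores the constructor argument; the body is illustrative, not the actual qipipe implementation.

class WorkflowBase(object):
    """Illustrative stand-in for qipipe.pipeline.workflow_base.WorkflowBase."""

    def __init__(self, project, logger, **opts):
        self.project = project
        """The XNAT project name, now carried by each workflow instance."""
        self._logger = logger
        # cfg_file handling is assumed; the real class reads the workflow
        # configuration from this file as before.
        self._cfg_file = opts.get('cfg_file')

Call sites then switch from XNATUpload(project=project(), ...) to XNATUpload(project=self.project, ...), as the mask.py and modeling.py hunks below show.
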
19 changes: 12 additions & 7 deletions qipipe/pipeline/mask.py
@@ -3,7 +3,6 @@
from nipype.pipeline import engine as pe
from nipype.interfaces.utility import (IdentityInterface, Function)
from nipype.interfaces import fsl
from .. import project
from ..interfaces import (XNATUpload, MriVolCluster)
from .workflow_base import WorkflowBase
from qiutil.logging import logger
@@ -15,19 +14,22 @@
"""The XNAT scan time series resource name."""


def run(subject, session, scan, time_series, **opts):
def run(project, subject, session, scan, time_series, **opts):
"""
Creates a :class:`qipipe.pipeline.mask.MaskWorkflow` and runs it
on the given inputs.
:param project: the project name
:param subject: the input subject
:param session: the input session
:param scan: the input scan number
:param time_series: the input 4D NiFTI time series to mask
:param opts: additional :class:`MaskWorkflow` initialization parameters
:return: the XNAT mask resource name
"""
return MaskWorkflow(**opts).run(subject, session, scan, time_series)
wf_gen = MaskWorkflow(project, **opts)

return wf_gen.run(subject, session, scan, time_series)


class MaskWorkflow(WorkflowBase):
@@ -62,17 +64,20 @@ class MaskWorkflow(WorkflowBase):
interface options
"""

def __init__(self, cfg_file=None, base_dir=None):
def __init__(self, project, **opts):
"""
If the optional configuration file is specified, then the workflow
settings in that file override the default settings.
:param project: the XNAT project name
:param opts: the :class:`qipipe.pipeline.workflow_base.WorkflowBase`
initializer options, as well as the following options:
:keyword base_dir: the workflow execution directory
(default is a new temp directory)
:keyword cfg_file: the optional workflow inputs configuration file
"""
super(MaskWorkflow, self).__init__(logger(__name__), cfg_file)
super(MaskWorkflow, self).__init__(project, logger(__name__), **opts)

base_dir = opts.get('base_dir')
self.workflow = self._create_workflow(base_dir)
"""The mask creation workflow."""

@@ -167,7 +172,7 @@ def _create_workflow(self, base_dir=None):
workflow.connect(mask_name, 'out_file', inv_mask, 'out_file')

# Upload the mask to XNAT.
upload_mask_xfc = XNATUpload(project=project(), resource=RESOURCE,
upload_mask_xfc = XNATUpload(project=self.project, resource=RESOURCE,
modality='MR')
upload_mask = pe.Node(upload_mask_xfc, name='upload_mask')
workflow.connect(input_spec, 'subject', upload_mask, 'subject')
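
With that change applied, a minimal usage sketch of the revised mask entry point. Every literal value below (project, subject, session, scan, file names) is a hypothetical placeholder, not a value from this repository.

from qipipe.pipeline import mask

# The project name is now the leading positional argument of run();
# the remaining arguments and the return value (the XNAT mask resource
# name) are unchanged. All literals are hypothetical placeholders.
mask_resource = mask.run(
    'QIN',                # XNAT project name
    'Breast001',          # subject
    'Session01',          # session
    1,                    # scan number
    'scan_ts.nii.gz',     # 4D NiFTI time series to mask
    cfg_file='mask.cfg',  # optional workflow configuration file
)

Constructing the generator directly takes the same leading argument, e.g. MaskWorkflow('QIN', base_dir='/path/to/work'), with both values again hypothetical.
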
27 changes: 13 additions & 14 deletions qipipe/pipeline/modeling.py
@@ -6,7 +6,6 @@
from nipype.interfaces.utility import (IdentityInterface, Function, Merge)
import qiutil
from qiutil.logging import logger
from .. import project
from ..interfaces import XNATUpload
from ..helpers.bolus_arrival import bolus_arrival_index, BolusArrivalError
from .workflow_base import WorkflowBase
@@ -16,11 +15,12 @@
"""The XNAT modeling resource object label prefix."""


def run(subject, session, scan, time_series, **opts):
def run(project, subject, session, scan, time_series, **opts):
"""
Creates a :class:`qipipe.pipeline.modeling.ModelingWorkflow` and
runs it on the given inputs.
:param project: the project name
:param subject: the input subject
:param session: the input session
:param scan: input scan
@@ -31,7 +31,7 @@ def run(subject, session, scan, time_series, **opts):
result
"""
mask = opts.pop('mask', None)
wf = ModelingWorkflow(**opts)
wf = ModelingWorkflow(project, **opts)

return wf.run(subject, session, scan, time_series, mask=mask)

@@ -124,7 +124,7 @@ class ModelingWorkflow(WorkflowBase):
.. _AIRC DCE: https://everett.ohsu.edu/hg/qin_dce
"""

def __init__(self, **opts):
def __init__(self, project, **opts):
"""
The modeling parameters can be defined in either the options or the
configuration as follows:
@@ -147,8 +147,9 @@ def __init__(self, **opts):
- The *baseline_end_idx* defaults to 1 if it is not set in
either the input options or the configuration.
:param opts: the following initialization options:
:keyword cfg_file: the optional workflow inputs configuration file
:param project: the XNAT project name
:param opts: the :class:`qipipe.pipeline.workflow_base.WorkflowBase`
initializer options, as well as the following options:
:keyword base_dir: the workflow execution directory
(default a new temp directory)
:keyword r1_0_val: the optional fixed |R10| value
@@ -159,8 +160,7 @@ def __init__(self, **opts):
:keyword baseline_end_idx: the number of volumes to merge into a R1
series baseline image (default is 1)
"""
cfg_file = opts.pop('cfg_file', None)
super(ModelingWorkflow, self).__init__(logger(__name__), cfg_file)
super(ModelingWorkflow, self).__init__(project, logger(__name__), **opts)

resource = opts.pop('modeling', None)
if not resource:
@@ -237,7 +237,7 @@ def _generate_resource_name(self):
"""
return '_'.join((PK_PREFIX, qiutil.file.generate_file_name()))

def _create_workflow(self, base_dir=None, **opts):
def _create_workflow(self, **opts):
"""
Builds the modeling workflow.
@@ -249,12 +249,11 @@ def _create_workflow(self, base_dir=None, **opts):
self._logger.debug("Building the modeling workflow...")

# The base workflow.
if not base_dir:
base_dir = tempfile.mkdtemp()
base_dir = opts.pop('base_dir', tempfile.mkdtemp())
mdl_wf = pe.Workflow(name='modeling', base_dir=base_dir)

# Start with a base workflow.
base_wf = self._create_base_workflow(base_dir=base_dir, **opts)
base_wf = self._create_base_workflow(base_dir, **opts)

# The workflow input fields.
in_fields = ['subject', 'session', 'scan', 'time_series', 'mask',
@@ -280,7 +279,7 @@ def _create_workflow(self, base_dir=None, **opts):
for i, field in enumerate(out_fields):
base_field = 'output_spec.' + field
mdl_wf.connect(base_wf, base_field, merge_outputs, "in%d" % (i + 1))
upload_xfc = XNATUpload(project=project(), resource=self.resource)
upload_xfc = XNATUpload(project=self.project, resource=self.resource)
upload_node = pe.Node(upload_xfc, name='upload_outputs')
mdl_wf.connect(input_spec, 'subject', upload_node, 'subject')
mdl_wf.connect(input_spec, 'session', upload_node, 'session')
@@ -307,7 +306,7 @@ def _create_workflow(self, base_dir=None, **opts):

return mdl_wf

def _create_base_workflow(self, base_dir=None, **opts):
def _create_base_workflow(self, base_dir, **opts):
"""
Creates the modeling base workflow. This workflow performs the steps
described in :class:`qipipe.pipeline.modeling.ModelingWorkflow` with
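
The modeling entry point changes the same way; a sketch with the same hypothetical placeholders, passing the optional mask resource through run() as the diff above does.

from qipipe.pipeline import modeling

# project leads the positional arguments; run() pops the optional mask
# from the keyword options and forwards it to ModelingWorkflow.run().
# All literals are hypothetical placeholders.
modeling_result = modeling.run(
    'QIN',             # XNAT project name
    'Breast001',       # subject
    'Session01',       # session
    1,                 # scan number
    'scan_ts.nii.gz',  # 4D NiFTI time series
    mask='mask',       # optional mask resource name
)
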
