Nipype already places child workflows in subdirectories.
FredLoney committed Jun 15, 2017
1 parent 01800aa commit 8bee1ca
Showing 6 changed files with 26 additions and 29 deletions.
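Background for the change: when a Nipype workflow runs, its nodes execute under <base_dir>/<workflow name>/<node name>, so appending the workflow name to base_dir by hand produced a doubled path such as <base_dir>/airc/airc. The following is a minimal sketch of that behavior, not part of this commit; the names and the /tmp/qipipe-work path are illustrative.

import nipype.pipeline.engine as pe
from nipype.interfaces.utility import Function

def double(x):
    return 2 * x

# A trivial Function node so the workflow has something to execute.
node = pe.Node(Function(input_names=['x'], output_names=['y'], function=double),
               name='double')
node.inputs.x = 1

# Nipype creates /tmp/qipipe-work/airc/double as the node work area on its own;
# passing base_dir='/tmp/qipipe-work/airc' would nest it one level deeper.
workflow = pe.Workflow(name='airc', base_dir='/tmp/qipipe-work')
workflow.add_nodes([node])
workflow.run()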
6 changes: 2 additions & 4 deletions qipipe/pipeline/modeling.py
@@ -455,8 +455,7 @@ def _create_airc_workflow(self, **opts):
:param opts: the PK modeling parameters
:return: the Nipype Workflow
"""
- base_dir = "%s/%s" % (self.base_dir, 'airc')
- workflow = pe.Workflow(name='airc', base_dir=base_dir)
+ workflow = pe.Workflow(name='airc', base_dir=self.base_dir)

# The modeling profile configuration sections.
self.profile_sections = OHSU_CONF_SECTIONS
@@ -635,8 +634,7 @@ def _create_mock_workflow(self, **opts):
:param opts: the PK modeling parameters
:return: the Nipype Workflow
"""
- base_dir = "%s/%s" % (self.base_dir, 'mock')
- workflow = pe.Workflow(name='mock', base_dir=base_dir)
+ workflow = pe.Workflow(name='mock', base_dir=self.base_dir)

# The modeling profile configuration sections.
self.profile_sections = []
10 changes: 5 additions & 5 deletions qipipe/pipeline/qipipeline.py
@@ -638,7 +638,7 @@ def _create_workflow(self, subject, session, scan, actions, **opts):
output_names=['results'],
function=model)
mdl_node = pe.Node(mdl_xfc, name='model')
- mdl_opts = self._child_options(name='model')
+ mdl_opts = self._child_options()
mdl_opts['technique'] = self.modeling_technique
mdl_node.inputs.opts = mdl_opts
self.logger.info("Enabled modeling with options %s." % mdl_opts)
@@ -659,7 +659,7 @@ def _create_workflow(self, subject, session, scan, actions, **opts):
# than delegating to this qipipeline workflow as the
# parent, since Nipype Function arguments must be
# primitive.
- reg_opts = self._child_options(name='registration')
+ reg_opts = self._child_options()
reg_opts['resource'] = self.registration_resource
reg_opts['technique'] = self.registration_technique
if 'recursive_registration' in opts:
@@ -693,7 +693,7 @@ def _create_workflow(self, subject, session, scan, actions, **opts):
output_names=['volume'],
function=roi)
roi_node = pe.Node(roi_xfc, name='roi')
- roi_opts = self._child_options(name='roi')
+ roi_opts = self._child_options()
roi_node.inputs.opts = roi_opts
self.logger.info("Enabled ROI conversion with options %s." %
roi_opts)
@@ -716,7 +716,7 @@ def _create_workflow(self, subject, session, scan, actions, **opts):
# TypeError: cannot deepcopy this pattern object
# The work-around is to break out the separate simple options
# that the WorkflowBase constructor extracts from the parent.
- stg_opts = self._child_options(name='stage')
+ stg_opts = self._child_options()
if 'dest' in opts:
stg_opts['dest'] = opts['dest']
if not self.collection:
@@ -757,7 +757,7 @@ def _create_workflow(self, subject, session, scan, actions, **opts):
raise PipelineError("The mask workflow requires the"
" collection option")
crop_posterior = self.collection.crop_posterior
- mask_opts = self._child_options(name='mask')
+ mask_opts = self._child_options()
mask_opts['crop_posterior'] = crop_posterior
mask_inputs = ['subject', 'session', 'scan', 'time_series',
'opts']
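The comments above explain why the child steps run as Function nodes: a Function input is serialized, so only primitive values (and plain containers of them) can be passed, and the child options are therefore collected into a simple dict. A hedged sketch of that pattern follows; the register stand-in and the option values are illustrative, not the project's actual code.

import nipype.pipeline.engine as pe
from nipype.interfaces.utility import Function

def register(subject, session, scan, in_files, opts):
    # Stand-in for the qipipe registration helper: the real function builds
    # and runs the child registration workflow from the primitive opts dict.
    return in_files

reg_xfc = Function(input_names=['subject', 'session', 'scan', 'in_files', 'opts'],
                   output_names=['out_files'],
                   function=register)
reg_node = pe.Node(reg_xfc, name='register')

# The opts dict holds only primitives; after this commit it carries the
# parent's base_dir unchanged rather than a per-child subdirectory.
reg_node.inputs.opts = dict(project='QIN', base_dir='/tmp/qipipe-work',
                            config_dir='/etc/qipipe', dry_run=True,
                            distributable=False, technique='mock')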
3 changes: 1 addition & 2 deletions qipipe/pipeline/registration.py
@@ -393,8 +393,7 @@ def _create_realignment_workflow(self, **opts):
self.logger.debug('Creating the registration realignment workflow...')

# The workflow.
- base_dir = "%s/%s" % (self.base_dir, self.technique)
- realign_wf = pe.Workflow(name=self.technique, base_dir=base_dir)
+ realign_wf = pe.Workflow(name=self.technique, base_dir=self.base_dir)

# The workflow input.
in_fields = ['subject', 'session', 'scan', 'in_file',
3 changes: 1 addition & 2 deletions qipipe/pipeline/staging.py
@@ -384,8 +384,7 @@ def _create_workflow(self):
self.logger.debug('Creating the DICOM processing workflow...')

# The Nipype workflow object.
- base_dir = "%s/%s" % (self.base_dir, 'stage_volume')
- workflow = pe.Workflow(name='stage_volume', base_dir=base_dir)
+ workflow = pe.Workflow(name='stage_volume', base_dir=self.base_dir)

# The workflow input.
in_fields = ['collection', 'subject', 'session', 'scan',
4 changes: 3 additions & 1 deletion qipipe/pipeline/workflow_base.py
@@ -178,18 +178,20 @@ def depict_workflow(self, workflow):
self.logger.debug("The %s workflow graph is depicted at %s.png." %
(workflow.name, base_name))

- def _child_options(self, name=None):
+ def _child_options(self):
"""
Collects the following options for creating a child workflow:
* project
* config_dir
+ * base_dir
* dry_run
* distributable
:return: the options sufficient to create a child workflow
"""
return dict(
project=self.project,
+ base_dir=self.base_dir,
config_dir=self.config_dir,
dry_run=self.dry_run,
distributable=self.is_distributable
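With base_dir now part of the child options, a parent workflow and its children share one work area and Nipype keeps them apart by workflow name. A small sketch of the resulting layout (the path and the workflow names are examples only):

import os

base_dir = '/tmp/qipipe-work'                        # shared work area
parent_work = os.path.join(base_dir, 'qipipeline')   # parent workflow nodes
child_work = os.path.join(base_dir, 'registration')  # child workflow nodes

# Before this commit the child was also handed base_dir + '/registration',
# so its nodes landed one level deeper than intended:
old_child_work = os.path.join(base_dir, 'registration', 'registration')
print(parent_work, child_work, old_child_work)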
29 changes: 14 additions & 15 deletions test/unit/pipeline/test_qipipeline.py
@@ -20,22 +20,22 @@
class TestQIPipeline(object):
"""
Pipeline unit tests.
Note:: a precondition for running this test is that the environment
variable ``QIPIPE_DATA`` is set to the DICOM source directory.
If ``QIPIPE_DATA`` is not set, then no test cases are run and a
log message is issued.
Note:: this test takes app. four hours to run serially without
modeling.
"""

def setUp(self):
shutil.rmtree(RESULTS, True)

def tearDown(self):
shutil.rmtree(RESULTS, True)

def test_breast(self):
data = os.getenv('QIPIPE_DATA')
if data:
@@ -57,7 +57,7 @@ def test_breast(self):
logger(__name__).info('Skipping the pipeline unit Breast'
' test, since the QIPIPE_DATA environment'
' variable is not set.')

def test_sarcoma(self):
data = os.getenv('QIPIPE_DATA')
if data:
@@ -78,24 +78,23 @@ def test_sarcoma(self):
logger(__name__).info('Skipping the pipeline unit Sarcoma'
' test, since the QIPIPE_DATA environment'
' variable is not set.')

def _test_collection(self, collection, fixture):
"""
Run the pipeline on the given collection and verify that scans are
created in XNAT.
:param collection: the image collection name
:param fixture: the test input directory holding a link to the
first visit data
"""
logger(__name__).debug("Testing the pipeline on %s..." % fixture)

# The staging destination and work area.
dest = os.path.join(RESULTS, 'data')
- base_dir = os.path.join(RESULTS, 'work')


# The pipeline options.
- opts = dict(base_dir=base_dir, config_dir=CONF_DIR, dest=dest,
+ opts = dict(base_dir=RESULTS, config_dir=CONF_DIR, dest=dest,
project=PROJECT, collection=collection,
registration_technique='mock',
modeling_technique='mock')
@@ -106,7 +105,7 @@ def _test_collection(self, collection, fixture):
subjects = sbj_dir_dict.keys()
# The test subject input directories.
sources = sbj_dir_dict.values()

with qixnat.connect() as xnat:
# Delete any existing test subjects.
for sbj in subjects:
@@ -142,13 +141,13 @@ def _test_collection(self, collection, fixture):
assert_true(mdl_obj.exists(),
"The %s %s modeling resource %s was not"
" created in XNAT" % (sbj, sess, rsc))

# Delete the test subjects.
for sbj in subjects:
xnat.delete(PROJECT, sbj)


if __name__ == "__main__":
import nose

nose.main(defaultTest=__name__)
