Skip to content

fix: updated indexing to ensure edge cases are considered #1046

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Feb 10, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGES
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
Next release
============

* FIX: Edge case with sparsemodels and PEP8 cleanup (https://github.com/nipy/nipype/pull/1046)
* ENH: New io interfaces for JSON files reading/writing (https://github.com/nipy/nipype/pull/1020)
* ENH: Enhanced openfmri script to support freesurfer linkage (https://github.com/nipy/nipype/pull/1037)
* BUG: matplotlib is supposed to be optional (https://github.com/nipy/nipype/pull/1003)
Expand Down
22 changes: 11 additions & 11 deletions examples/fmri_ants_openfmri.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,7 +218,7 @@ def create_reg_workflow(name='registration'):
reg.inputs.args = '--float'
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
reg.inputs.num_threads = 4
reg.plugin_args = {'qsub_args': '-pe orte 4',
reg.plugin_args = {'qsub_args': '-pe orte 4',
'sbatch_args': '--mem=6G -c 4'}
register.connect(stripper, 'out_file', reg, 'moving_image')
register.connect(inputnode,'target_image_brain', reg,'fixed_image')
Expand Down Expand Up @@ -428,7 +428,7 @@ def create_fs_reg_workflow(name='registration'):
reg.inputs.args = '--float'
reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
reg.inputs.num_threads = 4
reg.plugin_args = {'qsub_args': '-pe orte 4',
reg.plugin_args = {'qsub_args': '-pe orte 4',
'sbatch_args': '--mem=6G -c 4'}
register.connect(stripper, 'out_file', reg, 'moving_image')
register.connect(inputnode,'target_image', reg,'fixed_image')
Expand Down Expand Up @@ -562,17 +562,17 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
'task%03d_run*' % (idx + 1))))
runs = [int(val[-3:]) for val in files]
run_ids.insert(idx, runs)
json_info = os.path.join(base_dir, subject_id, 'BOLD',
'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
json_info = os.path.join(base_dir, subject_id, 'BOLD',
'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
'bold_scaninfo.json')
if os.path.exists(json_info):
import json
with open(json_info, 'rt') as fp:
data = json.load(fp)
TR = data['global']['const']['RepetitionTime']/1000.
else:
task_scan_key = os.path.join(base_dir, subject_id, 'BOLD',
'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
task_scan_key = os.path.join(base_dir, subject_id, 'BOLD',
'task%03d_run%03d' % (task_id, run_ids[task_id - 1][0]),
'scan_key.txt')
if os.path.exists(task_scan_key):
TR = np.genfromtxt(task_scan_key)[1]
Expand Down Expand Up @@ -667,7 +667,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
name='datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '*'

if has_contrast:
datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
Expand Down Expand Up @@ -909,18 +909,18 @@ def split_files(in_files, splits):
splitfunc, 'in_files')

if subjects_dir:
get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
iterfield=['in_file'], name='get_aparc_means')
get_roi_mean.inputs.avgwf_txt_file = True
wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean, 'in_file')
wf.connect(registration, 'outputspec.aparc', get_roi_mean, 'segmentation_file')
get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),

get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
iterfield=['in_file'], name='get_aparc_tsnr')
get_roi_tsnr.inputs.avgwf_txt_file = True
wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, 'segmentation_file')

"""
Connect to a datasink
"""
Expand Down
324 changes: 187 additions & 137 deletions nipype/algorithms/modelgen.py

Large diffs are not rendered by default.

46 changes: 0 additions & 46 deletions nipype/algorithms/tests/test_auto_Overlap.py

This file was deleted.

6 changes: 4 additions & 2 deletions nipype/pipeline/plugins/tests/test_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ def test_callback_multiproc_normal():
output_names=[]),
name='f_node')
wf.add_nodes([f_node])
wf.config['execution'] = {'crashdump_dir': wf.base_dir}
wf.config['execution']['crashdump_dir'] = wf.base_dir
wf.config['execution']['poll_sleep_duration'] = 2
wf.run(plugin='MultiProc', plugin_args={'status_callback': so.callback})
assert_equal(len(so.statuses), 2)
for (n, s) in so.statuses:
Expand All @@ -89,7 +90,8 @@ def test_callback_multiproc_exception():
output_names=[]),
name='f_node')
wf.add_nodes([f_node])
wf.config['execution'] = {'crashdump_dir': wf.base_dir}
wf.config['execution']['crashdump_dir'] = wf.base_dir
wf.config['execution']['poll_sleep_duration'] = 2
try:
wf.run(plugin='MultiProc',
plugin_args={'status_callback': so.callback})
Expand Down
1 change: 1 addition & 0 deletions nipype/pipeline/plugins/tests/test_multiproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def test_run_multiproc():
pipe.connect([(mod1,mod2,[('output1','input1')])])
pipe.base_dir = os.getcwd()
mod1.inputs.input1 = 1
pipe.config['execution']['poll_sleep_duration'] = 2
execgraph = pipe.run(plugin="MultiProc")
names = ['.'.join((node._hierarchy,node.name)) for node in execgraph.nodes()]
node = execgraph.nodes()[names.index('pipe.mod1')]
Expand Down
4 changes: 3 additions & 1 deletion nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,9 @@ def run_multiproc_nondaemon_with_flag(nondaemon_flag):
pipe.base_dir = os.getcwd()
f1.inputs.insum = 0

pipe.config = {'execution': {'stop_on_first_crash': True}}
pipe.config['execution']['stop_on_first_crash'] = True
pipe.config['execution']['poll_sleep_duration'] = 2

# execute the pipe using the MultiProc plugin with 2 processes and the non_daemon flag
# to enable child processes which start other multiprocessing jobs
execgraph = pipe.run(plugin="MultiProc",
Expand Down
3 changes: 2 additions & 1 deletion nipype/pipeline/tests/test_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -660,7 +660,8 @@ def func1(in1):
# set local check
w1.config['execution'] = {'stop_on_first_crash': 'true',
'local_hash_check': 'true',
'crashdump_dir': wd}
'crashdump_dir': wd,
'poll_sleep_duration': 2}

# test output of num_subnodes method when serial is default (False)
yield assert_equal, n1.num_subnodes(), len(n1.inputs.in1)
Expand Down