From 34dd5ea16e32680a70e84fcf723d6536d043fffe Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 12 Aug 2013 13:23:11 -0400 Subject: [PATCH 01/60] enh: appropriate xor for threshold and white matter parameters --- nipype/interfaces/freesurfer/model.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 9f35ae8d0e..19963f3832 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -342,9 +342,9 @@ def __init__(self, **kwargs): class BinarizeInputSpec(FSTraitedSpec): in_file = File(exists=True, argstr='--i %s', mandatory=True, copyfile=False, desc='input volume') - min = traits.Float(argstr='--min %f', + min = traits.Float(argstr='--min %f', xor=['wm_ven_csf'], desc='min thresh') - max = traits.Float(argstr='--max %f', + max = traits.Float(argstr='--max %f', xor=['wm_ven_csf'], desc='max thresh') rmin = traits.Float(argstr='--rmin %f', desc='compute min based on rmin*globalmean') @@ -356,7 +356,7 @@ class BinarizeInputSpec(FSTraitedSpec): desc='set match vals to 2 and 41 (aseg for cerebral WM)') ventricles = traits.Bool(argstr='--ventricles', desc='set match vals those for aseg ventricles+choroid (not 4th)') - wm_ven_csf = traits.Bool(argstr='--wm+vcsf', + wm_ven_csf = traits.Bool(argstr='--wm+vcsf', xor=['min', 'max'], desc='WM and ventricular CSF, including choroid (not 4th)') binary_file = File(argstr='--o %s', genfile=True, desc='binary output volume') @@ -430,7 +430,7 @@ def _list_outputs(self): outfile = fname_presuffix(self.inputs.in_file, newpath=os.getcwd(), suffix='_thresh') - outputs['binary_file'] = outfile + outputs['binary_file'] = os.path.abspath(outfile) value = self.inputs.count_file if isdefined(value): if isinstance(value, bool): From d3616d23a266acd58aa55d857596ef307812b607 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 12 Aug 2013 13:25:39 -0400 Subject: [PATCH 02/60] enh: adding initial version of resting workflow --- examples/rsfmri_preprocessing.py | 229 +++++++++++++++++++++++++++++++ 1 file changed, 229 insertions(+) create mode 100644 examples/rsfmri_preprocessing.py diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py new file mode 100644 index 0000000000..65cb862152 --- /dev/null +++ b/examples/rsfmri_preprocessing.py @@ -0,0 +1,229 @@ +import os + +from nipype import (afni, fsl, freesurfer, nipy, Function, + DataGrabber, DataSink) +from nipype import Workflow, Node, MapNode + +from nipype.algorithms.rapidart import ArtifactDetect +from nipype.algorithms.misc import TSNR +from nipype.interfaces.fsl.utils import EPIDeWarp +from nipype.interfaces.io import FreeSurferSource + +import numpy as np + +#robust mean +def median(in_files): + import os + import nibabel as nb + import numpy as np + from nipype.utils.filemanip import filename_to_list + average = None + for idx, filename in enumerate(filename_to_list(in_files)): + img = nb.load(filename) + data = np.median(img.get_data(), axis=3) + if not average: + average = data + else: + average = average + data + median_img = nb.Nifti1Image(average/float(idx + 1), + img.get_affine(), img.get_header()) + filename = os.path.join(os.getcwd(), 'median.nii.gz') + median_img.to_filename(filename) + return filename + + +def get_aparc_aseg(files): + """Return the aparc+aseg.mgz file""" + for name in files: + if 'aparc+aseg.mgz' in name: + return name + raise ValueError('aparc+aseg.mgz not found') + + +def get_info(dicom_files): + from 
dcmstack.extract import default_extractor + from dicom import read_file + from nipype.utils.filemanip import filename_to_list + meta = default_extractor(read_file(filename_to_list(dicom_files)[0], + stop_before_pixels=True, + force=True)) + return meta['RepetitionTime']/1000., meta['CsaImage.MosaicRefAcqTimes'] + + +def motion_regressors(motion_params, order=2, derivatives=2): + """ + motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic) + """ + from nipype.utils.filemanip import filename_to_list + import numpy as np + out_files = [] + for idx, filename in enumerate(filename_to_list(motion_params)): + params = np.genfromtxt(filename) + out_params = params + for d in range(1, derivatives + 1): + cparams = np.vstack((np.repeat(params[0, :][None, :], d, axis=0), + params)) + out_params = np.hstack((out_params, np.diff(cparams, d, axis=0))) + out_params2 = out_params + for i in range(2, order + 1): + out_params2 = np.hstack((out_params2, np.power(out_params, i))) + filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx) + np.savetxt(filename, out_params2, fmt="%.10f") + out_files.append(filename) + return out_files + +def create_workflow(files, + subject_id, + n_vol=0, + despike=True, + TR=None, + slice_times=None, + fieldmap_images=None, + norm_threshold=1): + + wf = Workflow(name='resting') + + #skip vols + remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1), + iterfield=['in_file'], + name="remove_volumes") + remove_vol.inputs.in_file = files + + # despike + despike = MapNode(afni.Despike(outputtype='NIFTI_GZ'), + iterfield=['in_file'], + name='despike') + #despike.plugin_args = {'qsub_args': '-l nodes=1:ppn='} + + wf.connect(remove_vol, 'roi_file', despike, 'in_file') + + #nipy realign + realign = Node(nipy.FmriRealign4d(), name='realign') + realign.inputs.tr = TR + realign.inputs.time_interp = True + realign.inputs.slice_order = np.argsort(np.argsort(slice_times)).tolist() + if despike: + wf.connect(despike, 'out_file', realign, 'in_file') + else: + wf.connect(remove_vol, 'roi_file', realign, 'in_file') + + #TSNR + tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') + wf.connect(realign, 'out_file', tsnr, 'in_file') + + calc_median = Node(Function(input_names=['in_files'], + output_names=['median_file'], + function=median), + name='median') + wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') + + register = Node(freesurfer.BBRegister(), + name='bbregister') + register.inputs.subject_id = subject_id + register.inputs.init = 'fsl' + register.inputs.contrast_type = 't2' + register.inputs.out_fsl_file = True + + if fieldmap_images: + pass + else: + wf.connect(calc_median, 'median_file', register, 'source_file') + + fssource = Node(FreeSurferSource(), + name='fssource') + fssource.inputs.subject_id = subject_id + fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] + + #extract wm+csf, brain masks by eroding freesurfer lables + wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask') + mask = wmcsf.clone('anatmask') + + wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True, + interp='nearest'), + name='wmcsftransform') + wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] + + wmcsf.inputs.wm_ven_csf = True + wmcsf.inputs.binary_file = 'wmcsf.nii.gz' + wmcsf.inputs.erode = 2 + wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') + + wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') + wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file') + wf.connect(wmcsf, 
'binary_file', wmcsftransform, 'target_file') + + mask.inputs.dilate = 3 + mask.inputs.binary_file = 'mask.nii.gz' + mask.inputs.erode = 2 + mask.inputs.min = 0.5 + wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') + + masktransform = wmcsftransform.clone("masktransform") + wf.connect(calc_median, 'median_file', masktransform, 'source_file') + wf.connect(register, 'out_reg_file', masktransform, 'reg_file') + wf.connect(wmcsf, 'binary_file', masktransform, 'target_file') + + #art outliers + art = Node(interface=ArtifactDetect(use_differences=[True, False], + use_norm=True, + norm_threshold=norm_threshold, + zintensity_threshold=3, + parameter_source='NiPy', + bound_by_brainmask=True, + mask_type='file'), + name="art") + wf.connect(tsnr, 'detrended_file', art, 'realigned_files') + wf.connect(realign, 'par_file', + art, 'realignment_parameters') + wf.connect(masktransform, 'transformed_file', art, 'mask_file') + return wf + +if __name__ == "__main__": + import argparse + dcmfile = '/software/data/sad_resting/500000-32-1.dcm' + niifile = '/software/data/sad_resting/resting.nii.gz' + TR, slice_times = get_info(dcmfile) + wf = create_workflow(niifile, + 'SAD_024', + n_vol=2, + despike=True, + TR=TR, + slice_times=slice_times, + ) + wf.config['execution'].update(**{'hash_method': 'content', + 'remove_unnecessary_outputs': False}) + wf.base_dir = os.getcwd() + wf.run() + +''' +#fieldmap dewarping +unwarp = MapNode(EPIDeWarp(), name='dewarp') + + +# regress motion + art (compnorm + outliers) from realigned data +def regress(filter): + return filtered_data + +#compute compcorr on wm, csf separately +def compcorr(): + return components + +#regress those out +def regress(comps): + return filtered_data + +#smooth +freesurfer.SurfaceSmooth() + +#bandpass +fsl.ImageMaths + +#convert to grayordinates +def to_grayordinates(): + return grayordinates + +#compute similarity matrix and partial correlation +def compute_similarity(): + return matrix + +''' From 27eea36e556e3959c708daa1b54b8bfedaef1026 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 12 Aug 2013 19:31:38 -0400 Subject: [PATCH 03/60] enh: completed upto compcorr correction --- examples/rsfmri_preprocessing.py | 122 ++++++++++++++++++++++++++----- 1 file changed, 105 insertions(+), 17 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 65cb862152..6d18c8e17e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -56,6 +56,7 @@ def motion_regressors(motion_params, order=2, derivatives=2): """ from nipype.utils.filemanip import filename_to_list import numpy as np + import os out_files = [] for idx, filename in enumerate(filename_to_list(motion_params)): params = np.genfromtxt(filename) @@ -72,6 +73,63 @@ def motion_regressors(motion_params, order=2, derivatives=2): out_files.append(filename) return out_files +def build_filter1(motion_params, comp_norm, outliers): + """Builds a regressor set comprisong motion parameters, composite norm and + outliers + """ + from nipype.utils.filemanip import filename_to_list + import numpy as np + import os + out_files = [] + for idx, filename in enumerate(filename_to_list(motion_params)): + params = np.genfromtxt(filename) + norm_val = np.genfromtxt(filename_to_list(comp_norm)[idx]) + out_params = np.hstack((params, norm_val[:, None])) + try: + outlier_val = np.genfromtxt(filename_to_list(outliers)[idx]) + except IOerror: + outlier_val = np.empty((0)) + if outlier_val.shape[0] != 0: + for index in 
outlier_val: + outlier_vector = np.zeros((out_params.shape[0], 1)) + outlier_vector[index] = 1 + out_params = np.hstack((out_params, outlier_vector)) + filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx) + np.savetxt(filename, out_params, fmt="%.10f") + out_files.append(filename) + return out_files + + +def extract_noise_components(realigned_file, mask_file, num_components=6): + """Derive components most reflective of physiological noise + + Parameters + ---------- + realigned_file : + mask_file : + num_components : + + Returns + ------- + components_file : + """ + + import os + from nibabel import load + import numpy as np + import scipy as sp + + imgseries = load(realigned_file) + noise_mask = load(mask_file) + voxel_timecourses = imgseries.get_data()[np.nonzero(noise_mask.get_data())] + voxel_timecourses = voxel_timecourses.byteswap().newbyteorder() + voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0 + _, _, v = sp.linalg.svd(voxel_timecourses, full_matrices=False) + components_file = os.path.join(os.getcwd(), 'noise_components.txt') + np.savetxt(components_file, v[:num_components, :].T) + return components_file + + def create_workflow(files, subject_id, n_vol=0, @@ -79,7 +137,8 @@ def create_workflow(files, TR=None, slice_times=None, fieldmap_images=None, - norm_threshold=1): + norm_threshold=1, + num_components=6): wf = Workflow(name='resting') @@ -144,8 +203,9 @@ def create_workflow(files, wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] wmcsf.inputs.wm_ven_csf = True + wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63] wmcsf.inputs.binary_file = 'wmcsf.nii.gz' - wmcsf.inputs.erode = 2 + wmcsf.inputs.erode = 1 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') @@ -161,7 +221,7 @@ def create_workflow(files, masktransform = wmcsftransform.clone("masktransform") wf.connect(calc_median, 'median_file', masktransform, 'source_file') wf.connect(register, 'out_reg_file', masktransform, 'reg_file') - wf.connect(wmcsf, 'binary_file', masktransform, 'target_file') + wf.connect(mask, 'binary_file', masktransform, 'target_file') #art outliers art = Node(interface=ArtifactDetect(use_differences=[True, False], @@ -176,6 +236,47 @@ def create_workflow(files, wf.connect(realign, 'par_file', art, 'realignment_parameters') wf.connect(masktransform, 'transformed_file', art, 'mask_file') + + motreg = Node(Function(input_names=['motion_params', 'order', + 'derivatives'], + output_names=['out_files'], + function=motion_regressors), + name='getmotionregress') + wf.connect(realign, 'par_file', motreg, 'motion_params') + + createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm', + 'outliers'], + output_names=['out_files'], + function=build_filter1), + name='makemotionbasedfilter') + wf.connect(motreg, 'out_files', createfilter1, 'motion_params') + wf.connect(art, 'norm_files', createfilter1, 'comp_norm') + wf.connect(art, 'outlier_files', createfilter1, 'outliers') + + filter1 = MapNode(fsl.FilterRegressor(filter_all=True), + iterfield=['in_file', 'design_file'], + name='filtermotion') + wf.connect(tsnr, 'detrended_file', filter1, 'in_file') + wf.connect(createfilter1, 'out_files', filter1, 'design_file') + wf.connect(masktransform, 'transformed_file', filter1, 'mask') + + createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', + 'num_components'], + output_names=['out_files'], + function=extract_noise_components), + 
iterfield=['realigned_file'], + name='makecompcorrfilter') + createfilter2.inputs.num_components = num_components + wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') + + filter2 = MapNode(fsl.FilterRegressor(filter_all=True), + iterfield=['in_file', 'design_file'], + name='filtercompcorr') + wf.connect(filter1, 'out_file', filter2, 'in_file') + wf.connect(createfilter2, 'out_files', filter2, 'design_file') + wf.connect(masktransform, 'transformed_file', filter2, 'mask') + return wf if __name__ == "__main__": @@ -200,20 +301,8 @@ def create_workflow(files, unwarp = MapNode(EPIDeWarp(), name='dewarp') -# regress motion + art (compnorm + outliers) from realigned data -def regress(filter): - return filtered_data - -#compute compcorr on wm, csf separately -def compcorr(): - return components - -#regress those out -def regress(comps): - return filtered_data - #smooth -freesurfer.SurfaceSmooth() +freesurfer.Smooth() #bandpass fsl.ImageMaths @@ -225,5 +314,4 @@ def to_grayordinates(): #compute similarity matrix and partial correlation def compute_similarity(): return matrix - ''' From 41f48e4ad3ac28ae1a86cf58167f7ae29b1e1fcb Mon Sep 17 00:00:00 2001 From: cdla Date: Tue, 13 Aug 2013 14:11:01 -0400 Subject: [PATCH 04/60] updated changes --- examples/rsfmri_preprocessing.py | 40 +++++++++++++++++++++++--------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 6d18c8e17e..307effd96e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -138,7 +138,11 @@ def create_workflow(files, slice_times=None, fieldmap_images=None, norm_threshold=1, - num_components=6): + num_components=6, + vol_fwhm=6, + surf_fwhm=6, + lowpass_freq=.08, + highpass_freq=.9): wf = Workflow(name='resting') @@ -276,13 +280,34 @@ def create_workflow(files, wf.connect(filter1, 'out_file', filter2, 'in_file') wf.connect(createfilter2, 'out_files', filter2, 'design_file') wf.connect(masktransform, 'transformed_file', filter2, 'mask') - + freesurfer.Smooth() + smooth = Node(freesurfer.Smooth(), + name = 'smooth') + smooth.inputs.surface_fwhm=surf_fwhm + smooth.inputs.vol_fwhm=vol_fwhm + wf.connect(filter2, 'out_file', smooth, 'in_file') + wf.connect(register, 'out_reg_file', smooth, 'reg_file') + + bandpass = Node(fsl.TemporalFilter(), + name='bandpassfilter') + if highpass_freq < 0: + bandpass.inputs.highpass_sigma = -1 + else: + bandpass.inputs.highpass_sigma = 1 / (2 * TR * highpass_freq) + if lowpass_freq < 0: + bandpass.inputs.lowpass_sigma = -1 + else: + bandpass.inputs.lowpass_sigma = 1 / (2 * TR * lowpass_freq) + wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') return wf if __name__ == "__main__": import argparse - dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - niifile = '/software/data/sad_resting/resting.nii.gz' + #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' + #niifile = '/software/data/sad_resting/resting.nii.gz' + dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' + niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' + TR, slice_times = get_info(dcmfile) wf = create_workflow(niifile, 'SAD_024', @@ -300,13 +325,6 @@ def create_workflow(files, #fieldmap dewarping unwarp = MapNode(EPIDeWarp(), name='dewarp') - -#smooth -freesurfer.Smooth() - -#bandpass -fsl.ImageMaths - #convert to grayordinates def to_grayordinates(): return grayordinates From 
731308bf4b41cc9670545d1a671408731c908515 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Tue, 13 Aug 2013 15:48:50 -0400 Subject: [PATCH 05/60] fix: parameters, node types --- examples/rsfmri_preprocessing.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 307effd96e..dcfeb14ffe 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -87,7 +87,7 @@ def build_filter1(motion_params, comp_norm, outliers): out_params = np.hstack((params, norm_val[:, None])) try: outlier_val = np.genfromtxt(filename_to_list(outliers)[idx]) - except IOerror: + except IOError: outlier_val = np.empty((0)) if outlier_val.shape[0] != 0: for index in outlier_val: @@ -140,7 +140,7 @@ def create_workflow(files, norm_threshold=1, num_components=6, vol_fwhm=6, - surf_fwhm=6, + surf_fwhm=10, lowpass_freq=.08, highpass_freq=.9): @@ -280,16 +280,19 @@ def create_workflow(files, wf.connect(filter1, 'out_file', filter2, 'in_file') wf.connect(createfilter2, 'out_files', filter2, 'design_file') wf.connect(masktransform, 'transformed_file', filter2, 'mask') - freesurfer.Smooth() - smooth = Node(freesurfer.Smooth(), - name = 'smooth') + + smooth = MapNode(freesurfer.Smooth(), + iterfield=['in_file'], + name = 'smooth') + smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1) smooth.inputs.surface_fwhm=surf_fwhm smooth.inputs.vol_fwhm=vol_fwhm wf.connect(filter2, 'out_file', smooth, 'in_file') wf.connect(register, 'out_reg_file', smooth, 'reg_file') - bandpass = Node(fsl.TemporalFilter(), - name='bandpassfilter') + bandpass = MapNode(fsl.TemporalFilter(), + iterfield=['in_file'], + name='bandpassfilter') if highpass_freq < 0: bandpass.inputs.highpass_sigma = -1 else: @@ -303,10 +306,10 @@ def create_workflow(files, if __name__ == "__main__": import argparse - #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - #niifile = '/software/data/sad_resting/resting.nii.gz' - dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' - niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' + dcmfile = '/software/data/sad_resting/500000-32-1.dcm' + niifile = '/software/data/sad_resting/resting.nii.gz' + #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' + #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' TR, slice_times = get_info(dcmfile) wf = create_workflow(niifile, @@ -315,6 +318,9 @@ def create_workflow(files, despike=True, TR=TR, slice_times=slice_times, + vol_fwhm=4, + lowpass_freq=-1, + highpass_freq=-1 ) wf.config['execution'].update(**{'hash_method': 'content', 'remove_unnecessary_outputs': False}) From 39793ef71c08dae4a64f789808bd957362e0b729 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 14 Aug 2013 07:54:47 -0400 Subject: [PATCH 06/60] changes filter1 form fsl.FilterRegressor to fsl.GLM --- examples/rsfmri_preprocessing.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 307effd96e..e15f18b2c3 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -257,12 +257,20 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') - filter1 = MapNode(fsl.FilterRegressor(filter_all=True), - iterfield=['in_file', 'design_file'], - name='filtermotion') + #filter1 = 
MapNode(fsl.FilterRegressor(filter_all=True), + # iterfield=['in_file', 'design_file'], + # name='filtermotion') + #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') + #wf.connect(createfilter1, 'out_files', filter1, 'design_file') + #wf.connect(masktransform, 'transformed_file', filter1, 'mask') + + filter1 = MapNode(fsl.GLM(), + iterfield=['in_file', 'design_file'], + name='filtermotion') wf.connect(tsnr, 'detrended_file', filter1, 'in_file') wf.connect(createfilter1, 'out_files', filter1, 'design_file') - wf.connect(masktransform, 'transformed_file', filter1, 'mask') + wf.connect(masktransform,'transformed_file',filter1,'mask') + createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components'], @@ -271,7 +279,9 @@ def create_workflow(files, iterfield=['realigned_file'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components - wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + #wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + wf.connect(filter1, 'out_res', createfilter2,'realigned_file') + wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') filter2 = MapNode(fsl.FilterRegressor(filter_all=True), From ccd1d48a016fc06723616f72f3fcedef62ae0950 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 14 Aug 2013 10:23:56 -0400 Subject: [PATCH 07/60] enh: use voxel dimension were erosion and smoothing, pep8 cleanup --- examples/rsfmri_preprocessing.py | 55 +++++++++++++++++++++----------- 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index dcfeb14ffe..b7ba9906fc 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -1,5 +1,8 @@ import os +from nipype.interfaces.base import CommandLine +CommandLine.set_default_terminal_output('allatonce') + from nipype import (afni, fsl, freesurfer, nipy, Function, DataGrabber, DataSink) from nipype import Workflow, Node, MapNode @@ -11,6 +14,7 @@ import numpy as np + #robust mean def median(in_files): import os @@ -47,7 +51,8 @@ def get_info(dicom_files): meta = default_extractor(read_file(filename_to_list(dicom_files)[0], stop_before_pixels=True, force=True)) - return meta['RepetitionTime']/1000., meta['CsaImage.MosaicRefAcqTimes'] + return (meta['RepetitionTime']/1000., meta['CsaImage.MosaicRefAcqTimes'], + meta['SpacingBetweenSlices']) def motion_regressors(motion_params, order=2, derivatives=2): @@ -73,6 +78,7 @@ def motion_regressors(motion_params, order=2, derivatives=2): out_files.append(filename) return out_files + def build_filter1(motion_params, comp_norm, outliers): """Builds a regressor set comprisong motion parameters, composite norm and outliers @@ -136,10 +142,11 @@ def create_workflow(files, despike=True, TR=None, slice_times=None, + slice_thickness=None, fieldmap_images=None, norm_threshold=1, num_components=6, - vol_fwhm=6, + vol_fwhm=None, surf_fwhm=10, lowpass_freq=.08, highpass_freq=.9): @@ -175,9 +182,9 @@ def create_workflow(files, wf.connect(realign, 'out_file', tsnr, 'in_file') calc_median = Node(Function(input_names=['in_files'], - output_names=['median_file'], - function=median), - name='median') + output_names=['median_file'], + function=median), + name='median') wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') register = Node(freesurfer.BBRegister(), @@ -218,7 +225,7 @@ def create_workflow(files, mask.inputs.dilate = 3 mask.inputs.binary_file = 'mask.nii.gz' - mask.inputs.erode = 2 + 
mask.inputs.erode = int(slice_thickness) mask.inputs.min = 0.5 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') @@ -249,10 +256,10 @@ def create_workflow(files, wf.connect(realign, 'par_file', motreg, 'motion_params') createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm', - 'outliers'], - output_names=['out_files'], - function=build_filter1), - name='makemotionbasedfilter') + 'outliers'], + output_names=['out_files'], + function=build_filter1), + name='makemotionbasedfilter') wf.connect(motreg, 'out_files', createfilter1, 'motion_params') wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') @@ -265,7 +272,7 @@ def create_workflow(files, wf.connect(masktransform, 'transformed_file', filter1, 'mask') createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', - 'num_components'], + 'num_components'], output_names=['out_files'], function=extract_noise_components), iterfield=['realigned_file'], @@ -283,13 +290,15 @@ def create_workflow(files, smooth = MapNode(freesurfer.Smooth(), iterfield=['in_file'], - name = 'smooth') + name='smooth') smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1) - smooth.inputs.surface_fwhm=surf_fwhm - smooth.inputs.vol_fwhm=vol_fwhm + smooth.inputs.surface_fwhm = surf_fwhm + if vol_fwhm is None: + vol_fwhm = 2 * slice_thickness + smooth.inputs.vol_fwhm = vol_fwhm wf.connect(filter2, 'out_file', smooth, 'in_file') wf.connect(register, 'out_reg_file', smooth, 'reg_file') - + bandpass = MapNode(fsl.TemporalFilter(), iterfield=['in_file'], name='bandpassfilter') @@ -302,30 +311,38 @@ def create_workflow(files, else: bandpass.inputs.lowpass_sigma = 1 / (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') + return wf if __name__ == "__main__": import argparse dcmfile = '/software/data/sad_resting/500000-32-1.dcm' niifile = '/software/data/sad_resting/resting.nii.gz' + subject_id = 'SAD_024' + dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' + niifile = '/software/data/sad_resting/allyson/Resting1.nii' + fieldmaps = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', + '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] + echo_time = 0.7 + subject_id = '308' #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' - TR, slice_times = get_info(dcmfile) + TR, slice_times, slice_thickness = get_info(dcmfile) wf = create_workflow(niifile, - 'SAD_024', + subject_id, n_vol=2, despike=True, TR=TR, slice_times=slice_times, - vol_fwhm=4, + slice_thickness=np.round(slice_thickness), lowpass_freq=-1, highpass_freq=-1 ) wf.config['execution'].update(**{'hash_method': 'content', 'remove_unnecessary_outputs': False}) wf.base_dir = os.getcwd() - wf.run() + wf.run(plugin='MultiProc') ''' #fieldmap dewarping From 8393f3b872670577153f670f52e2a8452f73e52d Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 14 Aug 2013 07:54:47 -0400 Subject: [PATCH 08/60] changes filter1 form fsl.FilterRegressor to fsl.GLM --- examples/rsfmri_preprocessing.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index b7ba9906fc..e8d6451195 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -264,12 +264,20 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 
'outlier_files', createfilter1, 'outliers') - filter1 = MapNode(fsl.FilterRegressor(filter_all=True), - iterfield=['in_file', 'design_file'], - name='filtermotion') + #filter1 = MapNode(fsl.FilterRegressor(filter_all=True), + # iterfield=['in_file', 'design_file'], + # name='filtermotion') + #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') + #wf.connect(createfilter1, 'out_files', filter1, 'design_file') + #wf.connect(masktransform, 'transformed_file', filter1, 'mask') + + filter1 = MapNode(fsl.GLM(), + iterfield=['in_file', 'design_file'], + name='filtermotion') wf.connect(tsnr, 'detrended_file', filter1, 'in_file') wf.connect(createfilter1, 'out_files', filter1, 'design_file') - wf.connect(masktransform, 'transformed_file', filter1, 'mask') + wf.connect(masktransform,'transformed_file',filter1,'mask') + createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components'], @@ -278,7 +286,9 @@ def create_workflow(files, iterfield=['realigned_file'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components - wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + #wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + wf.connect(filter1, 'out_res', createfilter2,'realigned_file') + wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') filter2 = MapNode(fsl.FilterRegressor(filter_all=True), From dca8707b38ab1f8b659a19e76ce9af268ebe9020 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 14 Aug 2013 19:26:39 -0400 Subject: [PATCH 09/60] added dewarp and datasink format --- examples/rsfmri_preprocessing.py | 128 +++++++++++++++++++++++-------- 1 file changed, 94 insertions(+), 34 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index e8d6451195..2630276e7f 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -143,13 +143,17 @@ def create_workflow(files, TR=None, slice_times=None, slice_thickness=None, - fieldmap_images=None, + fieldmap_images=[], norm_threshold=1, num_components=6, vol_fwhm=None, surf_fwhm=10, - lowpass_freq=.08, - highpass_freq=.9): + lowpass_freq=-1, + highpass_freq=-1, + sink_directory=os.getcwd(), + FM_TEdiff=2.46, + FM_sigma=2, + FM_echo_spacing=.7): wf = Workflow(name='resting') @@ -195,7 +199,19 @@ def create_workflow(files, register.inputs.out_fsl_file = True if fieldmap_images: - pass + fieldmap=Node(interface=EPIDeWarp(), name ='fieldmap_unwarp') + dewarper=MapNode(interface=fsl.FUGUE(), iterfield=['in_file'], name = 'dewarper') + fieldmap.inputs.tediff=FM_TEdiff + fieldmap.inputs.esp=FM_echo_spacing + fieldmap.inputs.sigma=FM_sigma + fieldmap.inputs.mag_file=fieldmap_images[0] + fieldmap.inputs.dph_file=fieldmap_images[1] + wf.connect(calc_median,'median_file',fieldmap,'exf_file') + wf.connect(tsnr, 'detrended_file', dewarper, 'in_file') + wf.connect(fieldmap,'exf_mask', dewarper,'mask_file') + wf.connect(fieldmap, 'vsm_file', dewarper,'shift_in_file') + wf.connect(fieldmap,'exfdw',register, 'source_file') + else: wf.connect(calc_median, 'median_file', register, 'source_file') @@ -218,8 +234,11 @@ def create_workflow(files, wmcsf.inputs.binary_file = 'wmcsf.nii.gz' wmcsf.inputs.erode = 1 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') - - wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') + + if fieldmap_images: + wf.connect(fieldmap,'exf_mask',wmcsftransform,'source_file') + else: + wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') 
wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file') wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file') @@ -230,7 +249,10 @@ def create_workflow(files, wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') masktransform = wmcsftransform.clone("masktransform") - wf.connect(calc_median, 'median_file', masktransform, 'source_file') + if fieldmap_images: + wf.connect(fieldmap,'exf_mask',masktransform,'source_file') + else: + wf.connect(calc_median, 'median_file', masktransform, 'source_file') wf.connect(register, 'out_reg_file', masktransform, 'reg_file') wf.connect(mask, 'binary_file', masktransform, 'target_file') @@ -243,7 +265,10 @@ def create_workflow(files, bound_by_brainmask=True, mask_type='file'), name="art") - wf.connect(tsnr, 'detrended_file', art, 'realigned_files') + if fieldmap_images: + wf.connect(dewarper,'unwarped_file',art,'realigned_files') + else: + wf.connect(tsnr, 'detrended_file', art, 'realigned_files') wf.connect(realign, 'par_file', art, 'realignment_parameters') wf.connect(masktransform, 'transformed_file', art, 'mask_file') @@ -264,19 +289,22 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') - #filter1 = MapNode(fsl.FilterRegressor(filter_all=True), - # iterfield=['in_file', 'design_file'], - # name='filtermotion') - #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') - #wf.connect(createfilter1, 'out_files', filter1, 'design_file') - #wf.connect(masktransform, 'transformed_file', filter1, 'mask') - - filter1 = MapNode(fsl.GLM(), - iterfield=['in_file', 'design_file'], - name='filtermotion') - wf.connect(tsnr, 'detrended_file', filter1, 'in_file') + filter1 = MapNode(fsl.FilterRegressor(filter_all=True), + iterfield=['in_file', 'design_file'], + name='filtermotion') + if fieldmap_images: + wf.connect(dewarper,'unwarped_file',filter1,'in_file') + else: + wf.connect(tsnr, 'detrended_file', filter1, 'in_file') wf.connect(createfilter1, 'out_files', filter1, 'design_file') - wf.connect(masktransform,'transformed_file',filter1,'mask') + wf.connect(masktransform, 'transformed_file', filter1, 'mask') + + # filter1 = MapNode(fsl.GLM(), +# iterfield=['in_file', 'design_file'], +# name='filtermotion') + #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') + # wf.connect(createfilter1, 'out_files', filter1, 'design_file') + #wf.connect(masktransform,'transformed_file',filter1,'mask') createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', @@ -286,10 +314,10 @@ def create_workflow(files, iterfield=['realigned_file'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components - #wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') - wf.connect(filter1, 'out_res', createfilter2,'realigned_file') + wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') + #wf.connect(filter1, 'out_res', createfilter2,'realigned_file') - wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') + wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file') filter2 = MapNode(fsl.FilterRegressor(filter_all=True), iterfield=['in_file', 'design_file'], @@ -322,23 +350,46 @@ def create_workflow(files, bandpass.inputs.lowpass_sigma = 1 / (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') + datasink=Node(interface=DataSink(), + name="datasink") + datasink.inputs.base_directory=sink_directory + 
wf.connect(despike,'out_file',datasink,'resting.despike') + wf.connect(realign,'par_file',datasink,'resting.motion') + wf.connect(tsnr,'tsnr_file',datasink,'resting.tsnr') + wf.connect(tsnr,'mean_file',datasink,'resting.tsnr.@mean') + wf.connect(tsnr,'stddev_file',datasink,'resting.tsnr.@stddev') + wf.connect(art,'norm_files',datasink,'resting.art') + wf.connect(art,'outlier_files',datasink,'resting.art.@outlier_files') + wf.connect(mask,'binary_file',datasink,'resting.mask') + wf.connect(masktransform,'transformed_file',datasink,'resting.mask.@transformed_file') + wf.connect(register,'out_reg_file',datasink,'resting.out_reg_file') + wf.connect(smooth,'smoothed_file',datasink,'resting.output.fullpass') + wf.connect(bandpass,'out_file',datasink,'resting.output.bandpassed') + wf.connect(createfilter1,'out_files',datasink,'resting.motion.@regressors') + wf.connect(createfilter2,'out_files',datasink,'resting.compcorr') + wf.connect(wmcsftransform,'transformed_file',datasink,'resting.compcorr.@transformed_file') + return wf if __name__ == "__main__": import argparse - dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - niifile = '/software/data/sad_resting/resting.nii.gz' - subject_id = 'SAD_024' - dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' - niifile = '/software/data/sad_resting/allyson/Resting1.nii' - fieldmaps = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', - '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] + #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' + #niifile = '/software/data/sad_resting/resting.nii.gz' + #subject_id = 'SAD_024' + #dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' + #niifile = '/software/data/sad_resting/allyson/Resting1.nii' + #fieldmaps = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', + # '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] echo_time = 0.7 subject_id = '308' #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' - + dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' + niifile= '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' + fieldmap_images=['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', + '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] TR, slice_times, slice_thickness = get_info(dcmfile) + wf = create_workflow(niifile, subject_id, n_vol=2, @@ -346,12 +397,21 @@ def create_workflow(files, TR=TR, slice_times=slice_times, slice_thickness=np.round(slice_thickness), - lowpass_freq=-1, - highpass_freq=-1 - ) + lowpass_freq=.08, + highpass_freq=.9, + vol_fwhm=6, + surf_fwhm=10, + sink_directory=os.path.abspath('/mindhive/scratch/Wed/cdla/resting/sink/'), + FM_TEdiff=2.46 , + FM_sigma=2 , + FM_echo_spacing=.7 , + fieldmap_images=fieldmap_images + ) + wf.config['execution'].update(**{'hash_method': 'content', 'remove_unnecessary_outputs': False}) wf.base_dir = os.getcwd() + wf.write_graph(graph2use='flat') wf.run(plugin='MultiProc') ''' From 687b96200cebc4331b878c122dcaddfd272aa4ba Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Thu, 15 Aug 2013 13:50:37 -0400 Subject: [PATCH 10/60] fix: glm fixes and pep8 cleanup --- examples/rsfmri_preprocessing.py | 197 +++++++++++---------------- nipype/interfaces/fsl/model.py | 5 +- nipype/pipeline/plugins/multiproc.py | 8 +- 3 files changed, 92 insertions(+), 118 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 9f36008300..248750195d 
100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -1,10 +1,6 @@ import os -from nipype.interfaces.base import CommandLine -CommandLine.set_default_terminal_output('allatonce') - -from nipype import (afni, fsl, freesurfer, nipy, Function, - DataGrabber, DataSink) +from nipype import (afni, fsl, freesurfer, nipy, Function, DataSink) from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect @@ -147,13 +143,13 @@ def create_workflow(files, norm_threshold=1, num_components=6, vol_fwhm=None, - surf_fwhm=10, + surf_fwhm=None, lowpass_freq=-1, highpass_freq=-1, - sink_directory=os.getcwd(), - FM_TEdiff=2.46, - FM_sigma=2, - FM_echo_spacing=.7): + sink_directory=os.getcwd(), + FM_TEdiff=2.46, + FM_sigma=2, + FM_echo_spacing=.7): wf = Workflow(name='resting') @@ -199,19 +195,20 @@ def create_workflow(files, register.inputs.out_fsl_file = True if fieldmap_images: - fieldmap=Node(interface=EPIDeWarp(), name ='fieldmap_unwarp') - dewarper=MapNode(interface=fsl.FUGUE(), iterfield=['in_file'], name = 'dewarper') - fieldmap.inputs.tediff=FM_TEdiff - fieldmap.inputs.esp=FM_echo_spacing - fieldmap.inputs.sigma=FM_sigma - fieldmap.inputs.mag_file=fieldmap_images[0] - fieldmap.inputs.dph_file=fieldmap_images[1] - wf.connect(calc_median,'median_file',fieldmap,'exf_file') - wf.connect(tsnr, 'detrended_file', dewarper, 'in_file') - wf.connect(fieldmap,'exf_mask', dewarper,'mask_file') - wf.connect(fieldmap, 'vsm_file', dewarper,'shift_in_file') - wf.connect(fieldmap,'exfdw',register, 'source_file') - + fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp') + fieldmap.inputs.tediff = FM_TEdiff + fieldmap.inputs.esp = FM_echo_spacing + fieldmap.inputs.sigma = FM_sigma + fieldmap.inputs.mag_file = fieldmap_images[0] + fieldmap.inputs.dph_file = fieldmap_images[1] + wf.connect(calc_median, 'median_file', fieldmap, 'exf_file') + + dewarper = MapNode(interface=fsl.FUGUE(), iterfield=['in_file'], + name='dewarper') + wf.connect(tsnr, 'detrended_file', dewarper, 'in_file') + wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file') + wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file') + wf.connect(fieldmap, 'exfdw', register, 'source_file') else: wf.connect(calc_median, 'median_file', register, 'source_file') @@ -234,11 +231,11 @@ def create_workflow(files, wmcsf.inputs.binary_file = 'wmcsf.nii.gz' wmcsf.inputs.erode = 1 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') - + if fieldmap_images: - wf.connect(fieldmap,'exf_mask',wmcsftransform,'source_file') + wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file') else: - wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') + wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file') wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file') wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file') @@ -250,7 +247,7 @@ def create_workflow(files, masktransform = wmcsftransform.clone("masktransform") if fieldmap_images: - wf.connect(fieldmap,'exf_mask',masktransform,'source_file') + wf.connect(fieldmap, 'exf_mask', masktransform, 'source_file') else: wf.connect(calc_median, 'median_file', masktransform, 'source_file') wf.connect(register, 'out_reg_file', masktransform, 'reg_file') @@ -266,7 +263,7 @@ def create_workflow(files, mask_type='file'), name="art") if fieldmap_images: - wf.connect(dewarper,'unwarped_file',art,'realigned_files') + wf.connect(dewarper, 'unwarped_file', art, 'realigned_files') else: 
wf.connect(tsnr, 'detrended_file', art, 'realigned_files') wf.connect(realign, 'par_file', @@ -289,38 +286,16 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') -<<<<<<< HEAD - filter1 = MapNode(fsl.FilterRegressor(filter_all=True), - iterfield=['in_file', 'design_file'], + filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz'), + iterfield=['in_file', 'design'], name='filtermotion') if fieldmap_images: - wf.connect(dewarper,'unwarped_file',filter1,'in_file') + wf.connect(dewarper, 'unwarped_file', filter1, 'in_file') else: wf.connect(tsnr, 'detrended_file', filter1, 'in_file') -======= - #filter1 = MapNode(fsl.FilterRegressor(filter_all=True), - # iterfield=['in_file', 'design_file'], - # name='filtermotion') - #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') - #wf.connect(createfilter1, 'out_files', filter1, 'design_file') - #wf.connect(masktransform, 'transformed_file', filter1, 'mask') - - filter1 = MapNode(fsl.GLM(), - iterfield=['in_file', 'design_file'], - name='filtermotion') - wf.connect(tsnr, 'detrended_file', filter1, 'in_file') ->>>>>>> 39793ef71c08dae4a64f789808bd957362e0b729 - wf.connect(createfilter1, 'out_files', filter1, 'design_file') - wf.connect(masktransform,'transformed_file',filter1,'mask') - - - # filter1 = MapNode(fsl.GLM(), -# iterfield=['in_file', 'design_file'], -# name='filtermotion') - #wf.connect(tsnr, 'detrended_file', filter1, 'in_file') - # wf.connect(createfilter1, 'out_files', filter1, 'design_file') - #wf.connect(masktransform,'transformed_file',filter1,'mask') + wf.connect(createfilter1, 'out_files', filter1, 'design') + wf.connect(masktransform, 'transformed_file', filter1, 'mask') createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components'], @@ -329,34 +304,27 @@ def create_workflow(files, iterfield=['realigned_file'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components -<<<<<<< HEAD - wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') - #wf.connect(filter1, 'out_res', createfilter2,'realigned_file') - - wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file') -======= - #wf.connect(filter1, 'out_file', createfilter2, 'realigned_file') - wf.connect(filter1, 'out_res', createfilter2,'realigned_file') - + wf.connect(filter1, 'out_res', createfilter2, 'realigned_file') wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') ->>>>>>> 39793ef71c08dae4a64f789808bd957362e0b729 - filter2 = MapNode(fsl.FilterRegressor(filter_all=True), - iterfield=['in_file', 'design_file'], + filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz'), + iterfield=['in_file', 'design'], name='filtercompcorr') - wf.connect(filter1, 'out_file', filter2, 'in_file') - wf.connect(createfilter2, 'out_files', filter2, 'design_file') + wf.connect(filter1, 'out_res', filter2, 'in_file') + wf.connect(createfilter2, 'out_files', filter2, 'design') wf.connect(masktransform, 'transformed_file', filter2, 'mask') smooth = MapNode(freesurfer.Smooth(), iterfield=['in_file'], name='smooth') smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1) + if surf_fwhm is None: + surf_fwhm = 5 * slice_thickness smooth.inputs.surface_fwhm = surf_fwhm if vol_fwhm is None: vol_fwhm = 2 * slice_thickness smooth.inputs.vol_fwhm = vol_fwhm - wf.connect(filter2, 'out_file', smooth, 'in_file') + wf.connect(filter2, 'out_res', smooth, 'in_file') wf.connect(register, 'out_reg_file', smooth, 
'reg_file') bandpass = MapNode(fsl.TemporalFilter(), @@ -372,44 +340,50 @@ def create_workflow(files, bandpass.inputs.lowpass_sigma = 1 / (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') - datasink=Node(interface=DataSink(), - name="datasink") - datasink.inputs.base_directory=sink_directory - wf.connect(despike,'out_file',datasink,'resting.despike') - wf.connect(realign,'par_file',datasink,'resting.motion') - wf.connect(tsnr,'tsnr_file',datasink,'resting.tsnr') - wf.connect(tsnr,'mean_file',datasink,'resting.tsnr.@mean') - wf.connect(tsnr,'stddev_file',datasink,'resting.tsnr.@stddev') - wf.connect(art,'norm_files',datasink,'resting.art') - wf.connect(art,'outlier_files',datasink,'resting.art.@outlier_files') - wf.connect(mask,'binary_file',datasink,'resting.mask') - wf.connect(masktransform,'transformed_file',datasink,'resting.mask.@transformed_file') - wf.connect(register,'out_reg_file',datasink,'resting.out_reg_file') - wf.connect(smooth,'smoothed_file',datasink,'resting.output.fullpass') - wf.connect(bandpass,'out_file',datasink,'resting.output.bandpassed') - wf.connect(createfilter1,'out_files',datasink,'resting.motion.@regressors') - wf.connect(createfilter2,'out_files',datasink,'resting.compcorr') - wf.connect(wmcsftransform,'transformed_file',datasink,'resting.compcorr.@transformed_file') - + datasink = Node(interface=DataSink(), name="datasink") + datasink.inputs.base_directory = sink_directory + wf.connect(despike, 'out_file', datasink, 'resting.despike') + wf.connect(realign, 'par_file', datasink, 'resting.motion') + wf.connect(tsnr, 'tsnr_file', datasink, 'resting.tsnr') + wf.connect(tsnr, 'mean_file', datasink, 'resting.tsnr.@mean') + wf.connect(tsnr, 'stddev_file', datasink, 'resting.tsnr.@stddev') + wf.connect(art, 'norm_files', datasink, 'resting.art') + wf.connect(art, 'outlier_files', datasink, 'resting.art.@outlier_files') + wf.connect(mask, 'binary_file', datasink, 'resting.mask') + wf.connect(masktransform, 'transformed_file', + datasink, 'resting.mask.@transformed_file') + wf.connect(register, 'out_reg_file', datasink, 'resting.out_reg_file') + wf.connect(smooth, 'smoothed_file', datasink, 'resting.output.fullpass') + wf.connect(bandpass, 'out_file', datasink, 'resting.output.bandpassed') + wf.connect(createfilter1, 'out_files', + datasink, 'resting.motion.@regressors') + wf.connect(createfilter2, 'out_files', datasink, 'resting.compcorr') + wf.connect(wmcsftransform, 'transformed_file', + datasink, 'resting.compcorr.@transformed_file') return wf if __name__ == "__main__": import argparse - #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - #niifile = '/software/data/sad_resting/resting.nii.gz' - #subject_id = 'SAD_024' - #dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' - #niifile = '/software/data/sad_resting/allyson/Resting1.nii' - #fieldmaps = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', - # '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] + from socket import getfqdn + if not 'mit.edu' in getfqdn(): + #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' + #niifile = '/software/data/sad_resting/resting.nii.gz' + #subject_id = 'SAD_024' + dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' + niifile = '/software/data/sad_resting/allyson/Resting1.nii' + fieldmap_images = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', + '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] + sink = os.path.join(os.getcwd(), 'output') + else: + #dcmfile = 
'/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' + #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' + dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' + niifile = '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' + fieldmap_images = ['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', + '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] + sink = os.path.abspath('/mindhive/scratch/Wed/cdla/resting/sink/') echo_time = 0.7 subject_id = '308' - #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' - #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' - dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' - niifile= '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' - fieldmap_images=['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', - '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] TR, slice_times, slice_thickness = get_info(dcmfile) wf = create_workflow(niifile, @@ -420,15 +394,13 @@ def create_workflow(files, slice_times=slice_times, slice_thickness=np.round(slice_thickness), lowpass_freq=.08, - highpass_freq=.9, - vol_fwhm=6, - surf_fwhm=10, - sink_directory=os.path.abspath('/mindhive/scratch/Wed/cdla/resting/sink/'), - FM_TEdiff=2.46 , - FM_sigma=2 , - FM_echo_spacing=.7 , - fieldmap_images=fieldmap_images - ) + highpass_freq=.9, + sink_directory=sink, + FM_TEdiff=2.46, + FM_sigma=2, + FM_echo_spacing=.7, + fieldmap_images=fieldmap_images + ) wf.config['execution'].update(**{'hash_method': 'content', 'remove_unnecessary_outputs': False}) @@ -437,9 +409,6 @@ def create_workflow(files, wf.run(plugin='MultiProc') ''' -#fieldmap dewarping -unwarp = MapNode(EPIDeWarp(), name='dewarp') - #convert to grayordinates def to_grayordinates(): return grayordinates diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 1fbfe8e56c..61f5124de7 100755 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -1794,7 +1794,6 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name in ('out_file'): - return fname_presuffix(self.inputs.in_file, - suffix='_glm.txt', use_ext=False) + if name in ['out_file']: + return self._gen_fname(self.inputs.in_file, suffix='_glm') return None diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index d9718e3ccc..fe24878750 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -75,8 +75,14 @@ def _get_result(self, taskid): def _submit_job(self, node, updatehash=False): self._taskid += 1 + try: + if node.inputs.terminal_output == 'stream': + node.inputs.terminal_output = 'file' + except: + pass self._taskresult[self._taskid] = self.pool.apply_async(run_node, - (node, updatehash,)) + (node, + updatehash,)) return self._taskid def _report_crash(self, node, result=None): From db6f5cfbfb63df50114d9e35d27ea25b4f3d47ee Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Thu, 15 Aug 2013 16:02:44 -0400 Subject: [PATCH 11/60] fix: force demean and ensure sigma calculation returns float --- examples/rsfmri_preprocessing.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 248750195d..08db38af28 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -239,8 +239,8 @@ def create_workflow(files, wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file') 
wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file') - mask.inputs.dilate = 3 mask.inputs.binary_file = 'mask.nii.gz' + mask.inputs.dilate = int(slice_thickness) + 1 mask.inputs.erode = int(slice_thickness) mask.inputs.min = 0.5 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') @@ -286,7 +286,8 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') - filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz'), + filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz', + demean=True), iterfield=['in_file', 'design'], name='filtermotion') if fieldmap_images: @@ -307,7 +308,8 @@ def create_workflow(files, wf.connect(filter1, 'out_res', createfilter2, 'realigned_file') wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') - filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz'), + filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz', + demean=True), iterfield=['in_file', 'design'], name='filtercompcorr') wf.connect(filter1, 'out_res', filter2, 'in_file') @@ -333,11 +335,11 @@ def create_workflow(files, if highpass_freq < 0: bandpass.inputs.highpass_sigma = -1 else: - bandpass.inputs.highpass_sigma = 1 / (2 * TR * highpass_freq) + bandpass.inputs.highpass_sigma = 1. / (2 * TR * highpass_freq) if lowpass_freq < 0: bandpass.inputs.lowpass_sigma = -1 else: - bandpass.inputs.lowpass_sigma = 1 / (2 * TR * lowpass_freq) + bandpass.inputs.lowpass_sigma = 1. / (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') datasink = Node(interface=DataSink(), name="datasink") @@ -365,12 +367,13 @@ def create_workflow(files, if __name__ == "__main__": import argparse from socket import getfqdn - if not 'mit.edu' in getfqdn(): + if not 'ba3.mit.edu' in getfqdn(): #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' #niifile = '/software/data/sad_resting/resting.nii.gz' #subject_id = 'SAD_024' dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' niifile = '/software/data/sad_resting/allyson/Resting1.nii' + fieldmap_images = [] fieldmap_images = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] sink = os.path.join(os.getcwd(), 'output') From c30a4a0d8df1339d3028545dc09a75a8ad5d02dd Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Thu, 15 Aug 2013 17:40:50 -0400 Subject: [PATCH 12/60] fix: cleaned up datasink organization --- examples/rsfmri_preprocessing.py | 36 +++++++++++++++++--------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 08db38af28..c10675bd35 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -344,24 +344,27 @@ def create_workflow(files, datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory - wf.connect(despike, 'out_file', datasink, 'resting.despike') - wf.connect(realign, 'par_file', datasink, 'resting.motion') - wf.connect(tsnr, 'tsnr_file', datasink, 'resting.tsnr') - wf.connect(tsnr, 'mean_file', datasink, 'resting.tsnr.@mean') - wf.connect(tsnr, 'stddev_file', datasink, 'resting.tsnr.@stddev') - wf.connect(art, 'norm_files', datasink, 'resting.art') - wf.connect(art, 'outlier_files', datasink, 'resting.art.@outlier_files') + datasink.inputs.container = subject_id + datasink.inputs.regexp_substitutions = (r'(_.*)(\d+/)', r'run\2') + 
wf.connect(despike, 'out_file', datasink, 'resting.qa.despike') + wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') + wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.@tsnr') + wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.@tsnr_mean') + wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev') + wf.connect(art, 'norm_files', datasink, 'resting.qa.@art') + wf.connect(art, 'outlier_files', datasink, 'resting.qa.@art_outlier_files') wf.connect(mask, 'binary_file', datasink, 'resting.mask') wf.connect(masktransform, 'transformed_file', datasink, 'resting.mask.@transformed_file') - wf.connect(register, 'out_reg_file', datasink, 'resting.out_reg_file') - wf.connect(smooth, 'smoothed_file', datasink, 'resting.output.fullpass') - wf.connect(bandpass, 'out_file', datasink, 'resting.output.bandpassed') + wf.connect(register, 'out_reg_file', datasink, 'resting.registration') + wf.connect(register, 'min_cost_file', + datasink, 'resting.registration.@mincost') + wf.connect(smooth, 'smoothed_file', datasink, 'resting.timeseries.fullpass') + wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed') wf.connect(createfilter1, 'out_files', - datasink, 'resting.motion.@regressors') - wf.connect(createfilter2, 'out_files', datasink, 'resting.compcorr') - wf.connect(wmcsftransform, 'transformed_file', - datasink, 'resting.compcorr.@transformed_file') + datasink, 'resting.regress.@regressors') + wf.connect(createfilter2, 'out_files', + datasink, 'resting.regress.@compcorr') return wf if __name__ == "__main__": @@ -405,11 +408,10 @@ def create_workflow(files, fieldmap_images=fieldmap_images ) - wf.config['execution'].update(**{'hash_method': 'content', - 'remove_unnecessary_outputs': False}) + wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) wf.base_dir = os.getcwd() wf.write_graph(graph2use='flat') - wf.run(plugin='MultiProc') + wf.run() #(plugin='MultiProc') ''' #convert to grayordinates From c30e92eca6bce8bb222d8120652f50fa5a3ed975 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 19 Aug 2013 12:50:31 -0400 Subject: [PATCH 13/60] enh: some parcellation time series --- examples/rsfmri_preprocessing.py | 47 ++++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index c10675bd35..2c0fd3c802 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -149,7 +149,8 @@ def create_workflow(files, sink_directory=os.getcwd(), FM_TEdiff=2.46, FM_sigma=2, - FM_echo_spacing=.7): + FM_echo_spacing=.7, + target_subject='fsaverage4'): wf = Workflow(name='resting') @@ -342,6 +343,44 @@ def create_workflow(files, bandpass.inputs.lowpass_sigma = 1. 
/ (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') + aparctransform = wmcsftransform.clone("aparctransform") + if fieldmap_images: + wf.connect(fieldmap, 'exf_mask', aparctransform, 'source_file') + else: + wf.connect(calc_median, 'median_file', aparctransform, 'source_file') + wf.connect(register, 'out_reg_file', aparctransform, 'reg_file') + wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparctransform, 'target_file') + + sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True, + default_color_table=True), + iterfield=['in_file'], + name='aparc_ts') + wf.connect(aparctransform, 'transformed_file', + sampleaparc, 'segmentation_file') + wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') + + samplerlh = MapNode(freesurfer.SampleToSurface(), + iterfield=['source_file'], + name='sampler_lh') + samplerlh.inputs.sampling_method = "average" + samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) + samplerlh.inputs.sampling_units = "frac" + samplerlh.inputs.interp_method = "trilinear" + samplerlh.inputs.cortex_mask = True + samplerlh.inputs.target_subject = target_subject + samplerlh.inputs.out_type = 'niigz' + samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] + + samplerrh = samplerlh.clone('sampler_rh') + + samplerlh.inputs.hemi = 'lh' + wf.connect(bandpass, 'out_file', samplerlh, 'source_file') + wf.connect(register, 'out_reg_file', samplerlh, 'reg_file') + + samplerrh.set_input('hemi', 'rh') + wf.connect(bandpass, 'out_file', samplerrh, 'source_file') + wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') + datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id @@ -365,6 +404,10 @@ def create_workflow(files, datasink, 'resting.regress.@regressors') wf.connect(createfilter2, 'out_files', datasink, 'resting.regress.@compcorr') + wf.connect(sampleaparc, 'summary_file', + datasink, 'resting.parcellations.aparc') + wf.connect(sampleaparc, 'avgwf_txt_file', + datasink, 'resting.parcellations.aparc.@avgwf') return wf if __name__ == "__main__": @@ -411,7 +454,7 @@ def create_workflow(files, wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) wf.base_dir = os.getcwd() wf.write_graph(graph2use='flat') - wf.run() #(plugin='MultiProc') + wf.run() # (plugin='MultiProc') ''' #convert to grayordinates From 7caa670fa55e2320f184dc608fa862c3becb26b9 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:44:23 -0400 Subject: [PATCH 14/60] fix: cleaned prov encoding to store dictionaries and lists as prov json --- nipype/interfaces/base.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index e5fecc78b2..d60e8a2486 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -701,18 +701,28 @@ def safe_encode(x): return pm.Literal(int(x), pm.XSD['integer']) if isinstance(x, (float,)): return pm.Literal(x, pm.XSD['float']) + if isinstance(x, dict): + outdict = {} + for key, value in x.items(): + encoded_value = safe_encode(value) + if isinstance(encoded_value, (pm.Literal,)): + outdict[key] = encoded_value.json_representation() + else: + outdict[key] = encoded_value + return pm.Literal(json.dumps(outdict), pm.XSD['string']) + if isinstance(x, list): + outlist = [] + for value in x: + encoded_value = safe_encode(value) + if isinstance(encoded_value, (pm.Literal,)): + 
outlist.append(encoded_value.json_representation()) + else: + outlist.append(encoded_value) + return pm.Literal(json.dumps(outlist), pm.XSD['string']) try: - if isinstance(x, dict): - outdict = {} - for key, value in x.items(): - outdict[key] = safe_encode(value) - return pm.Literal(json.dumps(outdict), pm.XSD['string']) - if isinstance(x, list): - outlist = [safe_encode(value) for value in x] - return pm.Literal(json.dumps(outlist), pm.XSD['string']) - return pm.Literal(dumps(x), - nipype['pickle']) - except TypeError: + return pm.Literal(dumps(x), nipype['pickle']) + except TypeError, e: + iflogger.info(e) return pm.Literal("Could not encode", pm.XSD['string']) From aa31885dabb5bd37c735786aba6a834374951872 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:46:12 -0400 Subject: [PATCH 15/60] enh: added new options to apply volume transform --- nipype/interfaces/freesurfer/preprocess.py | 34 +++++++++++++++------- 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index dddeef5392..6bca6b6b1e 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -784,6 +784,8 @@ class ApplyVolTransformInputSpec(FSTraitedSpec): desc='Output template volume', mandatory=True) tal = traits.Bool(argstr='--tal', xor=_targ_xor, mandatory=True, desc='map to a sub FOV of MNI305 (with --reg only)') + tal_resolution = traits.Float(argstr="--talres %.10f", + desc="Resolution to sample when using tal") fs_target = traits.Bool(argstr='--fstarg', xor=_targ_xor, mandatory=True, requires=['reg_file'], desc='use orig.mgz from subject in regfile as target') @@ -805,10 +807,30 @@ class ApplyVolTransformInputSpec(FSTraitedSpec): desc='set matrix = identity and use subject for any templates') inverse = traits.Bool(desc='sample from target to source', argstr='--inv') - interp = traits.Enum('trilin', 'nearest', argstr='--interp %s', + interp = traits.Enum('trilin', 'nearest', 'cubic', argstr='--interp %s', desc='Interpolation method ( or nearest)') no_resample = traits.Bool(desc='Do not resample; just change vox2ras matrix', argstr='--no-resample') + m3z_file = File(argstr="--m3z %s", + desc=('This is the morph to be applied to the volume. ' + 'Unless the morph is in mri/transforms (eg.: for ' + 'talairach.m3z computed by reconall), you will need ' + 'to specify the full path to this morph and use the ' + '--noDefM3zPath flag.')) + no_ded_m3z_path = traits.Bool(argstr="--noDefM3zPath", + requires=['m3z_file'], + desc=('To be used with the m3z flag. ' + 'Instructs the code not to look for the' + 'm3z morph in the default location ' + '(SUBJECTS_DIR/subj/mri/transforms), ' + 'but instead just use the path ' + 'indicated in --m3z.')) + + invert_morph = traits.Bool(argstr="--inv-morph", + requires=['m3z_file'], + desc=('Compute and use the inverse of the ' + 'non-linear morph to resample the input ' + 'volume. 
To be used by --m3z.')) class ApplyVolTransformOutputSpec(TraitedSpec): @@ -853,7 +875,7 @@ def _get_outfile(self): def _list_outputs(self): outputs = self.output_spec().get() - outputs['transformed_file'] = self._get_outfile() + outputs['transformed_file'] = os.path.abspath(self._get_outfile()) return outputs def _gen_filename(self, name): @@ -1192,11 +1214,3 @@ def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None - -''' -interfaces to do: - -mri_vol2surf -mri_surf2vol -mri_surf2surf -''' From dbfbb3c18eaa503c7481ddd47995f81cfefb1329 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:46:40 -0400 Subject: [PATCH 16/60] fix: changed warning to logging --- nipype/pipeline/engine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 082ec48e4e..479d11506a 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1211,7 +1211,7 @@ def hash_exists(self, updatehash=False): outdir = self.output_dir() hashfiles = glob(os.path.join(outdir, '*.json')) if len(hashfiles) > 1: - warn('Removing multiple hashfiles and forcing node to rerun') + logger.info('Removing multiple hashfiles and forcing node to rerun') for hashfile in hashfiles: os.unlink(hashfile) hashfile = os.path.join(outdir, '_0x%s.json' % hashvalue) From 8a4a8007889ebee08f81cab77db84d0ff12b909a Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:48:24 -0400 Subject: [PATCH 17/60] api: changed smoothing sigmas to accept float in antsRegistration --- nipype/interfaces/ants/registration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 73121cbc77..c55b9cfede 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -283,7 +283,7 @@ class RegistrationInputSpec(ANTSCommandInputSpec): traits.Float()))) # Convergence flags number_of_iterations = traits.List(traits.List(traits.Int())) - smoothing_sigmas = traits.List(traits.List(traits.Int()), mandatory=True) + smoothing_sigmas = traits.List(traits.List(traits.Float()), mandatory=True) sigma_units = traits.List(traits.Enum('mm', 'vox'), requires=['smoothing_sigmas'], desc="units for smoothing sigmas", mandatory=True) @@ -370,7 +370,7 @@ class Registration(ANTSCommand): >>> reg3.cmdline 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1x0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2x1x0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' - # Test collapse transforms flag + >>> # Test collapse transforms flag >>> reg4 = copy.deepcopy(reg) >>> reg4.inputs.collapse_output_transforms = True >>> outputs = reg4._list_outputs() @@ -378,7 +378,7 @@ class Registration(ANTSCommand): {'reverse_invert_flags': [True, False], 'inverse_composite_transform': 
['.../nipype/testing/data/output_InverseComposite.h5'], 'warped_image': '.../nipype/testing/data/output_warped_image.nii.gz', 'inverse_warped_image': , 'forward_invert_flags': [False, False], 'reverse_transforms': ['.../nipype/testing/data/output_0GenericAffine.mat', '.../nipype/testing/data/output_1InverseWarp.nii.gz'], 'composite_transform': ['.../nipype/testing/data/output_Composite.h5'], 'forward_transforms': ['.../nipype/testing/data/output_0GenericAffine.mat', '.../nipype/testing/data/output_1Warp.nii.gz']} >>> reg4.aggregate_outputs() #doctest: +SKIP - # Test multiple metrics per stage + >>> # Test multiple metrics per stage >>> reg5 = copy.deepcopy(reg) >>> reg5.inputs.metric = ['CC', ['CC', 'Mattes']] >>> reg5.inputs.metric_weight = [1, [.5]*2] From 5e3579178f1a29327fe22825f7e2ca802e1ad974 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:50:03 -0400 Subject: [PATCH 18/60] enh: updated antsApplyTransforms to accept input image type --- nipype/interfaces/ants/resampling.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 4ab18c5522..01a97850b1 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -208,9 +208,16 @@ def _list_outputs(self): class ApplyTransformsInputSpec(ANTSCommandInputSpec): - dimension = traits.Enum( - 3, 2, argstr='--dimensionality %d', usedefault=True, - desc='image dimension (2 or 3)') + dimension = traits.Enum(2, 3, 4, argstr='--dimensionality %d', + desc=('This option forces the image to be treated ' + 'as a specified-dimensional image. If not ' + 'specified, antsWarp tries to infer the ' + 'dimensionality from the input image.')) + input_image_type = traits.Enum(0, 1, 2, 3, + argstr='--input-image-type %d', + desc=('Option specifying the input image ' + 'type of scalar (default), vector, ' + 'tensor, or time series.')) input_image = File(argstr='--input %s', mandatory=True, desc=('image to apply transformation to (generally a ' 'coregistered functional)'), From 9e043c66b87b8e9e7e1e0ed9045e6ff4e38df7e8 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 08:52:06 -0400 Subject: [PATCH 19/60] enh: updated to use ants for structural registration to OASIS template from mindboggle --- examples/rsfmri_preprocessing.py | 132 ++++++++++++++++++++++++++++++- 1 file changed, 131 insertions(+), 1 deletion(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 2c0fd3c802..c93c8c87c2 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -1,12 +1,14 @@ import os -from nipype import (afni, fsl, freesurfer, nipy, Function, DataSink) +from nipype import (ants, afni, fsl, freesurfer, nipy, Function, DataSink) from nipype import Workflow, Node, MapNode from nipype.algorithms.rapidart import ArtifactDetect from nipype.algorithms.misc import TSNR from nipype.interfaces.fsl.utils import EPIDeWarp from nipype.interfaces.io import FreeSurferSource +from nipype.interfaces.c3 import C3dAffineTool +from nipype.interfaces.utility import Merge import numpy as np @@ -132,6 +134,27 @@ def extract_noise_components(realigned_file, mask_file, num_components=6): return components_file +def extract_subrois(timeseries_file, label_file, indices): + import os + import nibabel as nb + import numpy as np + img = nb.load(timeseries_file) + data = img.get_data() + roiimg = nb.load(label_file) + rois = roiimg.get_data() + out_ts_file = 
os.path.join(os.getcwd(), 'timeseries.txt') + with open(out_ts_file, 'wt') as fp: + for fsindex, cmaindex in sorted(indices.items()): + ijk = np.nonzero(rois == cmaindex) + ts = data[ijk] + print fsindex, cmaindex, ts.shape + for i0, row in enumerate(ts): + fp.writelines('%d,%d,%d,%d,' % (fsindex, ijk[0][i0], + ijk[1][i0], ijk[2][i0]) + + ','.join(['%.10f' % val for val in row])) + return out_ts_file + + def create_workflow(files, subject_id, n_vol=0, @@ -355,6 +378,10 @@ def create_workflow(files, default_color_table=True), iterfield=['in_file'], name='aparc_ts') + sampleaparc.inputs.segment_id = [8] + range(10, 14) + [17, 18, 26, 47] + \ + range(49, 55) + [58] + range(1001, 1036) + \ + range(2001, 2036) + wf.connect(aparctransform, 'transformed_file', sampleaparc, 'segmentation_file') wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') @@ -381,6 +408,109 @@ def create_workflow(files, wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') + # antsRegistration + reg = Node(ants.Registration(), + name='antsRegister') + #its=10000x111110x11110 + reg.inputs.output_transform_prefix = "output_" + reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN'] + reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)] + #reg.inputs.number_of_iterations = [[10000, 111110, 11110]]*3 + [[100, 50, 30]] + reg.inputs.number_of_iterations = [[100, 100, 100]]*3 + [[100, 20, 10]] + reg.inputs.dimension = 3 + reg.inputs.write_composite_transform = True + reg.inputs.collapse_output_transforms = False + reg.inputs.metric = ['Mattes']*3 + [['Mattes', 'CC']] + reg.inputs.metric_weight = [1]*3 + [[0.5, 0.5]] + reg.inputs.radius_or_number_of_bins = [32]*3 + [[32, 4]] + reg.inputs.sampling_strategy = ['Regular']*3 + [[None, None]] + reg.inputs.sampling_percentage = [0.3]*3 +[[None, None]] + reg.inputs.convergence_threshold = [1.e-8]*3 + [-0.01] + reg.inputs.convergence_window_size = [20]*3 + [5] + reg.inputs.smoothing_sigmas = [[4,2,1]]*3 + [[1,0.5,0]] + reg.inputs.sigma_units = ['vox'] * 4 + reg.inputs.shrink_factors = [[6,4,2]] + [[3,2,1]]*2 + [[4,2,1]] + reg.inputs.use_estimate_learning_rate_once = [True]*4 + reg.inputs.use_histogram_matching = [False]*3 + [True] # This is the default + reg.inputs.output_warped_image = 'output_warped_image.nii.gz' + reg.inputs.fixed_image = \ + os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + reg.inputs.num_threads = 4 + reg.inputs.terminal_output = 'file' + + convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii') + wf.connect(fssource, 'T1', convert, 'in_file') + maskT1 = Node(fsl.BinaryMaths(operation='mul'), name='maskT1') + wf.connect(mask, 'binary_file', maskT1, 'operand_file') + wf.connect(convert, 'out_file', maskT1, 'in_file') + wf.connect(maskT1, 'out_file', reg, 'moving_image') + + convert2itk = MapNode(C3dAffineTool(), + iterfield=['transform_file', 'source_file'], + name='convert2itk') + convert2itk.inputs.fsl2ras = True + convert2itk.inputs.itk_transform = True + wf.connect(register, 'out_fsl_file', convert2itk, 'transform_file') + if fieldmap_images: + wf.connect(fieldmap, 'exf_mask', convert2itk, 'source_file') + else: + wf.connect(calc_median, 'median_file', convert2itk, 'source_file') + wf.connect(convert, 'out_file', convert2itk, 'reference_file') + + pickfirst = lambda x : x[0] + merge = MapNode(Merge(2), iterfield=['in2'], name='mergexfm') + wf.connect(convert2itk, 'itk_transform', merge, 'in2') + wf.connect(reg, 
('composite_transform', pickfirst), merge, 'in1') + + sample2mni = MapNode(ants.ApplyTransforms(), + iterfield=['input_image', 'transforms'], + name='sample2mni') + sample2mni.inputs.input_image_type = 3 + sample2mni.inputs.interpolation = 'BSpline' + sample2mni.inputs.invert_transform_flags = [False, False] + sample2mni.inputs.reference_image = \ + os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + sample2mni.inputs.terminal_output = 'file' + + wf.connect(bandpass, 'out_file', sample2mni, 'input_image') + wf.connect(merge, 'out', sample2mni, 'transforms') + + ''' + sample2mni = MapNode(freesurfer.ApplyVolTransform(), + iterfield=['source_file'], + name='sample2mni') + sample2mni.inputs.m3z_file = 'talairach.m3z' + sample2mni.inputs.target_file = os.path.join(os.environ['FREESURFER_HOME'], + 'subjects', 'fsaverage', + 'mri', 'aparc+aseg.mgz') + sample2mni.inputs.transformed_file = 'funcInMNI.nii' + sample2mni.inputs.interp = 'cubic' + wf.connect(bandpass, 'out_file', sample2mni, 'source_file') + wf.connect(register, 'out_reg_file', sample2mni, 'reg_file') + ''' + + ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file', + 'indices'], + output_names=['subcort_ts.txt'], + function=extract_subrois), + iterfield=['timeseries_file'], overwrite=True, + name='getsubcortts') + ''' + ts2txt.inputs.indices = [8] + range(10, 14) + [17, 18, 26, 47] + \ + range(49, 55) + [58] + ts2txt.inputs.mask_file = os.path.join(os.environ['FREESURFER_HOME'], + 'subjects', 'fsaverage', + 'mri', 'aparc+aseg.mgz') + ''' + ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + \ + range(49, 55) + [58], + [39, 60, 37, 58, 56, 48, 32, 30, + 38, 59, 36, 57, 55, 47, 31, 23])) + ts2txt.inputs.label_file = \ + os.path.abspath(('OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152' + '_2mm.nii.gz')) + wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file') + datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id From 1c13c4fffc2f1cbae7c06eb349a03c5dae74d4e4 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 09:28:50 -0400 Subject: [PATCH 20/60] enh: updated datasink --- examples/rsfmri_preprocessing.py | 35 ++++++++------------------------ 1 file changed, 8 insertions(+), 27 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index c93c8c87c2..9902826eab 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -147,7 +147,6 @@ def extract_subrois(timeseries_file, label_file, indices): for fsindex, cmaindex in sorted(indices.items()): ijk = np.nonzero(rois == cmaindex) ts = data[ijk] - print fsindex, cmaindex, ts.shape for i0, row in enumerate(ts): fp.writelines('%d,%d,%d,%d,' % (fsindex, ijk[0][i0], ijk[1][i0], ijk[2][i0]) + @@ -475,33 +474,12 @@ def create_workflow(files, wf.connect(bandpass, 'out_file', sample2mni, 'input_image') wf.connect(merge, 'out', sample2mni, 'transforms') - ''' - sample2mni = MapNode(freesurfer.ApplyVolTransform(), - iterfield=['source_file'], - name='sample2mni') - sample2mni.inputs.m3z_file = 'talairach.m3z' - sample2mni.inputs.target_file = os.path.join(os.environ['FREESURFER_HOME'], - 'subjects', 'fsaverage', - 'mri', 'aparc+aseg.mgz') - sample2mni.inputs.transformed_file = 'funcInMNI.nii' - sample2mni.inputs.interp = 'cubic' - wf.connect(bandpass, 'out_file', sample2mni, 'source_file') - wf.connect(register, 'out_reg_file', sample2mni, 'reg_file') - ''' - ts2txt 
= MapNode(Function(input_names=['timeseries_file', 'label_file', 'indices'], - output_names=['subcort_ts.txt'], + output_names=['out_file'], function=extract_subrois), iterfield=['timeseries_file'], overwrite=True, name='getsubcortts') - ''' - ts2txt.inputs.indices = [8] + range(10, 14) + [17, 18, 26, 47] + \ - range(49, 55) + [58] - ts2txt.inputs.mask_file = os.path.join(os.environ['FREESURFER_HOME'], - 'subjects', 'fsaverage', - 'mri', 'aparc+aseg.mgz') - ''' ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + \ range(49, 55) + [58], [39, 60, 37, 58, 56, 48, 32, 30, @@ -538,6 +516,13 @@ def create_workflow(files, datasink, 'resting.parcellations.aparc') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'resting.parcellations.aparc.@avgwf') + wf.connect(samplerlh, 'out_file', + datasink, 'resting.parcellations.grayo.@left') + wf.connect(samplerrh, 'out_file', + datasink, 'resting.parcellations.grayo.@right') + wf.connect(ts2txt, 'out_file', + datasink, 'resting.parcellations.grayo.@subcortical') + return wf if __name__ == "__main__": @@ -587,10 +572,6 @@ def create_workflow(files, wf.run() # (plugin='MultiProc') ''' -#convert to grayordinates -def to_grayordinates(): - return grayordinates - #compute similarity matrix and partial correlation def compute_similarity(): return matrix From 4730b20b58819b8e6328422d6805294ec222c44c Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 09:33:28 -0400 Subject: [PATCH 21/60] enh: save mni timeseries --- examples/rsfmri_preprocessing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 9902826eab..9aa64b9319 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -508,6 +508,7 @@ def create_workflow(files, datasink, 'resting.registration.@mincost') wf.connect(smooth, 'smoothed_file', datasink, 'resting.timeseries.fullpass') wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed') + wf.connect(sample2mni, 'output_image', ts2txt, 'resting.timeseries.mni') wf.connect(createfilter1, 'out_files', datasink, 'resting.regress.@regressors') wf.connect(createfilter2, 'out_files', From 4e7958e54aaa6ed6d05324f9eaf4f21001e3065f Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 10:25:08 -0400 Subject: [PATCH 22/60] tst: fixed ants doctests to indicate smoothing sigmas --- nipype/interfaces/ants/registration.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index c55b9cfede..f4d512c462 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -356,19 +356,19 @@ class Registration(ANTSCommand): >>> reg1.inputs.winsorize_lower_quantile = 0.025 >>> reg1.inputs.collapse_linear_transforms_to_fixed_image_header = False >>> reg1.cmdline - 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1x0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 
2x1x0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' + 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' >>> reg1.run() #doctest: +SKIP >>> reg2 = copy.deepcopy(reg) >>> reg2.inputs.winsorize_upper_quantile = 0.975 >>> reg2.cmdline - 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1x0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2x1x0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' + 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' >>> reg3 = copy.deepcopy(reg) >>> reg3.inputs.winsorize_lower_quantile = 0.025 >>> reg3.inputs.winsorize_upper_quantile = 0.975 >>> reg3.cmdline - 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1x0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2x1x0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 
--winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' + 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' >>> # Test collapse transforms flag >>> reg4 = copy.deepcopy(reg) @@ -386,7 +386,7 @@ class Registration(ANTSCommand): >>> reg5.inputs.sampling_strategy = ['Random', None] # use default strategy in second stage >>> reg5.inputs.sampling_percentage = [0.05, [0.05, 0.10]] >>> reg5.cmdline - 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric CC[ fixed1.nii, moving1.nii, 1, 4, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1x0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric CC[ fixed1.nii, moving1.nii, 0.5, 32, Dense, 0.05 ] --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, Dense, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2x1x0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' + 'antsRegistration --collapse-linear-transforms-to-fixed-image-header 0 --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] --interpolation Linear --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] --metric CC[ fixed1.nii, moving1.nii, 1, 4, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] --metric CC[ fixed1.nii, moving1.nii, 0.5, 32, Dense, 0.05 ] --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, Dense, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' """ DEF_SAMPLING_STRATEGY = 'Dense' """The default sampling stratey argument.""" From 176574ef6762046744f674a68816ec448db5f0cb Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 12:20:57 -0400 Subject: [PATCH 23/60] fix: some datasink changes --- examples/rsfmri_preprocessing.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 9aa64b9319..5d87746a72 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -194,6 +194,10 @@ def create_workflow(files, realign = Node(nipy.FmriRealign4d(), 
name='realign') realign.inputs.tr = TR realign.inputs.time_interp = True + # FIX # dbg + # This double argsort is necessary to convert slice order to space order + # that's required by nipy realign currently. This will change in the next + # release of Nipy. realign.inputs.slice_order = np.argsort(np.argsort(slice_times)).tolist() if despike: wf.connect(despike, 'out_file', realign, 'in_file') @@ -478,7 +482,7 @@ def create_workflow(files, 'indices'], output_names=['out_file'], function=extract_subrois), - iterfield=['timeseries_file'], overwrite=True, + iterfield=['timeseries_file'], name='getsubcortts') ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + \ range(49, 55) + [58], @@ -495,20 +499,21 @@ def create_workflow(files, datasink.inputs.regexp_substitutions = (r'(_.*)(\d+/)', r'run\2') wf.connect(despike, 'out_file', datasink, 'resting.qa.despike') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') - wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.@tsnr') - wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.@tsnr_mean') + wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') + wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean') wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev') - wf.connect(art, 'norm_files', datasink, 'resting.qa.@art') - wf.connect(art, 'outlier_files', datasink, 'resting.qa.@art_outlier_files') + wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') + wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') + wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files') wf.connect(mask, 'binary_file', datasink, 'resting.mask') wf.connect(masktransform, 'transformed_file', datasink, 'resting.mask.@transformed_file') wf.connect(register, 'out_reg_file', datasink, 'resting.registration') wf.connect(register, 'min_cost_file', - datasink, 'resting.registration.@mincost') + datasink, 'resting.qa.bbreg.@mincost') wf.connect(smooth, 'smoothed_file', datasink, 'resting.timeseries.fullpass') wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed') - wf.connect(sample2mni, 'output_image', ts2txt, 'resting.timeseries.mni') + wf.connect(sample2mni, 'output_image', datasink, 'resting.timeseries.mni') wf.connect(createfilter1, 'out_files', datasink, 'resting.regress.@regressors') wf.connect(createfilter2, 'out_files', From 6fc397728989672a44fa5d890abf349bdfce6c89 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 12:54:45 -0400 Subject: [PATCH 24/60] doc: improved documentation and fixed PEP8 --- examples/rsfmri_preprocessing.py | 179 +++++++++++++++++++++++-------- 1 file changed, 137 insertions(+), 42 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 5d87746a72..6ad4031b91 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -1,3 +1,18 @@ +#!/usr/bin/env python +""" +A preprocessing workflow for Siemens resting state data. 
+ +This workflow makes use of: + +- AFNI +- ANTS +- C3D_Affine_Tool +- DicomStack +- FreeSurfer +- FSL +- NiPy + +""" import os from nipype import (ants, afni, fsl, freesurfer, nipy, Function, DataSink) @@ -13,8 +28,20 @@ import numpy as np -#robust mean def median(in_files): + """Computes an average of the median of each realigned timeseries + + Parameters + ---------- + + in_files: one or more realigned Nifti 4D time series + + Returns + ------- + + out_file: a 3D Nifti file + """ + import os import nibabel as nb import numpy as np @@ -43,6 +70,14 @@ def get_aparc_aseg(files): def get_info(dicom_files): + """Given a Siemens dicom file return metadata + + Returns + ------- + RepetitionTime + Slice Acquisition Times + Spacing between slices + """ from dcmstack.extract import default_extractor from dicom import read_file from nipype.utils.filemanip import filename_to_list @@ -54,7 +89,8 @@ def get_info(dicom_files): def motion_regressors(motion_params, order=2, derivatives=2): - """ + """Compute motion regressors upto given order and derivative + motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic) """ from nipype.utils.filemanip import filename_to_list @@ -80,6 +116,21 @@ def motion_regressors(motion_params, order=2, derivatives=2): def build_filter1(motion_params, comp_norm, outliers): """Builds a regressor set comprisong motion parameters, composite norm and outliers + + The outliers are added as a single time point column for each outlier + + + Parameters + ---------- + + motion_params: a text file containing motion parameters and its derivatives + comp_norm: a text file containing the composite norm + outliers: a text file containing 0-based outlier indices + + Returns + ------- + components_file: a text file containing all the regressors + """ from nipype.utils.filemanip import filename_to_list import numpy as np @@ -109,13 +160,13 @@ def extract_noise_components(realigned_file, mask_file, num_components=6): Parameters ---------- - realigned_file : - mask_file : - num_components : + realigned_file: a 4D Nifti file containing realigned volumes + mask_file: a 3D Nifti file containing white matter + ventricular masks + num_components: number of components to use for noise decomposition Returns ------- - components_file : + components_file: a text file containing the noise components """ import os @@ -135,6 +186,22 @@ def extract_noise_components(realigned_file, mask_file, num_components=6): def extract_subrois(timeseries_file, label_file, indices): + """Extract voxel time courses for each subcortical roi index + + Parameters + ---------- + + timeseries_file: a 4D Nifti file + label_file: a 3D file containing rois in the same space/size of the 4D file + indices: a list of indices for ROIs to extract. 
Currently a dictionary + mapping freesurfer indices to CMA/Label Fusion indices are being used + + Returns + ------- + out_file: a text file containing time courses for each voxel of each roi + The first four columns are: freesurfer index, i, j, k positions in the + label file + """ import os import nibabel as nb import numpy as np @@ -144,16 +211,21 @@ def extract_subrois(timeseries_file, label_file, indices): rois = roiimg.get_data() out_ts_file = os.path.join(os.getcwd(), 'timeseries.txt') with open(out_ts_file, 'wt') as fp: - for fsindex, cmaindex in sorted(indices.items()): + for fsindex, cmaindex in sorted(indices.items()): ijk = np.nonzero(rois == cmaindex) ts = data[ijk] for i0, row in enumerate(ts): fp.writelines('%d,%d,%d,%d,' % (fsindex, ijk[0][i0], - ijk[1][i0], ijk[2][i0]) + + ijk[1][i0], ijk[2][i0]) + ','.join(['%.10f' % val for val in row])) return out_ts_file +""" +Creates the main preprocessing workflow +""" + + def create_workflow(files, subject_id, n_vol=0, @@ -176,13 +248,14 @@ def create_workflow(files, wf = Workflow(name='resting') - #skip vols + # Skip starting volumes remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1), iterfield=['in_file'], name="remove_volumes") remove_vol.inputs.in_file = files - # despike + # Run AFNI's despike. This is always run, however, whether this is fed to + # realign depends on the input configuration despike = MapNode(afni.Despike(outputtype='NIFTI_GZ'), iterfield=['in_file'], name='despike') @@ -190,7 +263,7 @@ def create_workflow(files, wf.connect(remove_vol, 'roi_file', despike, 'in_file') - #nipy realign + # Run Nipy joint slice timing and realignment algorithm realign = Node(nipy.FmriRealign4d(), name='realign') realign.inputs.tr = TR realign.inputs.time_interp = True @@ -204,16 +277,18 @@ def create_workflow(files, else: wf.connect(remove_vol, 'roi_file', realign, 'in_file') - #TSNR + # Comute TSNR on realigned data regressing polynomials upto order 2 tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') wf.connect(realign, 'out_file', tsnr, 'in_file') + # Compute the median image across runs calc_median = Node(Function(input_names=['in_files'], output_names=['median_file'], function=median), name='median') wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') + # Coregister the median to the surface register = Node(freesurfer.BBRegister(), name='bbregister') register.inputs.subject_id = subject_id @@ -221,6 +296,7 @@ def create_workflow(files, register.inputs.contrast_type = 't2' register.inputs.out_fsl_file = True + # Compute fieldmaps and unwarp using them if fieldmap_images: fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp') fieldmap.inputs.tediff = FM_TEdiff @@ -239,26 +315,25 @@ def create_workflow(files, else: wf.connect(calc_median, 'median_file', register, 'source_file') + # Get the subject's freesurfer source directory fssource = Node(FreeSurferSource(), name='fssource') fssource.inputs.subject_id = subject_id fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] - #extract wm+csf, brain masks by eroding freesurfer lables + # Extract wm+csf, brain masks by eroding freesurfer lables and then + # transform the masks into the space of the median wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask') mask = wmcsf.clone('anatmask') - wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True, interp='nearest'), name='wmcsftransform') wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] - wmcsf.inputs.wm_ven_csf = True wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 
43, 44, 63] wmcsf.inputs.binary_file = 'wmcsf.nii.gz' wmcsf.inputs.erode = 1 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') - if fieldmap_images: wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file') else: @@ -271,7 +346,6 @@ def create_workflow(files, mask.inputs.erode = int(slice_thickness) mask.inputs.min = 0.5 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') - masktransform = wmcsftransform.clone("masktransform") if fieldmap_images: wf.connect(fieldmap, 'exf_mask', masktransform, 'source_file') @@ -280,7 +354,7 @@ def create_workflow(files, wf.connect(register, 'out_reg_file', masktransform, 'reg_file') wf.connect(mask, 'binary_file', masktransform, 'target_file') - #art outliers + # Compute Art outliers art = Node(interface=ArtifactDetect(use_differences=[True, False], use_norm=True, norm_threshold=norm_threshold, @@ -297,6 +371,7 @@ def create_workflow(files, art, 'realignment_parameters') wf.connect(masktransform, 'transformed_file', art, 'mask_file') + # Compute motion regressors motreg = Node(Function(input_names=['motion_params', 'order', 'derivatives'], output_names=['out_files'], @@ -304,6 +379,7 @@ def create_workflow(files, name='getmotionregress') wf.connect(realign, 'par_file', motreg, 'motion_params') + # Create a filter to remove motion and art confounds createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm', 'outliers'], output_names=['out_files'], @@ -313,6 +389,7 @@ def create_workflow(files, wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') + # Filter the motion and art confounds filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz', demean=True), iterfield=['in_file', 'design'], @@ -325,6 +402,7 @@ def create_workflow(files, wf.connect(createfilter1, 'out_files', filter1, 'design') wf.connect(masktransform, 'transformed_file', filter1, 'mask') + # Create a filter to remove noise components based on white matter and CSF createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components'], output_names=['out_files'], @@ -335,6 +413,7 @@ def create_workflow(files, wf.connect(filter1, 'out_res', createfilter2, 'realigned_file') wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file') + # Filter noise components filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz', demean=True), iterfield=['in_file', 'design'], @@ -343,6 +422,7 @@ def create_workflow(files, wf.connect(createfilter2, 'out_files', filter2, 'design') wf.connect(masktransform, 'transformed_file', filter2, 'mask') + # Smoothing using surface and volume smoothing smooth = MapNode(freesurfer.Smooth(), iterfield=['in_file'], name='smooth') @@ -356,6 +436,7 @@ def create_workflow(files, wf.connect(filter2, 'out_res', smooth, 'in_file') wf.connect(register, 'out_reg_file', smooth, 'reg_file') + # Bandpass filter the data bandpass = MapNode(fsl.TemporalFilter(), iterfield=['in_file'], name='bandpassfilter') @@ -369,6 +450,7 @@ def create_workflow(files, bandpass.inputs.lowpass_sigma = 1. 
/ (2 * TR * lowpass_freq) wf.connect(smooth, 'smoothed_file', bandpass, 'in_file') + # Convert aparc to subject functional space aparctransform = wmcsftransform.clone("aparctransform") if fieldmap_images: wf.connect(fieldmap, 'exf_mask', aparctransform, 'source_file') @@ -377,18 +459,21 @@ def create_workflow(files, wf.connect(register, 'out_reg_file', aparctransform, 'reg_file') wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparctransform, 'target_file') + # Sample the average time series in aparc ROIs sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True, default_color_table=True), iterfield=['in_file'], name='aparc_ts') - sampleaparc.inputs.segment_id = [8] + range(10, 14) + [17, 18, 26, 47] + \ - range(49, 55) + [58] + range(1001, 1036) + \ - range(2001, 2036) + sampleaparc.inputs.segment_id = ([8] + range(10, 14) + [17, 18, 26, 47] + + range(49, 55) + [58] + range(1001, 1036) + + range(2001, 2036)) wf.connect(aparctransform, 'transformed_file', sampleaparc, 'segmentation_file') wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') + # Sample the time series onto the surface of the target surface. Performs + # sampling into left and right hemisphere samplerlh = MapNode(freesurfer.SampleToSurface(), iterfield=['source_file'], name='sampler_lh') @@ -411,43 +496,51 @@ def create_workflow(files, wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') - # antsRegistration - reg = Node(ants.Registration(), - name='antsRegister') - #its=10000x111110x11110 + # Compute registration between the subject's structural and MNI template + # This is currently set to perform a very quick registration. However, the + # registration can be made significantly more accurate for cortical + # structures by increasing the number of iterations + # All parameters are set using the example from: + # + reg = Node(ants.Registration(), name='antsRegister') reg.inputs.output_transform_prefix = "output_" reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN'] reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)] - #reg.inputs.number_of_iterations = [[10000, 111110, 11110]]*3 + [[100, 50, 30]] - reg.inputs.number_of_iterations = [[100, 100, 100]]*3 + [[100, 20, 10]] + # reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 + + # [[100, 50, 30]]) + reg.inputs.number_of_iterations = [[100, 100, 100]] * 3 + [[100, 20, 10]] reg.inputs.dimension = 3 reg.inputs.write_composite_transform = True reg.inputs.collapse_output_transforms = False - reg.inputs.metric = ['Mattes']*3 + [['Mattes', 'CC']] - reg.inputs.metric_weight = [1]*3 + [[0.5, 0.5]] - reg.inputs.radius_or_number_of_bins = [32]*3 + [[32, 4]] - reg.inputs.sampling_strategy = ['Regular']*3 + [[None, None]] - reg.inputs.sampling_percentage = [0.3]*3 +[[None, None]] - reg.inputs.convergence_threshold = [1.e-8]*3 + [-0.01] - reg.inputs.convergence_window_size = [20]*3 + [5] - reg.inputs.smoothing_sigmas = [[4,2,1]]*3 + [[1,0.5,0]] + reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']] + reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]] + reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]] + reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]] + reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]] + reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01] + reg.inputs.convergence_window_size = [20] * 3 + [5] + reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]] reg.inputs.sigma_units = ['vox'] * 4 - 
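As an aside on the smoothing sigmas: with the float-valued trait they render on the antsRegistration command line in the 'AxBxCvox' form shown in the doctests earlier. A minimal sketch of that formatting, assumed to mirror (not reproduce) the interface's internal argument handling:

def format_sigmas(sigmas, units):
    # Join the per-level sigmas with 'x' and append the units suffix
    return '--smoothing-sigmas %s%s' % ('x'.join(str(float(s)) for s in sigmas), units)

print(format_sigmas([1, 0.5, 0], 'vox'))  # --smoothing-sigmas 1.0x0.5x0.0vox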
reg.inputs.shrink_factors = [[6,4,2]] + [[3,2,1]]*2 + [[4,2,1]] - reg.inputs.use_estimate_learning_rate_once = [True]*4 - reg.inputs.use_histogram_matching = [False]*3 + [True] # This is the default + reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]]*2 + [[4, 2, 1]] + reg.inputs.use_estimate_learning_rate_once = [True] * 4 + reg.inputs.use_histogram_matching = [False] * 3 + [True] reg.inputs.output_warped_image = 'output_warped_image.nii.gz' reg.inputs.fixed_image = \ os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') reg.inputs.num_threads = 4 reg.inputs.terminal_output = 'file' + # Convert T1.mgz to nifti for using with ANTS convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii') wf.connect(fssource, 'T1', convert, 'in_file') + + # Mask the T1.mgz file with the brain mask computed earlier maskT1 = Node(fsl.BinaryMaths(operation='mul'), name='maskT1') wf.connect(mask, 'binary_file', maskT1, 'operand_file') wf.connect(convert, 'out_file', maskT1, 'in_file') wf.connect(maskT1, 'out_file', reg, 'moving_image') + # Convert the BBRegister transformation to ANTS ITK format convert2itk = MapNode(C3dAffineTool(), iterfield=['transform_file', 'source_file'], name='convert2itk') @@ -460,11 +553,13 @@ def create_workflow(files, wf.connect(calc_median, 'median_file', convert2itk, 'source_file') wf.connect(convert, 'out_file', convert2itk, 'reference_file') - pickfirst = lambda x : x[0] + # Concatenate the affine and ants transforms into a list + pickfirst = lambda x: x[0] merge = MapNode(Merge(2), iterfield=['in2'], name='mergexfm') wf.connect(convert2itk, 'itk_transform', merge, 'in2') wf.connect(reg, ('composite_transform', pickfirst), merge, 'in1') + # Apply the combined transform to the time series file sample2mni = MapNode(ants.ApplyTransforms(), iterfield=['input_image', 'transforms'], name='sample2mni') @@ -474,17 +569,17 @@ def create_workflow(files, sample2mni.inputs.reference_image = \ os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') sample2mni.inputs.terminal_output = 'file' - wf.connect(bandpass, 'out_file', sample2mni, 'input_image') wf.connect(merge, 'out', sample2mni, 'transforms') + # Sample the time series file for each subcortical roi ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file', 'indices'], output_names=['out_file'], function=extract_subrois), iterfield=['timeseries_file'], name='getsubcortts') - ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + \ + ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + range(49, 55) + [58], [39, 60, 37, 58, 56, 48, 32, 30, 38, 59, 36, 57, 55, 47, 31, 23])) @@ -493,6 +588,7 @@ def create_workflow(files, '_2mm.nii.gz')) wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file') + # Save the relevant data into an output directory datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id @@ -528,7 +624,6 @@ def create_workflow(files, datasink, 'resting.parcellations.grayo.@right') wf.connect(ts2txt, 'out_file', datasink, 'resting.parcellations.grayo.@subcortical') - return wf if __name__ == "__main__": From be783ea45d14a643cb9b5df99de63e854d0b802f Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 21 Aug 2013 13:48:06 -0400 Subject: [PATCH 25/60] minor changes --- examples/rsfmri_preprocessing.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 
c10675bd35..72da68ea2d 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -7,7 +7,6 @@ from nipype.algorithms.misc import TSNR from nipype.interfaces.fsl.utils import EPIDeWarp from nipype.interfaces.io import FreeSurferSource - import numpy as np @@ -79,6 +78,7 @@ def build_filter1(motion_params, comp_norm, outliers): """Builds a regressor set comprisong motion parameters, composite norm and outliers """ + from nipype.utils.filemanip import filename_to_list import numpy as np import os @@ -370,7 +370,8 @@ def create_workflow(files, if __name__ == "__main__": import argparse from socket import getfqdn - if not 'ba3.mit.edu' in getfqdn(): + from datetime import date + if not 'mit.edu' in getfqdn(): #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' #niifile = '/software/data/sad_resting/resting.nii.gz' #subject_id = 'SAD_024' @@ -381,13 +382,14 @@ def create_workflow(files, '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] sink = os.path.join(os.getcwd(), 'output') else: + os.environ['SUBJECTS_DIR']='/mindhive/xnat/surfaces/GATES/' #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' niifile = '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' fieldmap_images = ['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] - sink = os.path.abspath('/mindhive/scratch/Wed/cdla/resting/sink/') + sink = os.path.join('/mindhive/scratch/',date.today().strftime('%a'),'cdla','restingwf','output') echo_time = 0.7 subject_id = '308' TR, slice_times, slice_thickness = get_info(dcmfile) From 4148d4d79063474ce60aa78039e9b41b457648db Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 14:14:21 -0400 Subject: [PATCH 26/60] tst: trying to test travis error with pipeline --- nipype/pipeline/engine.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 479d11506a..3076399d23 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1211,6 +1211,7 @@ def hash_exists(self, updatehash=False): outdir = self.output_dir() hashfiles = glob(os.path.join(outdir, '*.json')) if len(hashfiles) > 1: + logger.info(hashfiles) logger.info('Removing multiple hashfiles and forcing node to rerun') for hashfile in hashfiles: os.unlink(hashfile) From 542af3adb4bb191f4c34f45d392ac401bb50be93 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Wed, 21 Aug 2013 17:41:48 -0400 Subject: [PATCH 27/60] enh: added registration output and reference image --- examples/rsfmri_preprocessing.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 6ad4031b91..a44ee2370e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -598,13 +598,19 @@ def create_workflow(files, wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean') wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev') + if fieldmap_images: + wf.connect(fieldmap, 'exf_mask', datasink, 'resting.reference') + else: + wf.connect(calc_median, 'median_file', datasink, 'resting.reference') wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') 
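For context, the art norm and outlier files routed into the QA folder here come from thresholding a composite motion norm. A rough numpy sketch of that idea (not rapidart's actual implementation; the 50 mm head radius and the translation/rotation column layout are assumptions):

import numpy as np

def flag_motion_outliers(motion_params, norm_threshold=1.0, head_radius=50.0):
    # motion_params: text file with one row per volume, translations (mm)
    # in the first three columns and rotations (radians) in the next three
    params = np.genfromtxt(motion_params)
    disp = np.hstack((params[:, :3], head_radius * params[:, 3:6]))
    # frame-to-frame change in the combined displacement trace
    delta = np.vstack((np.zeros((1, disp.shape[1])), np.diff(disp, axis=0)))
    composite_norm = np.sqrt((delta ** 2).sum(axis=1))
    return np.nonzero(composite_norm > norm_threshold)[0]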
wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files') wf.connect(mask, 'binary_file', datasink, 'resting.mask') wf.connect(masktransform, 'transformed_file', datasink, 'resting.mask.@transformed_file') - wf.connect(register, 'out_reg_file', datasink, 'resting.registration') + wf.connect(register, 'out_reg_file', datasink, 'resting.registration.bbreg') + wf.connect(reg, ('composite_transform', pickfirst), + datasink, 'resting.registration.ants') wf.connect(register, 'min_cost_file', datasink, 'resting.qa.bbreg.@mincost') wf.connect(smooth, 'smoothed_file', datasink, 'resting.timeseries.fullpass') From 25ab601d6ea841d2a378acda2b275792c83b3333 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Thu, 22 Aug 2013 18:26:31 -0400 Subject: [PATCH 28/60] enh: added argument parsing --- examples/rsfmri_preprocessing.py | 112 +++++++++++++++++++------------ 1 file changed, 68 insertions(+), 44 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index a44ee2370e..567ba8d835 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -12,6 +12,10 @@ - FSL - NiPy +For example: + +python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii -s subj001 -n 2 --despike -o output -p "plugin_args=dict(plugin='PBS', plugin_args=dict(qsub_args='-q many'))" + """ import os @@ -256,12 +260,12 @@ def create_workflow(files, # Run AFNI's despike. This is always run, however, whether this is fed to # realign depends on the input configuration - despike = MapNode(afni.Despike(outputtype='NIFTI_GZ'), + despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'), iterfield=['in_file'], name='despike') - #despike.plugin_args = {'qsub_args': '-l nodes=1:ppn='} + #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='} - wf.connect(remove_vol, 'roi_file', despike, 'in_file') + wf.connect(remove_vol, 'roi_file', despiker, 'in_file') # Run Nipy joint slice timing and realignment algorithm realign = Node(nipy.FmriRealign4d(), name='realign') @@ -273,7 +277,7 @@ def create_workflow(files, # release of Nipy. 
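A tiny worked example of that slice-order-to-space-order conversion, using made-up interleaved acquisition times:

import numpy as np

slice_times = [0.0, 1.5, 0.5, 2.0, 1.0]  # hypothetical: slices 0,2,4,1,3 acquired in that order
acq_order = np.argsort(slice_times)      # spatial slice indices sorted by acquisition time
space_order = np.argsort(acq_order)      # for each slice, its position in the acquisition
print(space_order.tolist())              # [0, 3, 1, 4, 2]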
realign.inputs.slice_order = np.argsort(np.argsort(slice_times)).tolist() if despike: - wf.connect(despike, 'out_file', realign, 'in_file') + wf.connect(despiker, 'out_file', realign, 'in_file') else: wf.connect(remove_vol, 'roi_file', realign, 'in_file') @@ -593,7 +597,7 @@ def create_workflow(files, datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id datasink.inputs.regexp_substitutions = (r'(_.*)(\d+/)', r'run\2') - wf.connect(despike, 'out_file', datasink, 'resting.qa.despike') + wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean') @@ -633,50 +637,70 @@ def create_workflow(files, return wf if __name__ == "__main__": - import argparse - from socket import getfqdn - if not 'ba3.mit.edu' in getfqdn(): - #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - #niifile = '/software/data/sad_resting/resting.nii.gz' - #subject_id = 'SAD_024' - dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' - niifile = '/software/data/sad_resting/allyson/Resting1.nii' - fieldmap_images = [] - fieldmap_images = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', - '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] - sink = os.path.join(os.getcwd(), 'output') - else: - #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' - #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' - dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' - niifile = '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' - fieldmap_images = ['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', - '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] - sink = os.path.abspath('/mindhive/scratch/Wed/cdla/resting/sink/') - echo_time = 0.7 - subject_id = '308' - TR, slice_times, slice_thickness = get_info(dcmfile) - - wf = create_workflow(niifile, - subject_id, - n_vol=2, - despike=True, + from argparse import ArgumentParser + parser = ArgumentParser(description=__doc__) + parser.add_argument("-d", "--dicom_file", dest="dicom_file", + help="an example dicom file from the resting series") + parser.add_argument("-f", "--files", dest="files", nargs="+", + help="4d nifti files for resting state", + required=True) + parser.add_argument("-s", "--subject_id", dest="subject_id", + help="FreeSurfer subject id", required=True) + parser.add_argument("-n", "--n_vol", dest="n_vol", default=0, type=int, + help="Volumes to skip at the beginning") + parser.add_argument("--despike", dest="despike", default=False, + action="store_true", help="Use despiked data") + parser.add_argument("--TR", dest="TR", default=None, + help="TR if dicom not provided") + parser.add_argument("--slice_times", dest="slice_times", nargs="+", + help="Slice times") + parser.add_argument("-l", "--lowpass_freq", dest="lowpass_freq", + default=-1, help="Low pass frequency (Hz)") + parser.add_argument("-u", "--highpass_freq", dest="highpass_freq", + default=-1, help="High pass frequency (Hz)") + parser.add_argument("-o", "--output_dir", dest="sink", + help="Output directory base") + parser.add_argument("-w", "--work_dir", dest="work_dir", + help="Output directory base") + parser.add_argument("-p", "--plugin_args", dest="plugin_args", + default='plugin_args=dict()', + help="Plugin args") + args = parser.parse_args() + + TR = args.TR + slice_times = args.slice_times 
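Note that the -l/-u cutoffs collected here are later converted to FSL filter sigmas (in volumes) inside the workflow via the 1/(2*TR*f) relation used by the bandpass node; a quick sanity check with made-up numbers:

def freq_to_sigma(cutoff_hz, TR):
    # A negative cutoff is passed through as -1, which is what the workflow
    # hands to the TemporalFilter node when that filter is not requested
    if cutoff_hz < 0:
        return -1
    return 1. / (2 * TR * cutoff_hz)

print(freq_to_sigma(0.08, 2.5))  # 2.5 volumes for a 0.08 Hz low-pass at TR = 2.5 s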
+ slice_thickness = None + if args.dicom_file: + TR, slice_times, slice_thickness = get_info(args.dicom_file) + + if slice_thickness is None: + from nibabel import load + img = load(args.files[0]) + slice_thickness = max(img.get_header().get_zooms()[:3]) + print TR, slice_times, slice_thickness + + wf = create_workflow([os.path.abspath(filename) for filename in args.files], + subject_id=args.subject_id, + n_vol=args.n_vol, + despike=args.despike, TR=TR, slice_times=slice_times, - slice_thickness=np.round(slice_thickness), - lowpass_freq=.08, - highpass_freq=.9, - sink_directory=sink, - FM_TEdiff=2.46, - FM_sigma=2, - FM_echo_spacing=.7, - fieldmap_images=fieldmap_images - ) + slice_thickness=slice_thickness, + lowpass_freq=args.lowpass_freq, + highpass_freq=args.highpass_freq, + sink_directory=os.path.abspath(args.sink)) + + if args.work_dir: + work_dir = os.path.abspath(args.workdir) + else: + work_dir = os.getcwd() wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) - wf.base_dir = os.getcwd() + wf.base_dir = work_dir wf.write_graph(graph2use='flat') - wf.run() # (plugin='MultiProc') + exec args.plugin_args + print plugin_args + wf.run(**plugin_args) ''' #compute similarity matrix and partial correlation From 13988664bfa968dff3299a5c117f4273a996727f Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Thu, 22 Aug 2013 18:33:41 -0400 Subject: [PATCH 29/60] fix: remove only json hash files --- nipype/pipeline/engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 3076399d23..54c0569ba9 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1209,7 +1209,7 @@ def hash_exists(self, updatehash=False): # of the dictionary itself. hashed_inputs, hashvalue = self._get_hashval() outdir = self.output_dir() - hashfiles = glob(os.path.join(outdir, '*.json')) + hashfiles = glob(os.path.join(outdir, '_0x*.json')) if len(hashfiles) > 1: logger.info(hashfiles) logger.info('Removing multiple hashfiles and forcing node to rerun') @@ -1310,7 +1310,7 @@ def run(self, updatehash=False): hashfile_unfinished) if isinstance(self, MapNode): # remove old json files - for filename in glob(os.path.join(outdir, '*.json')): + for filename in glob(os.path.join(outdir, '_0x*.json')): os.unlink(filename) outdir = make_output_dir(outdir) self._save_hashfile(hashfile_unfinished, hashed_inputs) From fe036bd8a0ce4d8ff92cd23bba78695f45d40898 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Fri, 23 Aug 2013 16:55:46 +0100 Subject: [PATCH 30/60] enh: writing out a single combined timeseries file --- examples/rsfmri_preprocessing.py | 35 +++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 567ba8d835..058684338e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -225,6 +225,24 @@ def extract_subrois(timeseries_file, label_file, indices): return out_ts_file +def combine_hemi(left, right): + """Combine left and right hemisphere time series into a single text file + """ + import os + from nibabel import load + import numpy as np + lh_data = load(left).get_data() + rh_data = load(right).get_data() + + indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], + 2000000 + np.arange(0, rh_data.shape[0])[:, None])) + all_data = np.hstack((indices, np.vstack((lh_data.squeeze(), + rh_data.squeeze())))) + filename = 'combined_surf.txt' + 
np.savetxt(filename, all_data, + fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] -1))) + return os.path.abspath(filename) + """ Creates the main preprocessing workflow """ @@ -485,7 +503,7 @@ def create_workflow(files, samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" - samplerlh.inputs.cortex_mask = True + #samplerlh.inputs.cortex_mask = True samplerlh.inputs.target_subject = target_subject samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] @@ -500,6 +518,15 @@ def create_workflow(files, wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') + # Combine left and right hemisphere to text file + combiner = MapNode(Function(input_names=['left', 'right'], + output_names=['out_file'], + function=combine_hemi), + iterfield=['left', 'right'], + name="combiner") + wf.connect(samplerlh, 'out_file', combiner, 'left') + wf.connect(samplerrh, 'out_file', combiner, 'right') + # Compute registration between the subject's structural and MNI template # This is currently set to perform a very quick registration. However, the # registration can be made significantly more accurate for cortical @@ -628,10 +655,8 @@ def create_workflow(files, datasink, 'resting.parcellations.aparc') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'resting.parcellations.aparc.@avgwf') - wf.connect(samplerlh, 'out_file', - datasink, 'resting.parcellations.grayo.@left') - wf.connect(samplerrh, 'out_file', - datasink, 'resting.parcellations.grayo.@right') + wf.connect(combiner, 'out_file', + datasink, 'resting.parcellations.grayo.@surface') wf.connect(ts2txt, 'out_file', datasink, 'resting.parcellations.grayo.@subcortical') return wf From ee3aa67c783e292bfe320f93a4fd2160a8cfa709 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 26 Aug 2013 05:32:27 -0400 Subject: [PATCH 31/60] fix: cleaned up substitutions and added multiple fsaverage targets --- examples/rsfmri_preprocessing.py | 37 ++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 058684338e..2e133deceb 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -27,7 +27,8 @@ from nipype.interfaces.fsl.utils import EPIDeWarp from nipype.interfaces.io import FreeSurferSource from nipype.interfaces.c3 import C3dAffineTool -from nipype.interfaces.utility import Merge +from nipype.interfaces.utility import Merge, IdentityInterface +from nipype.utils.filemanip import filename_to_list import numpy as np @@ -148,11 +149,10 @@ def build_filter1(motion_params, comp_norm, outliers): outlier_val = np.genfromtxt(filename_to_list(outliers)[idx]) except IOError: outlier_val = np.empty((0)) - if outlier_val.shape[0] != 0: - for index in outlier_val: - outlier_vector = np.zeros((out_params.shape[0], 1)) - outlier_vector[index] = 1 - out_params = np.hstack((out_params, outlier_vector)) + for index in np.atleast_1d(outlier_val): + outlier_vector = np.zeros((out_params.shape[0], 1)) + outlier_vector[index] = 1 + out_params = np.hstack((out_params, outlier_vector)) filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx) np.savetxt(filename, out_params, fmt="%.10f") out_files.append(filename) @@ -213,7 +213,7 @@ def extract_subrois(timeseries_file, label_file, indices): data = img.get_data() roiimg = nb.load(label_file) rois = 
roiimg.get_data() - out_ts_file = os.path.join(os.getcwd(), 'timeseries.txt') + out_ts_file = os.path.join(os.getcwd(), 'subcortical_timeseries.txt') with open(out_ts_file, 'wt') as fp: for fsindex, cmaindex in sorted(indices.items()): ijk = np.nonzero(rois == cmaindex) @@ -266,9 +266,10 @@ def create_workflow(files, FM_TEdiff=2.46, FM_sigma=2, FM_echo_spacing=.7, - target_subject='fsaverage4'): + target_subject=['fsaverage3', 'fsaverage4'], + name='resting'): - wf = Workflow(name='resting') + wf = Workflow(name=name) # Skip starting volumes remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1), @@ -383,6 +384,7 @@ def create_workflow(files, zintensity_threshold=3, parameter_source='NiPy', bound_by_brainmask=True, + save_plot=False, mask_type='file'), name="art") if fieldmap_images: @@ -494,8 +496,12 @@ def create_workflow(files, sampleaparc, 'segmentation_file') wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') + # Sample the time series onto the surface of the target surface. Performs # sampling into left and right hemisphere + target = Node(IdentityInterface(fields=['target_subject']), name='target') + target.iterables = ('target_subject', filename_to_list(target_subject)) + samplerlh = MapNode(freesurfer.SampleToSurface(), iterfield=['source_file'], name='sampler_lh') @@ -504,7 +510,6 @@ def create_workflow(files, samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" #samplerlh.inputs.cortex_mask = True - samplerlh.inputs.target_subject = target_subject samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] @@ -513,10 +518,12 @@ def create_workflow(files, samplerlh.inputs.hemi = 'lh' wf.connect(bandpass, 'out_file', samplerlh, 'source_file') wf.connect(register, 'out_reg_file', samplerlh, 'reg_file') - + wf.connect(target, 'target_subject', samplerlh, 'target_subject') + samplerrh.set_input('hemi', 'rh') wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') + wf.connect(target, 'target_subject', samplerrh, 'target_subject') # Combine left and right hemisphere to text file combiner = MapNode(Function(input_names=['left', 'right'], @@ -560,6 +567,7 @@ def create_workflow(files, os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') reg.inputs.num_threads = 4 reg.inputs.terminal_output = 'file' + reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'} # Convert T1.mgz to nifti for using with ANTS convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii') @@ -623,7 +631,8 @@ def create_workflow(files, datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id - datasink.inputs.regexp_substitutions = (r'(_.*)(\d+/)', r'run\2') + datasink.inputs.substitutions = [('_target_subject_', '')] + datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') #(r'(_.*)(\d+/)', r'run\2') wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') @@ -716,13 +725,13 @@ def create_workflow(files, sink_directory=os.path.abspath(args.sink)) if args.work_dir: - work_dir = os.path.abspath(args.workdir) + work_dir = os.path.abspath(args.work_dir) else: work_dir = os.getcwd() wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) wf.base_dir = work_dir - wf.write_graph(graph2use='flat') + 
#wf.write_graph(graph2use='flat') exec args.plugin_args print plugin_args wf.run(**plugin_args) From 19f80880681f6b15f795f7a5a0a332101a0cf00f Mon Sep 17 00:00:00 2001 From: cdla Date: Tue, 27 Aug 2013 14:35:42 -0400 Subject: [PATCH 32/60] added field map parse args to satra's most recent version --- examples/rsfmri_preprocessing.py | 233 +++++++++++++++++++++---------- 1 file changed, 156 insertions(+), 77 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 89e0de54f1..ad0f8d6e50 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -12,6 +12,10 @@ - FSL - NiPy +For example: + +python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii -s subj001 -n 2 --despike -o output -p "plugin_args=dict(plugin='PBS', plugin_args=dict(qsub_args='-q many'))" + """ import os @@ -22,12 +26,10 @@ from nipype.algorithms.misc import TSNR from nipype.interfaces.fsl.utils import EPIDeWarp from nipype.interfaces.io import FreeSurferSource - - from nipype.interfaces.c3 import C3dAffineTool -from nipype.interfaces.utility import Merge +from nipype.interfaces.utility import Merge, IdentityInterface +from nipype.utils.filemanip import filename_to_list ->>>>>>> 6fc397728989672a44fa5d890abf349bdfce6c89 import numpy as np @@ -135,7 +137,6 @@ def build_filter1(motion_params, comp_norm, outliers): components_file: a text file containing all the regressors """ - from nipype.utils.filemanip import filename_to_list import numpy as np import os @@ -148,11 +149,10 @@ def build_filter1(motion_params, comp_norm, outliers): outlier_val = np.genfromtxt(filename_to_list(outliers)[idx]) except IOError: outlier_val = np.empty((0)) - if outlier_val.shape[0] != 0: - for index in outlier_val: - outlier_vector = np.zeros((out_params.shape[0], 1)) - outlier_vector[index] = 1 - out_params = np.hstack((out_params, outlier_vector)) + for index in np.atleast_1d(outlier_val): + outlier_vector = np.zeros((out_params.shape[0], 1)) + outlier_vector[index] = 1 + out_params = np.hstack((out_params, outlier_vector)) filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx) np.savetxt(filename, out_params, fmt="%.10f") out_files.append(filename) @@ -213,7 +213,7 @@ def extract_subrois(timeseries_file, label_file, indices): data = img.get_data() roiimg = nb.load(label_file) rois = roiimg.get_data() - out_ts_file = os.path.join(os.getcwd(), 'timeseries.txt') + out_ts_file = os.path.join(os.getcwd(), 'subcortical_timeseries.txt') with open(out_ts_file, 'wt') as fp: for fsindex, cmaindex in sorted(indices.items()): ijk = np.nonzero(rois == cmaindex) @@ -225,6 +225,25 @@ def extract_subrois(timeseries_file, label_file, indices): return out_ts_file +def combine_hemi(left, right): + """Combine left and right hemisphere time series into a single text file + """ + import os + from nibabel import load + import numpy as np + lh_data = load(left).get_data() + rh_data = load(right).get_data() + + indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], + 2000000 + np.arange(0, rh_data.shape[0])[:, None])) + all_data = np.hstack((indices, np.vstack((lh_data.squeeze(), + rh_data.squeeze())))) + filename = 'combined_surf.txt' + np.savetxt(filename, all_data, + fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] -1))) + return os.path.abspath(filename) + + """ Creates the main preprocessing workflow """ @@ -248,9 +267,11 @@ def create_workflow(files, FM_TEdiff=2.46, FM_sigma=2, FM_echo_spacing=.7, - target_subject='fsaverage4'): + 
#target_subject=['fsaverage3', 'fsaverage4'], + target_subject=['fsaverage4'], + name='resting'): - wf = Workflow(name='resting') + wf = Workflow(name=name) # Skip starting volumes remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1), @@ -260,12 +281,12 @@ def create_workflow(files, # Run AFNI's despike. This is always run, however, whether this is fed to # realign depends on the input configuration - despike = MapNode(afni.Despike(outputtype='NIFTI_GZ'), + despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'), iterfield=['in_file'], name='despike') - #despike.plugin_args = {'qsub_args': '-l nodes=1:ppn='} + #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='} - wf.connect(remove_vol, 'roi_file', despike, 'in_file') + wf.connect(remove_vol, 'roi_file', despiker, 'in_file') # Run Nipy joint slice timing and realignment algorithm realign = Node(nipy.FmriRealign4d(), name='realign') @@ -277,7 +298,7 @@ def create_workflow(files, # release of Nipy. realign.inputs.slice_order = np.argsort(np.argsort(slice_times)).tolist() if despike: - wf.connect(despike, 'out_file', realign, 'in_file') + wf.connect(despiker, 'out_file', realign, 'in_file') else: wf.connect(remove_vol, 'roi_file', realign, 'in_file') @@ -365,6 +386,7 @@ def create_workflow(files, zintensity_threshold=3, parameter_source='NiPy', bound_by_brainmask=True, + save_plot=False, mask_type='file'), name="art") if fieldmap_images: @@ -402,7 +424,6 @@ def create_workflow(files, wf.connect(dewarper, 'unwarped_file', filter1, 'in_file') else: wf.connect(tsnr, 'detrended_file', filter1, 'in_file') - wf.connect(createfilter1, 'out_files', filter1, 'design') wf.connect(masktransform, 'transformed_file', filter1, 'mask') @@ -476,8 +497,12 @@ def create_workflow(files, sampleaparc, 'segmentation_file') wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') + # Sample the time series onto the surface of the target surface. 
Performs # sampling into left and right hemisphere + target = Node(IdentityInterface(fields=['target_subject']), name='target') + target.iterables = ('target_subject', filename_to_list(target_subject)) + samplerlh = MapNode(freesurfer.SampleToSurface(), iterfield=['source_file'], name='sampler_lh') @@ -485,8 +510,7 @@ def create_workflow(files, samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" - samplerlh.inputs.cortex_mask = True - samplerlh.inputs.target_subject = target_subject + #samplerlh.inputs.cortex_mask = True samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR'] @@ -495,10 +519,21 @@ def create_workflow(files, samplerlh.inputs.hemi = 'lh' wf.connect(bandpass, 'out_file', samplerlh, 'source_file') wf.connect(register, 'out_reg_file', samplerlh, 'reg_file') - + wf.connect(target, 'target_subject', samplerlh, 'target_subject') + samplerrh.set_input('hemi', 'rh') wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') + wf.connect(target, 'target_subject', samplerrh, 'target_subject') + + # Combine left and right hemisphere to text file + combiner = MapNode(Function(input_names=['left', 'right'], + output_names=['out_file'], + function=combine_hemi), + iterfield=['left', 'right'], + name="combiner") + wf.connect(samplerlh, 'out_file', combiner, 'left') + wf.connect(samplerrh, 'out_file', combiner, 'right') # Compute registration between the subject's structural and MNI template # This is currently set to perform a very quick registration. However, the @@ -529,10 +564,12 @@ def create_workflow(files, reg.inputs.use_estimate_learning_rate_once = [True] * 4 reg.inputs.use_histogram_matching = [False] * 3 + [True] reg.inputs.output_warped_image = 'output_warped_image.nii.gz' - reg.inputs.fixed_image = \ - os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + #reg.inputs.fixed_image = \ + # os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + reg.inputs.fixed_image='/software/brain_templates/OASIS-TRT-20_template_to_MNI152_2mm.nii.gz' reg.inputs.num_threads = 4 reg.inputs.terminal_output = 'file' + reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'} # Convert T1.mgz to nifti for using with ANTS convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii') @@ -570,8 +607,9 @@ def create_workflow(files, sample2mni.inputs.input_image_type = 3 sample2mni.inputs.interpolation = 'BSpline' sample2mni.inputs.invert_transform_flags = [False, False] - sample2mni.inputs.reference_image = \ - os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + #sample2mni.inputs.reference_image = \ + # os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') + sample2mni.inputs.reference_image='/software/brain_templates/OASIS-TRT-20_template_to_MNI152_2mm.nii.gz' sample2mni.inputs.terminal_output = 'file' wf.connect(bandpass, 'out_file', sample2mni, 'input_image') wf.connect(merge, 'out', sample2mni, 'transforms') @@ -587,17 +625,19 @@ def create_workflow(files, range(49, 55) + [58], [39, 60, 37, 58, 56, 48, 32, 30, 38, 59, 36, 57, 55, 47, 31, 23])) - ts2txt.inputs.label_file = \ - os.path.abspath(('OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152' - '_2mm.nii.gz')) + #ts2txt.inputs.label_file = \ + # os.path.abspath(('OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152' + # '_2mm.nii.gz')) + ts2txt.inputs.label_file = 
'/software/brain_templates/OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152_2mm.nii.gz' wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file') # Save the relevant data into an output directory datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id - datasink.inputs.regexp_substitutions = (r'(_.*)(\d+/)', r'run\2') - wf.connect(despike, 'out_file', datasink, 'resting.qa.despike') + datasink.inputs.substitutions = [('_target_subject_', '')] + datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') #(r'(_.*)(\d+/)', r'run\2') + wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean') @@ -628,61 +668,100 @@ def create_workflow(files, datasink, 'resting.parcellations.aparc') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'resting.parcellations.aparc.@avgwf') - wf.connect(samplerlh, 'out_file', - datasink, 'resting.parcellations.grayo.@left') - wf.connect(samplerrh, 'out_file', - datasink, 'resting.parcellations.grayo.@right') + wf.connect(combiner, 'out_file', + datasink, 'resting.parcellations.grayo.@surface') wf.connect(ts2txt, 'out_file', datasink, 'resting.parcellations.grayo.@subcortical') return wf if __name__ == "__main__": - import argparse - from socket import getfqdn - from datetime import date - if not 'mit.edu' in getfqdn(): - #dcmfile = '/software/data/sad_resting/500000-32-1.dcm' - #niifile = '/software/data/sad_resting/resting.nii.gz' - #subject_id = 'SAD_024' - dcmfile = '/software/data/sad_resting/allyson/562000-34-1.dcm' - niifile = '/software/data/sad_resting/allyson/Resting1.nii' - fieldmap_images = [] - fieldmap_images = ['/software/data/sad_resting/allyson/fieldmap_resting1.nii', - '/software/data/sad_resting/allyson/fieldmap_resting2.nii'] - sink = os.path.join(os.getcwd(), 'output') + from argparse import ArgumentParser + parser = ArgumentParser(description=__doc__) + parser.add_argument("-d", "--dicom_file", dest="dicom_file", + help="an example dicom file from the resting series") + parser.add_argument("-f", "--files", dest="files", nargs="+", + help="4d nifti files for resting state", + required=True) + parser.add_argument("-s", "--subject_id", dest="subject_id", + help="FreeSurfer subject id", required=True) + parser.add_argument("-n", "--n_vol", dest="n_vol", default=0, type=int, + help="Volumes to skip at the beginning") + parser.add_argument("--despike", dest="despike", default=False, + action="store_true", help="Use despiked data") + parser.add_argument("--TR", dest="TR", default=None, + help="TR if dicom not provided") + parser.add_argument("--slice_times", dest="slice_times", nargs="+", + help="Slice times") + parser.add_argument("-l", "--lowpass_freq", dest="lowpass_freq", + default=-1, help="Low pass frequency (Hz)") + parser.add_argument("-u", "--highpass_freq", dest="highpass_freq", + default=-1, help="High pass frequency (Hz)") + parser.add_argument("-o", "--output_dir", dest="sink", + help="Output directory base") + parser.add_argument("-w", "--work_dir", dest="work_dir", + help="Output directory base") + parser.add_argument("-p", "--plugin_args", dest="plugin_args", + default='plugin_args=dict()', + help="Plugin args") + parser.add_argument("--field_maps", dest="field_maps", nargs="+", + help="field map niftis") + 
parser.add_argument("--fm_echospacing",dest="echo_spacing", + help="field map echo spacing") + parser.add_argument("--fm_TE_diff", dest='TE_diff', + help="field map echo time difference") + parser.add_argument("--fm_sigma", dest='sigma', + help="field map sigma value") + args = parser.parse_args() + + TR = args.TR + slice_times = args.slice_times + slice_thickness = None + if args.dicom_file: + TR, slice_times, slice_thickness = get_info(args.dicom_file) + + if slice_thickness is None: + from nibabel import load + img = load(args.files[0]) + slice_thickness = max(img.get_header().get_zooms()[:3]) + if args.field_maps: + wf = create_workflow([os.path.abspath(filename) for filename in args.files], + subject_id=args.subject_id, + n_vol=args.n_vol, + despike=args.despike, + TR=TR, + slice_times=slice_times, + slice_thickness=slice_thickness, + lowpass_freq=args.lowpass_freq, + highpass_freq=args.highpass_freq, + sink_directory=os.path.abspath(args.sink), + fieldmap_images=args.field_maps, + FM_TEdiff=float(args.TE_diff), + FM_echo_spacing=float(args.echo_spacing), + FM_sigma=int(args.sigma)) + + else: + wf = create_workflow([os.path.abspath(filename) for filename in args.files], + subject_id=args.subject_id, + n_vol=args.n_vol, + despike=args.despike, + TR=TR, + slice_times=slice_times, + slice_thickness=slice_thickness, + lowpass_freq=args.lowpass_freq, + highpass_freq=args.highpass_freq, + sink_directory=os.path.abspath(args.sink)) + + if args.work_dir: + work_dir = os.path.abspath(args.work_dir) else: - os.environ['SUBJECTS_DIR']='/mindhive/xnat/surfaces/GATES/' - #dcmfile = '/mindhive/xnat/dicom_storage/sad/SAD_024/dicoms/500000-32-1.dcm' - #niifile = '/mindhive/xnat/data/sad/SAD_024/BOLD/resting.nii.gz' - dcmfile = '/mindhive/xnat/data/GATES/308/dicoms/562000-34-1.dcm' - niifile = '/mindhive/xnat/data/GATES/308/niftis/Resting1.nii' - fieldmap_images = ['/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting1.nii', - '/mindhive/xnat/data/GATES/308/niftis/fieldmap_resting2.nii'] - sink = os.path.join('/mindhive/scratch/',date.today().strftime('%a'),'cdla','restingwf','output') - echo_time = 0.7 - subject_id = '308' - TR, slice_times, slice_thickness = get_info(dcmfile) - - wf = create_workflow(niifile, - subject_id, - n_vol=2, - despike=True, - TR=TR, - slice_times=slice_times, - slice_thickness=np.round(slice_thickness), - lowpass_freq=.08, - highpass_freq=.9, - sink_directory=sink, - FM_TEdiff=2.46, - FM_sigma=2, - FM_echo_spacing=.7, - fieldmap_images=fieldmap_images - ) + work_dir = os.getcwd() wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) - wf.base_dir = os.getcwd() - wf.write_graph(graph2use='flat') - wf.run() # (plugin='MultiProc') + wf.base_dir = work_dir + #wf.write_graph(graph2use='flat') + exec args.plugin_args + print plugin_args + wf.run(**plugin_args) ''' #compute similarity matrix and partial correlation From cc0d20a0bee3f85f4d0da70a02384398d85a4540 Mon Sep 17 00:00:00 2001 From: cdla Date: Tue, 27 Aug 2013 14:39:19 -0400 Subject: [PATCH 33/60] minor changes --- examples/rsfmri_preprocessing.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index ad0f8d6e50..d753411808 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -267,8 +267,7 @@ def create_workflow(files, FM_TEdiff=2.46, FM_sigma=2, FM_echo_spacing=.7, - #target_subject=['fsaverage3', 'fsaverage4'], - target_subject=['fsaverage4'], + 
target_subject=['fsaverage3', 'fsaverage4'], name='resting'): wf = Workflow(name=name) @@ -564,9 +563,8 @@ def create_workflow(files, reg.inputs.use_estimate_learning_rate_once = [True] * 4 reg.inputs.use_histogram_matching = [False] * 3 + [True] reg.inputs.output_warped_image = 'output_warped_image.nii.gz' - #reg.inputs.fixed_image = \ - # os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') - reg.inputs.fixed_image='/software/brain_templates/OASIS-TRT-20_template_to_MNI152_2mm.nii.gz' + reg.inputs.fixed_image = \ + os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') reg.inputs.num_threads = 4 reg.inputs.terminal_output = 'file' reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'} @@ -607,9 +605,8 @@ def create_workflow(files, sample2mni.inputs.input_image_type = 3 sample2mni.inputs.interpolation = 'BSpline' sample2mni.inputs.invert_transform_flags = [False, False] - #sample2mni.inputs.reference_image = \ - # os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') - sample2mni.inputs.reference_image='/software/brain_templates/OASIS-TRT-20_template_to_MNI152_2mm.nii.gz' + sample2mni.inputs.reference_image = \ + os.path.abspath('OASIS-TRT-20_template_to_MNI152_2mm.nii.gz') sample2mni.inputs.terminal_output = 'file' wf.connect(bandpass, 'out_file', sample2mni, 'input_image') wf.connect(merge, 'out', sample2mni, 'transforms') @@ -625,10 +622,9 @@ def create_workflow(files, range(49, 55) + [58], [39, 60, 37, 58, 56, 48, 32, 30, 38, 59, 36, 57, 55, 47, 31, 23])) - #ts2txt.inputs.label_file = \ - # os.path.abspath(('OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152' - # '_2mm.nii.gz')) - ts2txt.inputs.label_file = '/software/brain_templates/OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152_2mm.nii.gz' + ts2txt.inputs.label_file = \ + os.path.abspath(('OASIS-TRT-20_DKT31_CMA_jointfusion_labels_in_MNI152' + '_2mm.nii.gz')) wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file') # Save the relevant data into an output directory From 5f76bedac590eca8766679c130d034b61c331d64 Mon Sep 17 00:00:00 2001 From: cdla Date: Tue, 27 Aug 2013 14:50:57 -0400 Subject: [PATCH 34/60] readded engine.py ... my bad --- nipype/pipeline/engine.py | 1961 +++++++++++++++++++++++++++++++++++++ 1 file changed, 1961 insertions(+) create mode 100644 nipype/pipeline/engine.py diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py new file mode 100644 index 0000000000..54c0569ba9 --- /dev/null +++ b/nipype/pipeline/engine.py @@ -0,0 +1,1961 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Defines functionality for pipelined execution of interfaces + +The `Pipeline` class provides core functionality for batch processing. + + Change directory to provide relative paths for doctests + >>> import os + >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) + >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) + >>> os.chdir(datadir) + +""" + +from glob import glob +import gzip +from copy import deepcopy +import cPickle +import inspect +import os +import os.path as op +import re +import shutil +from shutil import rmtree +from socket import gethostname +from string import Template +import sys +from tempfile import mkdtemp +from warnings import warn + +import numpy as np + +from ..utils.misc import package_check, str2bool +package_check('networkx', '1.3') +import networkx as nx + +from .. 
import config, logging +logger = logging.getLogger('workflow') +from ..interfaces.base import (traits, InputMultiPath, CommandLine, + Undefined, TraitedSpec, DynamicTraitedSpec, + Bunch, InterfaceResult, md5, Interface, + TraitDictObject, TraitListObject, isdefined) +from ..utils.misc import getsource, create_function_from_source +from ..utils.filemanip import (save_json, FileNotFoundError, + filename_to_list, list_to_filename, + copyfiles, fnames_presuffix, loadpkl, + split_filename, load_json, savepkl, + write_rst_header, write_rst_dict, + write_rst_list) + +from .utils import (generate_expanded_graph, modify_paths, + export_graph, make_output_dir, + clean_working_directory, format_dot, + get_print_name, merge_dict, evaluate_connect_function) + + +def _write_inputs(node): + lines = [] + nodename = node.fullname.replace('.', '_') + for key, _ in node.inputs.items(): + val = getattr(node.inputs, key) + if isdefined(val): + if type(val) == str: + try: + func = create_function_from_source(val) + except RuntimeError, e: + lines.append("%s.inputs.%s = '%s'" % (nodename, key, val)) + else: + funcname = [name for name in func.func_globals + if name != '__builtins__'][0] + lines.append(cPickle.loads(val)) + if funcname == nodename: + lines[-1] = lines[-1].replace(' %s(' % funcname, + ' %s_1(' % funcname) + funcname = '%s_1' % funcname + lines.append('from nipype.utils.misc import getsource') + lines.append("%s.inputs.%s = getsource(%s)" % (nodename, + key, + funcname)) + else: + lines.append('%s.inputs.%s = %s' % (nodename, key, val)) + return lines + + +def format_node(node, format='python', include_config=False): + """Format a node in a given output syntax.""" + lines = [] + name = node.fullname.replace('.', '_') + if format == 'python': + klass = node._interface + importline = 'from %s import %s' % (klass.__module__, + klass.__class__.__name__) + comment = '# Node: %s' % node.fullname + spec = inspect.getargspec(node._interface.__init__) + args = spec.args[1:] + if args: + filled_args = [] + for arg in args: + if hasattr(node._interface, '_%s' % arg): + filled_args.append('%s=%s' % (arg, getattr(node._interface, + '_%s' % arg))) + args = ', '.join(filled_args) + else: + args = '' + klass_name = klass.__class__.__name__ + if isinstance(node, MapNode): + nodedef = '%s = MapNode(%s(%s), iterfield=%s, name="%s")' \ + % (name, klass_name, args, node.iterfield, name) + else: + nodedef = '%s = Node(%s(%s), name="%s")' \ + % (name, klass_name, args, name) + lines = [importline, comment, nodedef] + + if include_config: + lines = [importline, "from collections import OrderedDict", + comment, nodedef] + lines.append('%s.config = %s' % (name, node.config)) + + if node.iterables is not None: + lines.append('%s.iterables = %s' % (name, node.iterables)) + lines.extend(_write_inputs(node)) + + return lines + + +class WorkflowBase(object): + """Defines common attributes and functions for workflows and nodes.""" + + def __init__(self, name, base_dir=None, **kwargs): + """ Initialize base parameters of a workflow or node + + Parameters + ---------- + name : string (mandatory) + Name of this node. Name must be alphanumeric and not contain any + special characters (e.g., '.', '@'). 
+ base_dir : string + base output directory (will be hashed before creations) + default=None, which results in the use of mkdtemp + + """ + self.base_dir = base_dir + self.config = None + self._verify_name(name) + self.name = name + # for compatibility with node expansion using iterables + self._id = self.name + self._hierarchy = None + + @property + def inputs(self): + raise NotImplementedError + + @property + def outputs(self): + raise NotImplementedError + + @property + def fullname(self): + fullname = self.name + if self._hierarchy: + fullname = self._hierarchy + '.' + self.name + return fullname + + def clone(self, name): + """Clone a workflowbase object + + Parameters + ---------- + + name : string (mandatory) + A clone of node or workflow must have a new name + """ + if (name is None) or (name == self.name): + raise Exception('Cloning requires a new name') + self._verify_name(name) + clone = deepcopy(self) + clone.name = name + clone._id = name + clone._hierarchy = None + return clone + + def _check_outputs(self, parameter): + return hasattr(self.outputs, parameter) + + def _check_inputs(self, parameter): + return hasattr(self.inputs, parameter) + + def _verify_name(self, name): + valid_name = bool(re.match('^[\w-]+$', name)) + if not valid_name: + raise Exception('the name must not contain any special characters') + + def __repr__(self): + if self._hierarchy: + return '.'.join((self._hierarchy, self._id)) + else: + return self._id + + def save(self, filename=None): + if filename is None: + filename = 'temp.npz' + np.savez(filename, object=self) + + def load(self, filename): + return np.load(filename) + + +class Workflow(WorkflowBase): + """Controls the setup and execution of a pipeline of processes.""" + + def __init__(self, name, base_dir=None, *args, **kwargs): + """Create a workflow object. + + Parameters + ---------- + name : alphanumeric string + unique identifier for the workflow + base_dir : string, optional + path to workflow storage + + """ + super(Workflow, self).__init__(name, base_dir, *args, **kwargs) + self._graph = nx.DiGraph() + self.config = deepcopy(config._sections) + + # PUBLIC API + def clone(self, name): + """Clone a workflow + + .. note:: + + Will reset attributes used for executing workflow. See + _init_runtime_fields. + + Parameters + ---------- + + name: alphanumeric name + unique name for the workflow + + """ + clone = super(Workflow, self).clone(name) + clone._reset_hierarchy() + return clone + + # Graph creation functions + def connect(self, *args, **kwargs): + """Connect nodes in the pipeline. + + This routine also checks if inputs and outputs are actually provided by + the nodes that are being connected. + + Creates edges in the directed graph using the nodes and edges specified + in the `connection_list`. Uses the NetworkX method + DiGraph.add_edges_from. + + Parameters + ---------- + + args : list or a set of four positional arguments + + Four positional arguments of the form:: + + connect(source, sourceoutput, dest, destinput) + + source : nodewrapper node + sourceoutput : string (must be in source.outputs) + dest : nodewrapper node + destinput : string (must be in dest.inputs) + + A list of 3-tuples of the following form:: + + [(source, target, + [('sourceoutput/attribute', 'targetinput'), + ...]), + ...] + + Or:: + + [(source, target, [(('sourceoutput1', func, arg2, ...), + 'targetinput'), ...]), + ...] 
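# Illustrative sketch (not from the original file): the two call forms described in
# this docstring would look roughly like the following, where `wf` is a Workflow and
# `realign`/`smooth` are hypothetical nodes:
#
#   wf.connect(realign, 'out_file', smooth, 'in_file')            # positional form
#   wf.connect([(realign, smooth, [('out_file', 'in_file')])])    # list form
#
#   def pick_first(files):
#       # any imports the helper needs must live inside the function body
#       return files[0]
#   wf.connect([(realign, smooth, [(('out_file', pick_first), 'in_file')])])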
+ sourceoutput1 will always be the first argument to func + and func will be evaluated and the results sent ot targetinput + + currently func needs to define all its needed imports within the + function as we use the inspect module to get at the source code + and execute it remotely + """ + if len(args) == 1: + connection_list = args[0] + elif len(args) == 4: + connection_list = [(args[0], args[2], [(args[1], args[3])])] + else: + raise Exception('unknown set of parameters to connect function') + if not kwargs: + disconnect = False + else: + disconnect = kwargs['disconnect'] + newnodes = [] + for srcnode, destnode, _ in connection_list: + if self in [srcnode, destnode]: + msg = ('Workflow connect cannot contain itself as node:' + ' src[%s] dest[%s] workflow[%s]') % (srcnode, + destnode, + self.name) + + raise IOError(msg) + if (srcnode not in newnodes) and not self._has_node(srcnode): + newnodes.append(srcnode) + if (destnode not in newnodes) and not self._has_node(destnode): + newnodes.append(destnode) + if newnodes: + self._check_nodes(newnodes) + for node in newnodes: + if node._hierarchy is None: + node._hierarchy = self.name + not_found = [] + connected_ports = {} + for srcnode, destnode, connects in connection_list: + if destnode not in connected_ports: + connected_ports[destnode] = [] + # check to see which ports of destnode are already + # connected. + if not disconnect and (destnode in self._graph.nodes()): + for edge in self._graph.in_edges_iter(destnode): + data = self._graph.get_edge_data(*edge) + for sourceinfo, destname in data['connect']: + if destname not in connected_ports[destnode]: + connected_ports[destnode] += [destname] + for source, dest in connects: + # Currently datasource/sink/grabber.io modules + # determine their inputs/outputs depending on + # connection settings. Skip these modules in the check + if dest in connected_ports[destnode]: + raise Exception(""" +Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already +connected. 
+""" % (srcnode, source, destnode, dest, dest, destnode)) + if not (hasattr(destnode, '_interface') and + '.io' in str(destnode._interface.__class__)): + if not destnode._check_inputs(dest): + not_found.append(['in', destnode.name, dest]) + if not (hasattr(srcnode, '_interface') and + '.io' in str(srcnode._interface.__class__)): + if isinstance(source, tuple): + # handles the case that source is specified + # with a function + sourcename = source[0] + elif isinstance(source, str): + sourcename = source + else: + raise Exception(('Unknown source specification in ' + 'connection from output of %s') % + srcnode.name) + if sourcename and not srcnode._check_outputs(sourcename): + not_found.append(['out', srcnode.name, sourcename]) + connected_ports[destnode] += [dest] + infostr = [] + for info in not_found: + infostr += ["Module %s has no %sput called %s\n" % (info[1], + info[0], + info[2])] + if not_found: + raise Exception('\n'.join(['Some connections were not found'] + + infostr)) + + # turn functions into strings + for srcnode, destnode, connects in connection_list: + for idx, (src, dest) in enumerate(connects): + if isinstance(src, tuple) and not isinstance(src[1], str): + function_source = getsource(src[1]) + connects[idx] = ((src[0], function_source, src[2:]), dest) + + # add connections + for srcnode, destnode, connects in connection_list: + edge_data = self._graph.get_edge_data(srcnode, destnode, None) + if edge_data: + logger.debug('(%s, %s): Edge data exists: %s' + % (srcnode, destnode, str(edge_data))) + for data in connects: + if data not in edge_data['connect']: + edge_data['connect'].append(data) + if disconnect: + logger.debug('Removing connection: %s' % str(data)) + edge_data['connect'].remove(data) + if edge_data['connect']: + self._graph.add_edges_from([(srcnode, + destnode, + edge_data)]) + else: + #pass + logger.debug('Removing connection: %s->%s' % (srcnode, + destnode)) + self._graph.remove_edges_from([(srcnode, destnode)]) + elif not disconnect: + logger.debug('(%s, %s): No edge data' % (srcnode, destnode)) + self._graph.add_edges_from([(srcnode, destnode, + {'connect': connects})]) + edge_data = self._graph.get_edge_data(srcnode, destnode) + logger.debug('(%s, %s): new edge data: %s' % (srcnode, destnode, + str(edge_data))) + + def disconnect(self, *args): + """Disconnect two nodes + + See the docstring for connect for format. 
+ """ + # yoh: explicit **dict was introduced for compatibility with Python 2.5 + return self.connect(*args, **dict(disconnect=True)) + + def add_nodes(self, nodes): + """ Add nodes to a workflow + + Parameters + ---------- + nodes : list + A list of WorkflowBase-based objects + """ + newnodes = [] + all_nodes = self._get_all_nodes() + for node in nodes: + if self._has_node(node): + raise IOError('Node %s already exists in the workflow' % node) + if isinstance(node, Workflow): + for subnode in node._get_all_nodes(): + if subnode in all_nodes: + raise IOError(('Subnode %s of node %s already exists ' + 'in the workflow') % (subnode, node)) + newnodes.append(node) + if not newnodes: + logger.debug('no new nodes to add') + return + for node in newnodes: + if not issubclass(node.__class__, WorkflowBase): + raise Exception('Node %s must be a subclass of WorkflowBase' % + str(node)) + self._check_nodes(newnodes) + for node in newnodes: + if node._hierarchy is None: + node._hierarchy = self.name + self._graph.add_nodes_from(newnodes) + + def remove_nodes(self, nodes): + """ Remove nodes from a workflow + + Parameters + ---------- + nodes : list + A list of WorkflowBase-based objects + """ + self._graph.remove_nodes_from(nodes) + + # Input-Output access + @property + def inputs(self): + return self._get_inputs() + + @property + def outputs(self): + return self._get_outputs() + + def get_node(self, name): + """Return an internal node by name + """ + nodenames = name.split('.') + nodename = nodenames[0] + outnode = [node for node in self._graph.nodes() if + str(node).endswith('.' + nodename)] + if outnode: + outnode = outnode[0] + if nodenames[1:] and issubclass(outnode.__class__, Workflow): + outnode = outnode.get_node('.'.join(nodenames[1:])) + else: + outnode = None + return outnode + + def list_node_names(self): + """List names of all nodes in a workflow + """ + outlist = [] + for node in nx.topological_sort(self._graph): + if isinstance(node, Workflow): + outlist.extend(['.'.join((node.name, nodename)) for nodename in + node.list_node_names()]) + else: + outlist.append(node.name) + return sorted(outlist) + + def write_graph(self, dotfilename='graph.dot', graph2use='hierarchical', + format="png", simple_form=True): + """Generates a graphviz dot file and a png file + + Parameters + ---------- + + graph2use: 'orig', 'hierarchical' (default), 'flat', 'exec' + orig - creates a top level graph without expanding internal + workflow nodes; + flat - expands workflow nodes recursively; + exec - expands workflows to depict iterables + + format: 'png', 'svg' + + simple_form: boolean (default: True) + Determines if the node name used in the graph should be of the form + 'nodename (package)' when True or 'nodename.Class.package' when + False. + + """ + graphtypes = ['orig', 'flat', 'hierarchical', 'exec'] + if graph2use not in graphtypes: + raise ValueError('Unknown graph2use keyword. 
Must be one of: ' + + str(graphtypes)) + base_dir, dotfilename = os.path.split(dotfilename) + if base_dir == '': + if self.base_dir: + base_dir = self.base_dir + if self.name: + base_dir = os.path.join(base_dir, self.name) + else: + base_dir = os.getcwd() + base_dir = make_output_dir(base_dir) + if graph2use == 'hierarchical': + dotfilename = os.path.join(base_dir, dotfilename) + self.write_hierarchical_dotfile(dotfilename=dotfilename, + colored=False, + simple_form=simple_form) + format_dot(dotfilename, format=format) + else: + graph = self._graph + if graph2use in ['flat', 'exec']: + graph = self._create_flat_graph() + if graph2use == 'exec': + graph = generate_expanded_graph(deepcopy(graph)) + export_graph(graph, base_dir, dotfilename=dotfilename, + format=format, simple_form=simple_form) + + def write_hierarchical_dotfile(self, dotfilename=None, colored=True, + simple_form=True): + dotlist = ['digraph %s{' % self.name] + if colored: + dotlist.append(' ' + 'colorscheme=pastel28;') + dotlist.append(self._get_dot(prefix=' ', colored=colored, + simple_form=simple_form)) + dotlist.append('}') + dotstr = '\n'.join(dotlist) + if dotfilename: + fp = open(dotfilename, 'wt') + fp.writelines(dotstr) + fp.close() + else: + logger.info(dotstr) + + def export(self, filename=None, prefix="output", format="python", + include_config=False): + """Export object into a different format + + Parameters + ---------- + filename: string + file to save the code to; overrides prefix + prefix: string + prefix to use for output file + format: string + one of "python" + include_config: boolean + whether to include node and workflow config values + + """ + formats = ["python"] + if format not in formats: + raise ValueError('format must be one of: %s' % '|'.join(formats)) + flatgraph = self._create_flat_graph() + nodes = nx.topological_sort(flatgraph) + + lines = ['# Workflow'] + importlines = ['from nipype.pipeline.engine import Workflow, ' + 'Node, MapNode'] + functions = {} + if format == "python": + connect_template = '%s.connect(%%s, %%s, %%s, "%%s")' % self.name + connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' \ + % self.name + wfdef = '%s = Workflow("%s")' % (self.name, self.name) + lines.append(wfdef) + if include_config: + lines.append('%s.config = %s' % (self.name, self.config)) + for idx, node in enumerate(nodes): + nodename = node.fullname.replace('.', '_') + # write nodes + nodelines = format_node(node, format='python', + include_config=include_config) + for line in nodelines: + if line.startswith('from'): + if line not in importlines: + importlines.append(line) + else: + lines.append(line) + # write connections + for u, _, d in flatgraph.in_edges_iter(nbunch=node, + data=True): + for cd in d['connect']: + if isinstance(cd[0], tuple): + args = list(cd[0]) + if args[1] in functions: + funcname = functions[args[1]] + else: + func = create_function_from_source(args[1]) + funcname = [name for name in func.func_globals + if name != '__builtins__'][0] + functions[args[1]] = funcname + args[1] = funcname + args = tuple([arg for arg in args if arg]) + line_args = (u.fullname.replace('.', '_'), + args, nodename, cd[1]) + line = connect_template % line_args + line = line.replace("'%s'" % funcname, funcname) + lines.append(line) + else: + line_args = (u.fullname.replace('.', '_'), + cd[0], nodename, cd[1]) + lines.append(connect_template2 % line_args) + functionlines = ['# Functions'] + for function in functions: + functionlines.append(cPickle.loads(function).rstrip()) + all_lines = importlines + 
functionlines + lines + + if not filename: + filename = '%s%s.py' % (prefix, self.name) + with open(filename, 'wt') as fp: + fp.writelines('\n'.join(all_lines)) + return all_lines + + def run(self, plugin=None, plugin_args=None, updatehash=False): + """ Execute the workflow + + Parameters + ---------- + + plugin: plugin name or object + Plugin to use for execution. You can create your own plugins for + execution. + plugin_args : dictionary containing arguments to be sent to plugin + constructor. see individual plugin doc strings for details. + """ + if plugin is None: + plugin = config.get('execution', 'plugin') + if type(plugin) is not str: + runner = plugin + else: + name = 'nipype.pipeline.plugins' + try: + __import__(name) + except ImportError: + msg = 'Could not import plugin module: %s' % name + logger.error(msg) + raise ImportError(msg) + else: + plugin_mod = getattr(sys.modules[name], '%sPlugin' % plugin) + runner = plugin_mod(plugin_args=plugin_args) + flatgraph = self._create_flat_graph() + self.config = merge_dict(deepcopy(config._sections), self.config) + if 'crashdump_dir' in self.config: + warn(("Deprecated: workflow.config['crashdump_dir']\n" + "Please use config['execution']['crashdump_dir']")) + crash_dir = self.config['crashdump_dir'] + self.config['execution']['crashdump_dir'] = crash_dir + del self.config['crashdump_dir'] + logger.info(str(sorted(self.config))) + self._set_needed_outputs(flatgraph) + execgraph = generate_expanded_graph(deepcopy(flatgraph)) + for index, node in enumerate(execgraph.nodes()): + node.config = merge_dict(deepcopy(self.config), node.config) + node.base_dir = self.base_dir + node.index = index + if isinstance(node, MapNode): + node.use_plugin = (plugin, plugin_args) + self._configure_exec_nodes(execgraph) + if str2bool(self.config['execution']['create_report']): + self._write_report_info(self.base_dir, self.name, execgraph) + runner.run(execgraph, updatehash=updatehash, config=self.config) + return execgraph + + # PRIVATE API AND FUNCTIONS + + def _write_report_info(self, workingdir, name, graph): + if workingdir is None: + workingdir = os.getcwd() + report_dir = os.path.join(workingdir, name, 'report') + if os.path.exists(report_dir): + shutil.rmtree(report_dir) + os.makedirs(report_dir) + fp = open(os.path.join(report_dir, 'index.html'), 'wt') + fp.writelines('') + with open(os.path.join(os.path.dirname(__file__), + 'report_template.html')) as fpt: + script = Template(fpt.read()) + nodes = nx.topological_sort(graph) + report_files = [] + for i, node in enumerate(nodes): + report_files.append('result_files[%d] = "%s/result_%s.pklz";' + % (i, os.path.realpath(node.output_dir()), + node.name)) + report_files.append('report_files[%d] = "%s/_report/report.rst";' % + (i, os.path.realpath(node.output_dir()))) + report_files = '\n'.join(report_files) + fp.writelines(script.substitute(num_nodes=len(nodes), + report_files=report_files)) + fp.writelines('
\n') + fp.writelines('
\n') + fp.writelines(('
Works only with mozilla/firefox browsers
' + '
\n')) + script_file = os.path.join(os.path.dirname(sys.argv[0]), sys.argv[0]) + fp.writelines(('Script
\n') % script_file) + if self.base_dir: + graph_file = 'file://' + os.path.join(self.base_dir, self.name, + 'graph.dot.png') + fp.writelines(('Graph - requires write_graph() in ' + 'script
\n') % graph_file) + fp.writelines('\n') + fp.writelines(('' + '\n')) + for i, node in enumerate(nodes): + report_file = '%s/_report/report.rst' % \ + os.path.realpath(node.output_dir()) + local_file = '%s.rst' % node._id + url = ('') % (i, + report_file, + node._id) + url += '' % ('.'.join(node.fullname.split('.')[:-1])) + url += '\n' % \ + ('.'.join(get_print_name(node).split('.')[1:])) + fp.writelines(url) + fp.writelines('
NameHierarchySource
%s%s%s
') + fp.writelines('
content
') + fp.writelines('
') + fp.close() + + def _set_needed_outputs(self, graph): + """Initialize node with list of which outputs are needed.""" + rm_outputs = self.config['execution']['remove_unnecessary_outputs'] + if not str2bool(rm_outputs): + return + for node in graph.nodes(): + node.needed_outputs = [] + for edge in graph.out_edges_iter(node): + data = graph.get_edge_data(*edge) + for sourceinfo, _ in sorted(data['connect']): + if isinstance(sourceinfo, tuple): + input_name = sourceinfo[0] + else: + input_name = sourceinfo + if input_name not in node.needed_outputs: + node.needed_outputs += [input_name] + if node.needed_outputs: + node.needed_outputs = sorted(node.needed_outputs) + + def _configure_exec_nodes(self, graph): + """Ensure that each node knows where to get inputs from + """ + for node in graph.nodes(): + node.input_source = {} + for edge in graph.in_edges_iter(node): + data = graph.get_edge_data(*edge) + for sourceinfo, field in sorted(data['connect']): + node.input_source[field] = \ + (os.path.join(edge[0].output_dir(), + 'result_%s.pklz' % edge[0].name), + sourceinfo) + + def _check_nodes(self, nodes): + """Checks if any of the nodes are already in the graph + + """ + node_names = [node.name for node in self._graph.nodes()] + node_lineage = [node._hierarchy for node in self._graph.nodes()] + for node in nodes: + if node.name in node_names: + idx = node_names.index(node.name) + if node_lineage[idx] in [node._hierarchy, self.name]: + raise IOError('Duplicate node name %s found.' % node.name) + else: + node_names.append(node.name) + + def _has_attr(self, parameter, subtype='in'): + """Checks if a parameter is available as an input or output + """ + if subtype == 'in': + subobject = self.inputs + else: + subobject = self.outputs + attrlist = parameter.split('.') + cur_out = subobject + for attr in attrlist: + if not hasattr(cur_out, attr): + return False + cur_out = getattr(cur_out, attr) + return True + + def _get_parameter_node(self, parameter, subtype='in'): + """Returns the underlying node corresponding to an input or + output parameter + """ + if subtype == 'in': + subobject = self.inputs + else: + subobject = self.outputs + attrlist = parameter.split('.') + cur_out = subobject + for attr in attrlist[:-1]: + cur_out = getattr(cur_out, attr) + return cur_out.traits()[attrlist[-1]].node + + def _check_outputs(self, parameter): + return self._has_attr(parameter, subtype='out') + + def _check_inputs(self, parameter): + return self._has_attr(parameter, subtype='in') + + def _get_inputs(self): + """Returns the inputs of a workflow + + This function does not return any input ports that are already + connected + """ + inputdict = TraitedSpec() + for node in self._graph.nodes(): + inputdict.add_trait(node.name, traits.Instance(TraitedSpec)) + if isinstance(node, Workflow): + setattr(inputdict, node.name, node.inputs) + else: + taken_inputs = [] + for _, _, d in self._graph.in_edges_iter(nbunch=node, + data=True): + for cd in d['connect']: + taken_inputs.append(cd[1]) + unconnectedinputs = TraitedSpec() + for key, trait in node.inputs.items(): + if key not in taken_inputs: + unconnectedinputs.add_trait(key, + traits.Trait(trait, + node=node)) + value = getattr(node.inputs, key) + setattr(unconnectedinputs, key, value) + setattr(inputdict, node.name, unconnectedinputs) + getattr(inputdict, node.name).on_trait_change(self._set_input) + return inputdict + + def _get_outputs(self): + """Returns all possible output ports that are not already connected + """ + outputdict = TraitedSpec() + for node in 
self._graph.nodes(): + outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) + if isinstance(node, Workflow): + setattr(outputdict, node.name, node.outputs) + elif node.outputs: + outputs = TraitedSpec() + for key, _ in node.outputs.items(): + outputs.add_trait(key, traits.Any(node=node)) + setattr(outputs, key, None) + setattr(outputdict, node.name, outputs) + return outputdict + + def _set_input(self, object, name, newvalue): + """Trait callback function to update a node input + """ + object.traits()[name].node.set_input(name, newvalue) + + def _set_node_input(self, node, param, source, sourceinfo): + """Set inputs of a node given the edge connection""" + if isinstance(sourceinfo, str): + val = source.get_output(sourceinfo) + elif isinstance(sourceinfo, tuple): + if callable(sourceinfo[1]): + val = sourceinfo[1](source.get_output(sourceinfo[0]), + *sourceinfo[2:]) + newval = val + if isinstance(val, TraitDictObject): + newval = dict(val) + if isinstance(val, TraitListObject): + newval = val[:] + logger.debug('setting node input: %s->%s', param, str(newval)) + node.set_input(param, deepcopy(newval)) + + def _get_all_nodes(self): + allnodes = [] + for node in self._graph.nodes(): + if isinstance(node, Workflow): + allnodes.extend(node._get_all_nodes()) + else: + allnodes.append(node) + return allnodes + + def _has_node(self, wanted_node): + for node in self._graph.nodes(): + if wanted_node == node: + return True + if isinstance(node, Workflow): + if node._has_node(wanted_node): + return True + return False + + def _create_flat_graph(self): + """Make a simple DAG where no node is a workflow.""" + logger.debug('Creating flat graph for workflow: %s', self.name) + workflowcopy = deepcopy(self) + workflowcopy._generate_flatgraph() + return workflowcopy._graph + + def _reset_hierarchy(self): + """Reset the hierarchy on a graph + """ + for node in self._graph.nodes(): + if isinstance(node, Workflow): + node._reset_hierarchy() + for innernode in node._graph.nodes(): + innernode._hierarchy = '.'.join((self.name, + innernode._hierarchy)) + else: + node._hierarchy = self.name + + def _generate_flatgraph(self): + """Generate a graph containing only Nodes or MapNodes + """ + logger.debug('expanding workflow: %s', self) + nodes2remove = [] + if not nx.is_directed_acyclic_graph(self._graph): + raise Exception(('Workflow: %s is not a directed acyclic graph ' + '(DAG)') % self.name) + nodes = nx.topological_sort(self._graph) + for node in nodes: + logger.debug('processing node: %s' % node) + if isinstance(node, Workflow): + nodes2remove.append(node) + # use in_edges instead of in_edges_iter to allow + # disconnections to take place properly. otherwise, the + # edge dict is modified. 
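# Illustrative sketch (not part of the original file): the comment above is about
# iterating over a materialised edge list while the loop body calls disconnect()
# and connect(), which mutate the graph's edge data. The general pattern is:
#
#   import networkx as nx
#   g = nx.DiGraph()
#   g.add_edge('a', 'b', connect=[('out', 'in')])
#   for u, v, d in list(g.in_edges('b', data=True)):  # snapshot first,
#       g.remove_edge(u, v)                           # then mutate safely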
+ for u, _, d in self._graph.in_edges(nbunch=node, data=True): + logger.debug('in: connections-> %s' % str(d['connect'])) + for cd in deepcopy(d['connect']): + logger.debug("in: %s" % str(cd)) + dstnode = node._get_parameter_node(cd[1], subtype='in') + srcnode = u + srcout = cd[0] + dstin = cd[1].split('.')[-1] + logger.debug('in edges: %s %s %s %s' % + (srcnode, srcout, dstnode, dstin)) + self.disconnect(u, cd[0], node, cd[1]) + self.connect(srcnode, srcout, dstnode, dstin) + # do not use out_edges_iter for reasons stated in in_edges + for _, v, d in self._graph.out_edges(nbunch=node, data=True): + logger.debug('out: connections-> %s' % str(d['connect'])) + for cd in deepcopy(d['connect']): + logger.debug("out: %s" % str(cd)) + dstnode = v + if isinstance(cd[0], tuple): + parameter = cd[0][0] + else: + parameter = cd[0] + srcnode = node._get_parameter_node(parameter, + subtype='out') + if isinstance(cd[0], tuple): + srcout = list(cd[0]) + srcout[0] = parameter.split('.')[-1] + srcout = tuple(srcout) + else: + srcout = parameter.split('.')[-1] + dstin = cd[1] + logger.debug('out edges: %s %s %s %s' % (srcnode, + srcout, + dstnode, + dstin)) + self.disconnect(node, cd[0], v, cd[1]) + self.connect(srcnode, srcout, dstnode, dstin) + # expand the workflow node + #logger.debug('expanding workflow: %s', node) + node._generate_flatgraph() + for innernode in node._graph.nodes(): + innernode._hierarchy = '.'.join((self.name, + innernode._hierarchy)) + self._graph.add_nodes_from(node._graph.nodes()) + self._graph.add_edges_from(node._graph.edges(data=True)) + if nodes2remove: + self._graph.remove_nodes_from(nodes2remove) + logger.debug('finished expanding workflow: %s', self) + + def _get_dot(self, prefix=None, hierarchy=None, colored=True, + simple_form=True): + """Create a dot file with connection info + """ + if prefix is None: + prefix = ' ' + if hierarchy is None: + hierarchy = [] + level = (len(prefix) / 2) + 1 + dotlist = ['%slabel="%s";' % (prefix, self.name)] + if colored: + dotlist.append('%scolor=%d;' % (prefix, level)) + for node in nx.topological_sort(self._graph): + fullname = '.'.join(hierarchy + [node.fullname]) + nodename = fullname.replace('.', '_') + if not isinstance(node, Workflow): + node_class_name = get_print_name(node, simple_form=simple_form) + if not simple_form: + node_class_name = '.'.join(node_class_name.split('.')[1:]) + if hasattr(node, 'iterables') and node.iterables: + dotlist.append(('%s[label="%s", style=filled, colorscheme' + '=greys7 color=2];') % (nodename, + node_class_name)) + else: + dotlist.append('%s[label="%s"];' % (nodename, + node_class_name)) + for node in nx.topological_sort(self._graph): + if isinstance(node, Workflow): + fullname = '.'.join(hierarchy + [node.fullname]) + nodename = fullname.replace('.', '_') + dotlist.append('subgraph cluster_%s {' % nodename) + if colored: + dotlist.append(prefix + prefix + 'style=filled;') + dotlist.append(node._get_dot(prefix=prefix + prefix, + hierarchy=hierarchy + [self.name], + colored=colored, + simple_form=simple_form)) + dotlist.append('}') + else: + for subnode in self._graph.successors_iter(node): + if node._hierarchy != subnode._hierarchy: + continue + if not isinstance(subnode, Workflow): + nodefullname = '.'.join(hierarchy + [node.fullname]) + subnodefullname = '.'.join(hierarchy + + [subnode.fullname]) + nodename = nodefullname.replace('.', '_') + subnodename = subnodefullname.replace('.', '_') + for _ in self._graph.get_edge_data(node, + subnode)['connect']: + dotlist.append('%s -> %s;' % (nodename, 
+ subnodename)) + logger.debug('connection: ' + dotlist[-1]) + # add between workflow connections + for u, v, d in self._graph.edges_iter(data=True): + uname = '.'.join(hierarchy + [u.fullname]) + vname = '.'.join(hierarchy + [v.fullname]) + for src, dest in d['connect']: + uname1 = uname + vname1 = vname + if isinstance(src, tuple): + srcname = src[0] + else: + srcname = src + if '.' in srcname: + uname1 += '.' + '.'.join(srcname.split('.')[:-1]) + if '.' in dest and '@' not in dest: + if not isinstance(v, Workflow): + if 'datasink' not in \ + str(v._interface.__class__).lower(): + vname1 += '.' + '.'.join(dest.split('.')[:-1]) + else: + vname1 += '.' + '.'.join(dest.split('.')[:-1]) + if uname1.split('.')[:-1] != vname1.split('.')[:-1]: + dotlist.append('%s -> %s;' % (uname1.replace('.', '_'), + vname1.replace('.', '_'))) + logger.debug('cross connection: ' + dotlist[-1]) + return ('\n' + prefix).join(dotlist) + + +class Node(WorkflowBase): + """Wraps interface objects for use in pipeline + + A Node creates a sandbox-like directory for executing the underlying + interface. It will copy or link inputs into this directory to ensure that + input data are not overwritten. A hash of the input state is used to + determine if the Node inputs have changed and whether the node needs to be + re-executed. + + Examples + -------- + + >>> from nipype import Node, spm + >>> realign = Node(spm.Realign(), 'realign') + >>> realign.inputs.in_files = 'functional.nii' + >>> realign.inputs.register_to_mean = True + >>> realign.run() # doctest: +SKIP + + """ + + def __init__(self, interface, name, iterables=None, overwrite=None, + needed_outputs=None, run_without_submitting=False, **kwargs): + """ + Parameters + ---------- + + interface : interface object + node specific interface (fsl.Bet(), spm.Coregister()) + + name : alphanumeric string + node specific name + + iterables : generator + input field and list to iterate using the pipeline engine + for example to iterate over different frac values in fsl.Bet() + for a single field the input can be a tuple, otherwise a list + of tuples + node.iterables = ('frac',[0.5,0.6,0.7]) + node.iterables = [('fwhm',[2,4]),('fieldx',[0.5,0.6,0.7])] + + overwrite : Boolean + Whether to overwrite contents of output directory if it already + exists. If directory exists and hash matches it + assumes that process has been executed + + needed_outputs : list of output_names + Force the node to keep only specific outputs. By default all + outputs are kept. Setting this attribute will delete any output + files and directories from the node's working directory that are + not part of the `needed_outputs`. 
+ + run_without_submitting : boolean + Run the node without submitting to a job engine or to a + multiprocessing pool + + """ + super(Node, self).__init__(name, **kwargs) + if interface is None: + raise IOError('Interface must be provided') + if not isinstance(interface, Interface): + raise IOError('interface must be an instance of an Interface') + self._interface = interface + self.name = name + self._result = None + self.iterables = iterables + self.overwrite = overwrite + self.parameterization = None + self.run_without_submitting = run_without_submitting + self.input_source = {} + self.needed_outputs = [] + self.plugin_args = {} + if needed_outputs: + self.needed_outputs = sorted(needed_outputs) + self._got_inputs = False + + @property + def interface(self): + """Return the underlying interface object""" + return self._interface + + @property + def result(self): + if self._result: + return self._result + else: + cwd = self.output_dir() + result, _, _ = self._load_resultfile(cwd) + return result + + @property + def inputs(self): + """Return the inputs of the underlying interface""" + return self._interface.inputs + + @property + def outputs(self): + """Return the output fields of the underlying interface""" + return self._interface._outputs() + + def output_dir(self): + """Return the location of the output directory for the node""" + if self.base_dir is None: + self.base_dir = mkdtemp() + outputdir = self.base_dir + if self._hierarchy: + outputdir = os.path.join(outputdir, *self._hierarchy.split('.')) + if self.parameterization: + outputdir = os.path.join(outputdir, *self.parameterization) + return os.path.abspath(os.path.join(outputdir, + self.name)) + + def set_input(self, parameter, val): + """ Set interface input value""" + logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) + setattr(self.inputs, parameter, deepcopy(val)) + + def get_output(self, parameter): + """Retrieve a particular output of the node""" + val = None + if self._result: + val = getattr(self._result.outputs, parameter) + else: + cwd = self.output_dir() + result, _, _ = self._load_resultfile(cwd) + if result and result.outputs: + val = getattr(result.outputs, parameter) + return val + + def help(self): + """ Print interface help""" + self._interface.help() + + def hash_exists(self, updatehash=False): + # Get a dictionary with hashed filenames and a hashvalue + # of the dictionary itself. + hashed_inputs, hashvalue = self._get_hashval() + outdir = self.output_dir() + hashfiles = glob(os.path.join(outdir, '_0x*.json')) + if len(hashfiles) > 1: + logger.info(hashfiles) + logger.info('Removing multiple hashfiles and forcing node to rerun') + for hashfile in hashfiles: + os.unlink(hashfile) + hashfile = os.path.join(outdir, '_0x%s.json' % hashvalue) + if updatehash and os.path.exists(outdir): + logger.debug("Updating hash: %s" % hashvalue) + for file in glob(os.path.join(outdir, '_0x*.json')): + os.remove(file) + self._save_hashfile(hashfile, hashed_inputs) + return os.path.exists(hashfile), hashvalue, hashfile, hashed_inputs + + def run(self, updatehash=False): + """Execute the node in its directory. 
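+
+        When ``updatehash`` is False, the interface is re-run if no matching
+        hash file exists, if ``overwrite`` is True, or if the interface is
+        marked ``always_run`` (and ``overwrite`` has not been set); otherwise
+        previously cached results are loaded.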
+ + Parameters + ---------- + + updatehash: boolean + Update the hash stored in the output directory + """ + # check to see if output directory and hash exist + if self.config is None: + self.config = deepcopy(config._sections) + else: + self.config = merge_dict(deepcopy(config._sections), self.config) + if not self._got_inputs: + self._get_inputs() + self._got_inputs = True + outdir = self.output_dir() + logger.info("Executing node %s in dir: %s" % (self._id, outdir)) + hash_info = self.hash_exists(updatehash=updatehash) + hash_exists, hashvalue, hashfile, hashed_inputs = hash_info + if (not updatehash and (((self.overwrite is None + and self._interface.always_run) + or self.overwrite) or + not hash_exists)): + logger.debug("Node hash: %s" % hashvalue) + + # by rerunning we mean only nodes that did finish to run previously + json_pat = op.join(outdir, '_0x*.json') + json_unfinished_pat = op.join(outdir, '_0x*_unfinished.json') + need_rerun = (os.path.exists(outdir) + and not isinstance(self, MapNode) + and len(glob(json_pat)) != 0 + and len(glob(json_unfinished_pat)) == 0) + if need_rerun: + logger.debug("Rerunning node") + logger.debug(("updatehash = %s, " + "self.overwrite = %s, " + "self._interface.always_run = %s, " + "os.path.exists(%s) = %s, " + "hash_method = %s") % + (str(updatehash), + str(self.overwrite), + str(self._interface.always_run), + hashfile, + str(os.path.exists(hashfile)), + self.config['execution']['hash_method'].lower())) + log_debug = config.get('logging', 'workflow_level') == 'DEBUG' + if log_debug and not op.exists(hashfile): + exp_hash_paths = glob(json_pat) + if len(exp_hash_paths) == 1: + split_out = split_filename(exp_hash_paths[0]) + exp_hash_file_base = split_out[1] + exp_hash = exp_hash_file_base[len('_0x'):] + logger.debug("Previous node hash = %s" % exp_hash) + try: + prev_inputs = load_json(exp_hash_paths[0]) + except: + pass + else: + logging.logdebug_dict_differences(prev_inputs, + hashed_inputs) + cannot_rerun = (str2bool( + self.config['execution']['stop_on_first_rerun']) + and (self.overwrite is not None + and self._interface.always_run)) + if cannot_rerun: + raise Exception(("Cannot rerun when 'stop_on_first_rerun' " + "is set to True")) + hashfile_unfinished = os.path.join(outdir, + '_0x%s_unfinished.json' % + hashvalue) + if op.exists(hashfile): + os.remove(hashfile) + rm_outdir = (op.exists(outdir) + and not (op.exists(hashfile_unfinished) + and self._interface.can_resume) + and not isinstance(self, MapNode)) + if rm_outdir: + logger.debug("Removing old %s and its contents" % outdir) + rmtree(outdir) + else: + logger.debug(("%s found and can_resume is True or Node is a " + "MapNode - resuming execution") % + hashfile_unfinished) + if isinstance(self, MapNode): + # remove old json files + for filename in glob(os.path.join(outdir, '_0x*.json')): + os.unlink(filename) + outdir = make_output_dir(outdir) + self._save_hashfile(hashfile_unfinished, hashed_inputs) + self.write_report(report_type='preexec', cwd=outdir) + savepkl(os.path.join(outdir, '_node.pklz'), self) + savepkl(os.path.join(outdir, '_inputs.pklz'), + self.inputs.get_traitsfree()) + try: + self._run_interface() + except: + os.remove(hashfile_unfinished) + raise + shutil.move(hashfile_unfinished, hashfile) + self.write_report(report_type='postexec', cwd=outdir) + else: + if not os.path.exists(os.path.join(outdir, '_inputs.pklz')): + logger.debug('%s: creating inputs file' % self.name) + savepkl(os.path.join(outdir, '_inputs.pklz'), + self.inputs.get_traitsfree()) + if not 
os.path.exists(os.path.join(outdir, '_node.pklz')): + logger.debug('%s: creating node file' % self.name) + savepkl(os.path.join(outdir, '_node.pklz'), self) + logger.debug("Hashfile exists. Skipping execution") + self._run_interface(execute=False, updatehash=updatehash) + logger.debug('Finished running %s in dir: %s\n' % (self._id, outdir)) + return self._result + + # Private functions + def _get_hashval(self): + """Return a hash of the input state""" + if not self._got_inputs: + self._get_inputs() + self._got_inputs = True + hashed_inputs, hashvalue = self.inputs.get_hashval( + hash_method=self.config['execution']['hash_method']) + rm_extra = self.config['execution']['remove_unnecessary_outputs'] + if str2bool(rm_extra) and self.needed_outputs: + hashobject = md5() + hashobject.update(hashvalue) + sorted_outputs = sorted(self.needed_outputs) + hashobject.update(str(sorted_outputs)) + hashvalue = hashobject.hexdigest() + hashed_inputs['needed_outputs'] = sorted_outputs + return hashed_inputs, hashvalue + + def _save_hashfile(self, hashfile, hashed_inputs): + try: + save_json(hashfile, hashed_inputs) + except (IOError, TypeError): + err_type = sys.exc_info()[0] + if err_type is TypeError: + # XXX - SG current workaround is to just + # create the hashed file and not put anything + # in it + fd = open(hashfile, 'wt') + fd.writelines(str(hashed_inputs)) + fd.close() + logger.debug(('Unable to write a particular type to the json ' + 'file')) + else: + logger.critical('Unable to open the file in write mode: %s' % + hashfile) + + def _get_inputs(self): + """Retrieve inputs from pointers to results file + + This mechanism can be easily extended/replaced to retrieve data from + other data sources (e.g., XNAT, HTTP, etc.,.) + """ + logger.debug('Setting node inputs') + for key, info in self.input_source.items(): + logger.debug('input: %s' % key) + results_file = info[0] + logger.debug('results file: %s' % results_file) + results = loadpkl(results_file) + output_value = Undefined + if isinstance(info[1], tuple): + output_name = info[1][0] + value = getattr(results.outputs, output_name) + if isdefined(value): + output_value = evaluate_connect_function(info[1][1], + info[1][2], + value) + else: + output_name = info[1] + try: + output_value = results.outputs.get()[output_name] + except TypeError: + output_value = results.outputs.dictcopy()[output_name] + logger.debug('output: %s' % output_name) + try: + self.set_input(key, deepcopy(output_value)) + except traits.TraitError, e: + msg = ['Error setting node input:', + 'Node: %s' % self.name, + 'input: %s' % key, + 'results_file: %s' % results_file, + 'value: %s' % str(output_value)] + e.args = (e.args[0] + "\n" + '\n'.join(msg),) + raise + + def _run_interface(self, execute=True, updatehash=False): + if updatehash: + return + old_cwd = os.getcwd() + os.chdir(self.output_dir()) + self._result = self._run_command(execute) + os.chdir(old_cwd) + + def _save_results(self, result, cwd): + resultsfile = os.path.join(cwd, 'result_%s.pklz' % self.name) + if result.outputs: + try: + outputs = result.outputs.get() + except TypeError: + outputs = result.outputs.dictcopy() # outputs was a bunch + result.outputs.set(**modify_paths(outputs, relative=True, + basedir=cwd)) + + savepkl(resultsfile, result) + logger.debug('saved results in %s' % resultsfile) + + if result.outputs: + result.outputs.set(**outputs) + + def _load_resultfile(self, cwd): + """Load results if it exists in cwd + + Parameter + --------- + + cwd : working directory of node + + Returns + ------- + + 
result : InterfaceResult structure + aggregate : boolean indicating whether node should aggregate_outputs + attribute error : boolean indicating whether there was some mismatch in + versions of traits used to store result and hence node needs to + rerun + """ + aggregate = True + resultsoutputfile = os.path.join(cwd, 'result_%s.pklz' % self.name) + result = None + attribute_error = False + if os.path.exists(resultsoutputfile): + pkl_file = gzip.open(resultsoutputfile, 'rb') + try: + result = cPickle.load(pkl_file) + except (traits.TraitError, AttributeError, ImportError), err: + if isinstance(err, (AttributeError, ImportError)): + attribute_error = True + logger.debug(('attribute error: %s probably using ' + 'different trait pickled file') % str(err)) + else: + logger.debug(('some file does not exist. hence trait ' + 'cannot be set')) + else: + if result.outputs: + try: + outputs = result.outputs.get() + except TypeError: + outputs = result.outputs.dictcopy() # outputs == Bunch + try: + result.outputs.set(**modify_paths(outputs, + relative=False, + basedir=cwd)) + except FileNotFoundError: + logger.debug(('conversion to full path results in ' + 'non existent file')) + aggregate = False + pkl_file.close() + logger.debug('Aggregate: %s', aggregate) + return result, aggregate, attribute_error + + def _load_results(self, cwd): + result, aggregate, attribute_error = self._load_resultfile(cwd) + # try aggregating first + if aggregate: + logger.debug('aggregating results') + if attribute_error: + old_inputs = loadpkl(os.path.join(cwd, '_inputs.pklz')) + self.inputs.set(**old_inputs) + if not isinstance(self, MapNode): + self._copyfiles_to_wd(cwd, True, linksonly=True) + aggouts = self._interface.aggregate_outputs( + needed_outputs=self.needed_outputs) + runtime = Bunch(cwd=cwd, + returncode=0, + environ=deepcopy(os.environ.data), + hostname=gethostname()) + result = InterfaceResult( + interface=self._interface.__class__, + runtime=runtime, + inputs=self._interface.inputs.get_traitsfree(), + outputs=aggouts) + self._save_results(result, cwd) + else: + logger.debug('aggregating mapnode results') + self._run_interface() + result = self._result + return result + + def _run_command(self, execute, copyfiles=True): + cwd = os.getcwd() + if execute and copyfiles: + self._originputs = deepcopy(self._interface.inputs) + if execute: + runtime = Bunch(returncode=1, + environ=deepcopy(os.environ.data), + hostname=gethostname()) + result = InterfaceResult( + interface=self._interface.__class__, + runtime=runtime, + inputs=self._interface.inputs.get_traitsfree()) + self._result = result + logger.debug('Executing node') + if copyfiles: + self._copyfiles_to_wd(cwd, execute) + if issubclass(self._interface.__class__, CommandLine): + try: + cmd = self._interface.cmdline + except Exception, msg: + self._result.runtime.stderr = msg + raise + cmdfile = os.path.join(cwd, 'command.txt') + fd = open(cmdfile, 'wt') + fd.writelines(cmd + "\n") + fd.close() + logger.info('Running: %s' % cmd) + try: + result = self._interface.run() + except Exception, msg: + self._result.runtime.stderr = msg + raise + + dirs2keep = None + if isinstance(self, MapNode): + dirs2keep = [os.path.join(cwd, 'mapflow')] + result.outputs = clean_working_directory(result.outputs, cwd, + self._interface.inputs, + self.needed_outputs, + self.config, + dirs2keep=dirs2keep) + self._save_results(result, cwd) + else: + logger.info("Collecting precomputed outputs") + try: + result = self._load_results(cwd) + except (FileNotFoundError, AttributeError): + # if 
aggregation does not work, rerun the node + logger.info(("Some of the outputs were not found: " + "rerunning node.")) + result = self._run_command(execute=True, copyfiles=False) + return result + + def _strip_temp(self, files, wd): + out = [] + for f in files: + if isinstance(f, list): + out.append(self._strip_temp(f, wd)) + else: + out.append(f.replace(os.path.join(wd, '_tempinput'), wd)) + return out + + def _copyfiles_to_wd(self, outdir, execute, linksonly=False): + """ copy files over and change the inputs""" + if hasattr(self._interface, '_get_filecopy_info'): + logger.debug('copying files to wd [execute=%s, linksonly=%s]' % + (str(execute), str(linksonly))) + if execute and linksonly: + olddir = outdir + outdir = os.path.join(outdir, '_tempinput') + os.makedirs(outdir) + for info in self._interface._get_filecopy_info(): + files = self.inputs.get().get(info['key']) + if not isdefined(files): + continue + if files: + infiles = filename_to_list(files) + if execute: + if linksonly: + if not info['copy']: + newfiles = copyfiles(infiles, + [outdir], + copy=info['copy'], + create_new=True) + else: + newfiles = fnames_presuffix(infiles, + newpath=outdir) + newfiles = self._strip_temp( + newfiles, + op.abspath(olddir).split(os.path.sep)[-1]) + else: + newfiles = copyfiles(infiles, + [outdir], + copy=info['copy'], + create_new=True) + else: + newfiles = fnames_presuffix(infiles, newpath=outdir) + if not isinstance(files, list): + newfiles = list_to_filename(newfiles) + setattr(self.inputs, info['key'], newfiles) + if execute and linksonly: + rmtree(outdir) + + def update(self, **opts): + self.inputs.update(**opts) + + def write_report(self, report_type=None, cwd=None): + if not str2bool(self.config['execution']['create_report']): + return + report_dir = os.path.join(cwd, '_report') + report_file = os.path.join(report_dir, 'report.rst') + if not os.path.exists(report_dir): + os.makedirs(report_dir) + if report_type == 'preexec': + logger.debug('writing pre-exec report to %s' % report_file) + fp = open(report_file, 'wt') + fp.writelines(write_rst_header('Node: %s' % get_print_name(self), + level=0)) + fp.writelines(write_rst_list(['Hierarchy : %s' % self.fullname, + 'Exec ID : %s' % self._id])) + fp.writelines(write_rst_header('Original Inputs', level=1)) + fp.writelines(write_rst_dict(self.inputs.get())) + if report_type == 'postexec': + logger.debug('writing post-exec report to %s' % report_file) + fp = open(report_file, 'at') + fp.writelines(write_rst_header('Execution Inputs', level=1)) + fp.writelines(write_rst_dict(self.inputs.get())) + exit_now = (not hasattr(self.result, 'outputs') + or self.result.outputs is None) + if exit_now: + return + fp.writelines(write_rst_header('Execution Outputs', level=1)) + if isinstance(self.result.outputs, Bunch): + fp.writelines(write_rst_dict(self.result.outputs.dictcopy())) + elif self.result.outputs: + fp.writelines(write_rst_dict(self.result.outputs.get())) + if isinstance(self, MapNode): + fp.close() + return + fp.writelines(write_rst_header('Runtime info', level=1)) + if hasattr(self.result.runtime, 'cmdline'): + fp.writelines(write_rst_dict( + {'hostname': self.result.runtime.hostname, + 'duration': self.result.runtime.duration, + 'command': self.result.runtime.cmdline})) + else: + fp.writelines(write_rst_dict( + {'hostname': self.result.runtime.hostname, + 'duration': self.result.runtime.duration})) + if hasattr(self.result.runtime, 'merged'): + fp.writelines(write_rst_header('Terminal output', level=2)) + 
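+            # runtime.merged holds the interface's captured terminal output
+            # (combined stdout/stderr); write it out as an rst list.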
fp.writelines(write_rst_list(self.result.runtime.merged)) + if hasattr(self.result.runtime, 'environ'): + fp.writelines(write_rst_header('Environment', level=2)) + fp.writelines(write_rst_dict(self.result.runtime.environ)) + fp.close() + + +class MapNode(Node): + """Wraps interface objects that need to be iterated on a list of inputs. + + Examples + -------- + + >>> from nipype import MapNode, fsl + >>> realign = MapNode(fsl.MCFLIRT(), 'in_file', 'realign') + >>> realign.inputs.in_file = ['functional.nii', + ... 'functional2.nii', + ... 'functional3.nii'] + >>> realign.run() # doctest: +SKIP + + """ + + def __init__(self, interface, iterfield, name, **kwargs): + """ + + Parameters + ---------- + interface : interface object + node specific interface (fsl.Bet(), spm.Coregister()) + iterfield : string or list of strings + name(s) of input fields that will receive a list of whatever kind + of input they take. the node will be run separately for each + value in these lists. for more than one input, the values are + paired (i.e. it does not compute a combinatorial product). + name : alphanumeric string + node specific name + + See Node docstring for additional keyword arguments. + """ + super(MapNode, self).__init__(interface, name, **kwargs) + if isinstance(iterfield, str): + iterfield = [iterfield] + self.iterfield = iterfield + self._inputs = self._create_dynamic_traits(self._interface.inputs, + fields=self.iterfield) + self._inputs.on_trait_change(self._set_mapnode_input) + self._got_inputs = False + + def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): + """Convert specific fields of a trait to accept multiple inputs + """ + output = DynamicTraitedSpec() + if fields is None: + fields = basetraits.copyable_trait_names() + for name, spec in basetraits.items(): + if name in fields and ((nitems is None) or (nitems > 1)): + logger.debug('adding multipath trait: %s' % name) + output.add_trait(name, InputMultiPath(spec.trait_type)) + else: + output.add_trait(name, traits.Trait(spec)) + setattr(output, name, Undefined) + value = getattr(basetraits, name) + if isdefined(value): + setattr(output, name, value) + value = getattr(output, name) + return output + + def set_input(self, parameter, val): + """ Set interface input value or nodewrapper attribute + + Priority goes to interface. 
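+
+        A hypothetical call (argument values are illustrative only)::
+
+            mapnode.set_input('in_file', ['run1.nii', 'run2.nii'])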
+ """ + logger.debug('setting nodelevel(%s) input %s = %s' % (str(self), + parameter, + str(val))) + self._set_mapnode_input(self.inputs, parameter, deepcopy(val)) + + def _set_mapnode_input(self, object, name, newvalue): + logger.debug('setting mapnode(%s) input: %s -> %s' % (str(self), + name, + str(newvalue))) + if name in self.iterfield: + setattr(self._inputs, name, newvalue) + else: + setattr(self._interface.inputs, name, newvalue) + + def _get_hashval(self): + """ Compute hash including iterfield lists.""" + if not self._got_inputs: + self._get_inputs() + self._got_inputs = True + self._check_iterfield() + hashinputs = deepcopy(self._interface.inputs) + for name in self.iterfield: + hashinputs.remove_trait(name) + hashinputs.add_trait( + name, + InputMultiPath( + self._interface.inputs.traits()[name].trait_type)) + logger.debug('setting hashinput %s-> %s' % + (name, getattr(self._inputs, name))) + setattr(hashinputs, name, getattr(self._inputs, name)) + hashed_inputs, hashvalue = hashinputs.get_hashval( + hash_method=self.config['execution']['hash_method']) + rm_extra = self.config['execution']['remove_unnecessary_outputs'] + if str2bool(rm_extra) and self.needed_outputs: + hashobject = md5() + hashobject.update(hashvalue) + sorted_outputs = sorted(self.needed_outputs) + hashobject.update(str(sorted_outputs)) + hashvalue = hashobject.hexdigest() + hashed_inputs['needed_outputs'] = sorted_outputs + return hashed_inputs, hashvalue + + @property + def inputs(self): + return self._inputs + + @property + def outputs(self): + if self._interface._outputs(): + return Bunch(self._interface._outputs().get()) + else: + return None + + def _make_nodes(self, cwd=None): + if cwd is None: + cwd = self.output_dir() + nitems = len(filename_to_list(getattr(self.inputs, self.iterfield[0]))) + for i in range(nitems): + nodename = '_' + self.name + str(i) + node = Node(deepcopy(self._interface), name=nodename) + node.overwrite = self.overwrite + node.run_without_submitting = self.run_without_submitting + node.plugin_args = self.plugin_args + node._interface.inputs.set( + **deepcopy(self._interface.inputs.get())) + for field in self.iterfield: + fieldvals = filename_to_list(getattr(self.inputs, field)) + logger.debug('setting input %d %s %s' % (i, field, + fieldvals[i])) + setattr(node.inputs, field, + fieldvals[i]) + node.config = self.config + node.base_dir = os.path.join(cwd, 'mapflow') + yield i, node + + def _node_runner(self, nodes, updatehash=False): + for i, node in nodes: + err = None + try: + node.run(updatehash=updatehash) + except Exception, err: + if str2bool(self.config['execution']['stop_on_first_crash']): + self._result = node.result + raise + yield i, node, err + + def _collate_results(self, nodes): + self._result = InterfaceResult(interface=[], runtime=[], + provenance=[], outputs=self.outputs) + returncode = [] + for i, node, err in nodes: + self._result.runtime.insert(i, None) + if node.result: + if hasattr(node.result, 'runtime'): + self._result.interface.insert(i, node.result.interface) + self._result.runtime[i] = node.result.runtime + if hasattr(node.result, 'provenance'): + self._result.provenance.insert(i, node.result.provenance) + returncode.insert(i, err) + if self.outputs: + for key, _ in self.outputs.items(): + rm_extra = (self.config['execution'] + ['remove_unnecessary_outputs']) + if str2bool(rm_extra) and self.needed_outputs: + if key not in self.needed_outputs: + continue + values = getattr(self._result.outputs, key) + if not isdefined(values): + values = [] + if 
node.result.outputs: + values.insert(i, node.result.outputs.get()[key]) + else: + values.insert(i, None) + defined_vals = [isdefined(val) for val in values] + if any(defined_vals) and self._result.outputs: + setattr(self._result.outputs, key, values) + if returncode and any([code is not None for code in returncode]): + msg = [] + for i, code in enumerate(returncode): + if code is not None: + msg += ['Subnode %d failed' % i] + msg += ['Error:', str(code)] + raise Exception('Subnodes of node: %s failed:\n%s' % + (self.name, '\n'.join(msg))) + + def write_report(self, report_type=None, cwd=None): + if not str2bool(self.config['execution']['create_report']): + return + if report_type == 'preexec': + super(MapNode, self).write_report(report_type=report_type, cwd=cwd) + if report_type == 'postexec': + super(MapNode, self).write_report(report_type=report_type, cwd=cwd) + report_dir = os.path.join(cwd, '_report') + report_file = os.path.join(report_dir, 'report.rst') + fp = open(report_file, 'at') + fp.writelines(write_rst_header('Subnode reports', level=1)) + nitems = len(filename_to_list( + getattr(self.inputs, self.iterfield[0]))) + subnode_report_files = [] + for i in range(nitems): + nodename = '_' + self.name + str(i) + subnode_report_files.insert(i, 'subnode %d' % i + ' : ' + + os.path.join(cwd, + 'mapflow', + nodename, + '_report', + 'report.rst')) + fp.writelines(write_rst_list(subnode_report_files)) + fp.close() + + def get_subnodes(self): + if not self._got_inputs: + self._get_inputs() + self._got_inputs = True + self._check_iterfield() + self.write_report(report_type='preexec', cwd=self.output_dir()) + return [node for _, node in self._make_nodes()] + + def num_subnodes(self): + if not self._got_inputs: + self._get_inputs() + self._got_inputs = True + self._check_iterfield() + return len(filename_to_list(getattr(self.inputs, self.iterfield[0]))) + + def _get_inputs(self): + old_inputs = self._inputs.get() + self._inputs = self._create_dynamic_traits(self._interface.inputs, + fields=self.iterfield) + self._inputs.set(**old_inputs) + super(MapNode, self)._get_inputs() + + def _check_iterfield(self): + """Checks iterfield + + * iterfield must be in inputs + * number of elements must match across iterfield + """ + for iterfield in self.iterfield: + if not isdefined(getattr(self.inputs, iterfield)): + raise ValueError(("Input %s was not set but it is listed " + "in iterfields.") % iterfield) + if len(self.iterfield) > 1: + first_len = len(filename_to_list(getattr(self.inputs, + self.iterfield[0]))) + for iterfield in self.iterfield[1:]: + if first_len != len(filename_to_list(getattr(self.inputs, + iterfield))): + raise ValueError(("All iterfields of a MapNode have to " + "have the same length. %s") % + str(self.inputs)) + + def _run_interface(self, execute=True, updatehash=False): + """Run the mapnode interface + + This is primarily intended for serial execution of mapnode. 
A parallel + execution requires creation of new nodes that can be spawned + """ + old_cwd = os.getcwd() + cwd = self.output_dir() + os.chdir(cwd) + self._check_iterfield() + if execute: + nitems = len(filename_to_list(getattr(self.inputs, + self.iterfield[0]))) + nodenames = ['_' + self.name + str(i) for i in range(nitems)] + # map-reduce formulation + self._collate_results(self._node_runner(self._make_nodes(cwd), + updatehash=updatehash)) + self._save_results(self._result, cwd) + # remove any node directories no longer required + dirs2remove = [] + for path in glob(os.path.join(cwd, 'mapflow', '*')): + if os.path.isdir(path): + if path.split(os.path.sep)[-1] not in nodenames: + dirs2remove.append(path) + for path in dirs2remove: + shutil.rmtree(path) + else: + self._result = self._load_results(cwd) + os.chdir(old_cwd) From d41d48e05b3c1c414b698673516fc8b1babbb7af Mon Sep 17 00:00:00 2001 From: cdla Date: Tue, 27 Aug 2013 14:59:09 -0400 Subject: [PATCH 35/60] minor changes --- examples/rsfmri_preprocessing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index d753411808..85b2fb20eb 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -268,7 +268,7 @@ def create_workflow(files, FM_sigma=2, FM_echo_spacing=.7, target_subject=['fsaverage3', 'fsaverage4'], - name='resting'): + name='resting'): wf = Workflow(name=name) @@ -719,6 +719,7 @@ def create_workflow(files, from nibabel import load img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) + print TR, slice_times, slice_thickness if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, From 3d530de7c9db87bb88c7f5bf1f2f68c5b91fa1ac Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 10:28:12 -0400 Subject: [PATCH 36/60] fixed tab spacing issue --- examples/rsfmri_preprocessing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 85b2fb20eb..7c4b1597a0 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -731,10 +731,10 @@ def create_workflow(files, lowpass_freq=args.lowpass_freq, highpass_freq=args.highpass_freq, sink_directory=os.path.abspath(args.sink), - fieldmap_images=args.field_maps, - FM_TEdiff=float(args.TE_diff), - FM_echo_spacing=float(args.echo_spacing), - FM_sigma=int(args.sigma)) + fieldmap_images=args.field_maps, + FM_TEdiff=float(args.TE_diff), + FM_echo_spacing=float(args.echo_spacing), + FM_sigma=int(args.sigma)) else: wf = create_workflow([os.path.abspath(filename) for filename in args.files], From 57a421b2c804ebbd78b1bbf4b4054ed5fa544761 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:08:14 -0400 Subject: [PATCH 37/60] fixed more tabbing issues --- examples/rsfmri_preprocessing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 7c4b1597a0..974331b6f8 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -720,8 +720,8 @@ def create_workflow(files, img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness - if args.field_maps: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + if args.field_maps: + wf = 
create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, @@ -736,8 +736,8 @@ def create_workflow(files, FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) - else: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + else: + wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, From 76c7dd9847601ef9d0247c1f80244feb9997e02a Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:30:22 -0400 Subject: [PATCH 38/60] tabbing issues --- examples/rsfmri_preprocessing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 974331b6f8..71e556ffa9 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -716,10 +716,10 @@ def create_workflow(files, TR, slice_times, slice_thickness = get_info(args.dicom_file) if slice_thickness is None: - from nibabel import load - img = load(args.files[0]) - slice_thickness = max(img.get_header().get_zooms()[:3]) - print TR, slice_times, slice_thickness + from nibabel import load + img = load(args.files[0]) + slice_thickness = max(img.get_header().get_zooms()[:3]) + print TR, slice_times, slice_thickness if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, From d9b4b16d27453b692f65eb52371c0f253e5547a5 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:37:57 -0400 Subject: [PATCH 39/60] revenge of the tab issues --- examples/rsfmri_preprocessing.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 71e556ffa9..628c52f57a 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -719,7 +719,7 @@ def create_workflow(files, from nibabel import load img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) - print TR, slice_times, slice_thickness + print TR, slice_times, slice_thickness if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, @@ -735,7 +735,6 @@ def create_workflow(files, FM_TEdiff=float(args.TE_diff), FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) - else: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, From 6b031077ab3ef7ae5e43af05cb80b7ab7a394f59 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:41:10 -0400 Subject: [PATCH 40/60] tab issues --- examples/rsfmri_preprocessing.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 628c52f57a..00ef7944ca 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -720,7 +720,8 @@ def create_workflow(files, img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness - if args.field_maps: + +if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, @@ -735,7 +736,8 @@ def create_workflow(files, FM_TEdiff=float(args.TE_diff), FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) - else: + +else: wf = 
create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, From c16ad6cd05071d6ad8bb834b971ebd7383f7e878 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:44:28 -0400 Subject: [PATCH 41/60] tab issues --- examples/rsfmri_preprocessing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 00ef7944ca..75868f583e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -721,7 +721,7 @@ def create_workflow(files, slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness -if args.field_maps: + if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, @@ -737,7 +737,7 @@ def create_workflow(files, FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) -else: + else: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, From 8fc6abe8f5fa44082d0dc3c3e9e07187ccc0efcd Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:47:03 -0400 Subject: [PATCH 42/60] tab... --- examples/rsfmri_preprocessing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 75868f583e..51ba34e717 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -721,7 +721,7 @@ def create_workflow(files, slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness - if args.field_maps: + if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, @@ -737,7 +737,7 @@ def create_workflow(files, FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) - else: + else: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, From 68a433fe8692fd257c852bfb7e404053639c6f74 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 11:51:07 -0400 Subject: [PATCH 43/60] tab --- examples/rsfmri_preprocessing.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 51ba34e717..c1d932f366 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -720,9 +720,8 @@ def create_workflow(files, img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness - - if args.field_maps: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + if args.field_maps: + wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, @@ -736,9 +735,8 @@ def create_workflow(files, FM_TEdiff=float(args.TE_diff), FM_echo_spacing=float(args.echo_spacing), FM_sigma=int(args.sigma)) - else: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, From f11e0c6bab3310d4f57036ca3cc36849f43a7ccc Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 12:01:38 -0400 Subject: [PATCH 44/60] tab --- 
examples/rsfmri_preprocessing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index c1d932f366..665f99e678 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -720,7 +720,8 @@ def create_workflow(files, img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness - if args.field_maps: + + if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, From 3a50048df24d1b3d9954e011274878b1789bd5cb Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 12:14:40 -0400 Subject: [PATCH 45/60] tab --- examples/rsfmri_preprocessing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 665f99e678..27e2755aeb 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -721,6 +721,7 @@ def create_workflow(files, slice_thickness = max(img.get_header().get_zooms()[:3]) print TR, slice_times, slice_thickness + if args.field_maps: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, From 25754ac9345060f5742497c18234c1a03c98ba75 Mon Sep 17 00:00:00 2001 From: cdla Date: Wed, 28 Aug 2013 12:16:51 -0400 Subject: [PATCH 46/60] tab issue --- examples/rsfmri_preprocessing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 27e2755aeb..c8e0192fe2 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -716,10 +716,10 @@ def create_workflow(files, TR, slice_times, slice_thickness = get_info(args.dicom_file) if slice_thickness is None: - from nibabel import load - img = load(args.files[0]) - slice_thickness = max(img.get_header().get_zooms()[:3]) - print TR, slice_times, slice_thickness + from nibabel import load + img = load(args.files[0]) + slice_thickness = max(img.get_header().get_zooms()[:3]) + print TR, slice_times, slice_thickness if args.field_maps: From 839f8799fae2417e18c272d15f62d28cfafec667 Mon Sep 17 00:00:00 2001 From: cdla Date: Thu, 29 Aug 2013 11:25:26 -0400 Subject: [PATCH 47/60] repositioned int() and float() functions --- examples/rsfmri_preprocessing.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index c8e0192fe2..5fa0a0899e 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -701,11 +701,11 @@ def create_workflow(files, help="Plugin args") parser.add_argument("--field_maps", dest="field_maps", nargs="+", help="field map niftis") - parser.add_argument("--fm_echospacing",dest="echo_spacing", + parser.add_argument("--fm_echospacing",dest="echo_spacing", type=float, help="field map echo spacing") - parser.add_argument("--fm_TE_diff", dest='TE_diff', + parser.add_argument("--fm_TE_diff", dest='TE_diff', type=float, help="field map echo time difference") - parser.add_argument("--fm_sigma", dest='sigma', + parser.add_argument("--fm_sigma", dest='sigma', type=int, help="field map sigma value") args = parser.parse_args() @@ -734,9 +734,9 @@ def create_workflow(files, highpass_freq=args.highpass_freq, sink_directory=os.path.abspath(args.sink), fieldmap_images=args.field_maps, - FM_TEdiff=float(args.TE_diff), - 
FM_echo_spacing=float(args.echo_spacing), - FM_sigma=int(args.sigma)) + FM_TEdiff=args.TE_diff, + FM_echo_spacing=args.echo_spacing, + FM_sigma=args.sigma) else: wf = create_workflow([os.path.abspath(filename) for filename in args.files], subject_id=args.subject_id, From ad53ba9e752a6b507a49f61987c6646ce72715bc Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 2 Sep 2013 04:23:22 -0400 Subject: [PATCH 48/60] fix: write timeseries appropriately --- examples/rsfmri_preprocessing.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 5fa0a0899e..8e93408878 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -219,9 +219,9 @@ def extract_subrois(timeseries_file, label_file, indices): ijk = np.nonzero(rois == cmaindex) ts = data[ijk] for i0, row in enumerate(ts): - fp.writelines('%d,%d,%d,%d,' % (fsindex, ijk[0][i0], - ijk[1][i0], ijk[2][i0]) + - ','.join(['%.10f' % val for val in row])) + fp.write('%d,%d,%d,%d,' % (fsindex, ijk[0][i0], + ijk[1][i0], ijk[2][i0]) + + ','.join(['%.10f' % val for val in row]) + '\n') return out_ts_file @@ -664,10 +664,15 @@ def create_workflow(files, datasink, 'resting.parcellations.aparc') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'resting.parcellations.aparc.@avgwf') - wf.connect(combiner, 'out_file', - datasink, 'resting.parcellations.grayo.@surface') wf.connect(ts2txt, 'out_file', datasink, 'resting.parcellations.grayo.@subcortical') + datasink2 = Node(interface=DataSink(), name="datasink2") + datasink2.inputs.base_directory = sink_directory + datasink2.inputs.container = subject_id + datasink2.inputs.substitutions = [('_target_subject_', '')] + datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') #(r'(_.*)(\d+/)', r'run\2') + wf.connect(combiner, 'out_file', + datasink2, 'resting.parcellations.grayo.@surface') return wf if __name__ == "__main__": From fa1a1f8ccddc44a1fdad0a0a2ce05c31be0ab1e3 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 2 Sep 2013 11:53:41 +0200 Subject: [PATCH 49/60] sty: pep8 fixes --- examples/rsfmri_preprocessing.py | 43 +++++++++++++------------------- 1 file changed, 17 insertions(+), 26 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 8e93408878..6555f7a7d4 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -240,7 +240,7 @@ def combine_hemi(left, right): rh_data.squeeze())))) filename = 'combined_surf.txt' np.savetxt(filename, all_data, - fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] -1))) + fmt=','.join(['%d'] + ['%.10f'] * (all_data.shape[1] - 1))) return os.path.abspath(filename) @@ -281,8 +281,8 @@ def create_workflow(files, # Run AFNI's despike. This is always run, however, whether this is fed to # realign depends on the input configuration despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'), - iterfield=['in_file'], - name='despike') + iterfield=['in_file'], + name='despike') #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='} wf.connect(remove_vol, 'roi_file', despiker, 'in_file') @@ -496,12 +496,11 @@ def create_workflow(files, sampleaparc, 'segmentation_file') wf.connect(bandpass, 'out_file', sampleaparc, 'in_file') - # Sample the time series onto the surface of the target surface. 
Performs # sampling into left and right hemisphere target = Node(IdentityInterface(fields=['target_subject']), name='target') target.iterables = ('target_subject', filename_to_list(target_subject)) - + samplerlh = MapNode(freesurfer.SampleToSurface(), iterfield=['source_file'], name='sampler_lh') @@ -519,7 +518,7 @@ def create_workflow(files, wf.connect(bandpass, 'out_file', samplerlh, 'source_file') wf.connect(register, 'out_reg_file', samplerlh, 'reg_file') wf.connect(target, 'target_subject', samplerlh, 'target_subject') - + samplerrh.set_input('hemi', 'rh') wf.connect(bandpass, 'out_file', samplerrh, 'source_file') wf.connect(register, 'out_reg_file', samplerrh, 'reg_file') @@ -632,7 +631,7 @@ def create_workflow(files, datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id datasink.inputs.substitutions = [('_target_subject_', '')] - datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') #(r'(_.*)(\d+/)', r'run\2') + datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike') wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr') @@ -670,7 +669,7 @@ def create_workflow(files, datasink2.inputs.base_directory = sink_directory datasink2.inputs.container = subject_id datasink2.inputs.substitutions = [('_target_subject_', '')] - datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') #(r'(_.*)(\d+/)', r'run\2') + datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2') wf.connect(combiner, 'out_file', datasink2, 'resting.parcellations.grayo.@surface') return wf @@ -705,13 +704,13 @@ def create_workflow(files, default='plugin_args=dict()', help="Plugin args") parser.add_argument("--field_maps", dest="field_maps", nargs="+", - help="field map niftis") - parser.add_argument("--fm_echospacing",dest="echo_spacing", type=float, - help="field map echo spacing") + help="field map niftis") + parser.add_argument("--fm_echospacing", dest="echo_spacing", type=float, + help="field map echo spacing") parser.add_argument("--fm_TE_diff", dest='TE_diff', type=float, - help="field map echo time difference") - parser.add_argument("--fm_sigma", dest='sigma', type=int, - help="field map sigma value") + help="field map echo time difference") + parser.add_argument("--fm_sigma", dest='sigma', type=float, + help="field map sigma value") args = parser.parse_args() TR = args.TR @@ -724,11 +723,10 @@ def create_workflow(files, from nibabel import load img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) - print TR, slice_times, slice_thickness - if args.field_maps: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + wf = create_workflow([os.path.abspath(filename) for + filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, @@ -743,7 +741,8 @@ def create_workflow(files, FM_echo_spacing=args.echo_spacing, FM_sigma=args.sigma) else: - wf = create_workflow([os.path.abspath(filename) for filename in args.files], + wf = create_workflow([os.path.abspath(filename) for + filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, @@ -761,13 +760,5 @@ def create_workflow(files, wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) wf.base_dir = work_dir - #wf.write_graph(graph2use='flat') exec args.plugin_args - print plugin_args wf.run(**plugin_args) - -''' -#compute similarity 
matrix and partial correlation -def compute_similarity(): - return matrix -''' From 21ec38b4fdaa86efd8bb7b757c6d08e2486f1ca0 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Sun, 8 Sep 2013 13:33:57 -0500 Subject: [PATCH 50/60] fix: updated ants example to use new parameters from ants --- examples/smri_ants_registration.py | 36 ++++++++++++++++-------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py index 5a9c19cdf3..3db3e28972 100644 --- a/examples/smri_ants_registration.py +++ b/examples/smri_ants_registration.py @@ -59,27 +59,29 @@ reg.inputs.output_transform_prefix = 'thisTransform' reg.inputs.output_warped_image = 'INTERNAL_WARPED.nii.gz' +reg.inputs.output_transform_prefix = "output_" reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN'] -reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.3, 3.0, 0.0)] -reg.inputs.number_of_iterations = [[10000, 0, 0], [10000, 0, 0], [10000, 0, 0], [10, 0, 0]] +reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)] +reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 + + [[100, 50, 30]]) reg.inputs.dimension = 3 reg.inputs.write_composite_transform = True -reg.inputs.collapse_output_transforms = True -reg.inputs.metric = ['Mattes']*4 -reg.inputs.metric_weight = [1]*4 # Default (value ignored currently by ANTs) -reg.inputs.radius_or_number_of_bins = [32]*4 -reg.inputs.sampling_strategy = ['Regular']*3 + [None] -reg.inputs.sampling_percentage = [0.1]*3 + [None] -reg.inputs.convergence_threshold = [1.e-8]*4 -reg.inputs.convergence_window_size = [20]*4 -reg.inputs.smoothing_sigmas = [[4,2,1]]*3 + [[2,1,0]] -reg.inputs.sigma_units = ['vox']*4 -reg.inputs.shrink_factors = [[6,4,2]]*3 + [[4,2,1]] -reg.inputs.use_estimate_learning_rate_once = [True, True, True, True] -reg.inputs.use_histogram_matching = [False]*3 + [True] # This is the default +reg.inputs.collapse_output_transforms = False +reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']] +reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]] +reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]] +reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]] +reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]] +reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01] +reg.inputs.convergence_window_size = [20] * 3 + [5] +reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]] +reg.inputs.sigma_units = ['vox'] * 4 +reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]]*2 + [[4, 2, 1]] +reg.inputs.use_estimate_learning_rate_once = [True] * 4 +reg.inputs.use_histogram_matching = [False] * 3 + [True] reg.inputs.initial_moving_transform_com = True -reg.inputs.output_warped_image = True -reg.cmdline + +print reg.cmdline """ From 7b501edff1393e5bf3f96a60ebeda6130f414b3d Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Sun, 8 Sep 2013 18:23:37 -0500 Subject: [PATCH 51/60] enh: replaced erosion with slicethickness and added epimask support for bbregister --- examples/rsfmri_preprocessing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 6555f7a7d4..28544b0444 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -319,6 +319,7 @@ def create_workflow(files, register.inputs.init = 'fsl' register.inputs.contrast_type = 't2' register.inputs.out_fsl_file = True + register.inputs.epi_mask = True # Compute fieldmaps and unwarp 
using them if fieldmap_images: @@ -356,7 +357,7 @@ def create_workflow(files, wmcsf.inputs.wm_ven_csf = True wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63] wmcsf.inputs.binary_file = 'wmcsf.nii.gz' - wmcsf.inputs.erode = 1 + wmcsf.inputs.erode = int(slice_thickness) wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') if fieldmap_images: wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file') From 3d21ad1e2476da535dbc7afaa57c590fead21679 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 13:47:14 -0500 Subject: [PATCH 52/60] enh: compressing more into functions --- examples/rsfmri_preprocessing.py | 115 +++++++++++++------------------ 1 file changed, 49 insertions(+), 66 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 28544b0444..e0f88cfa88 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -31,7 +31,15 @@ from nipype.utils.filemanip import filename_to_list import numpy as np +import scipy as sp +import nibabel as nb +imports = ['import os', + 'import nibabel as nb', + 'import numpy as np', + 'import scipy as sp', + 'from nipype.utils.filemanip import filename_to_list' + ] def median(in_files): """Computes an average of the median of each realigned timeseries @@ -47,10 +55,6 @@ def median(in_files): out_file: a 3D Nifti file """ - import os - import nibabel as nb - import numpy as np - from nipype.utils.filemanip import filename_to_list average = None for idx, filename in enumerate(filename_to_list(in_files)): img = nb.load(filename) @@ -98,9 +102,6 @@ def motion_regressors(motion_params, order=2, derivatives=2): motion + d(motion)/dt + d2(motion)/dt2 (linear + quadratic) """ - from nipype.utils.filemanip import filename_to_list - import numpy as np - import os out_files = [] for idx, filename in enumerate(filename_to_list(motion_params)): params = np.genfromtxt(filename) @@ -137,9 +138,6 @@ def build_filter1(motion_params, comp_norm, outliers): components_file: a text file containing all the regressors """ - from nipype.utils.filemanip import filename_to_list - import numpy as np - import os out_files = [] for idx, filename in enumerate(filename_to_list(motion_params)): params = np.genfromtxt(filename) @@ -172,14 +170,8 @@ def extract_noise_components(realigned_file, mask_file, num_components=6): ------- components_file: a text file containing the noise components """ - - import os - from nibabel import load - import numpy as np - import scipy as sp - - imgseries = load(realigned_file) - noise_mask = load(mask_file) + imgseries = nb.load(realigned_file) + noise_mask = nb.load(mask_file) voxel_timecourses = imgseries.get_data()[np.nonzero(noise_mask.get_data())] voxel_timecourses = voxel_timecourses.byteswap().newbyteorder() voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0 @@ -206,9 +198,6 @@ def extract_subrois(timeseries_file, label_file, indices): The first four columns are: freesurfer index, i, j, k positions in the label file """ - import os - import nibabel as nb - import numpy as np img = nb.load(timeseries_file) data = img.get_data() roiimg = nb.load(label_file) @@ -228,11 +217,8 @@ def extract_subrois(timeseries_file, label_file, indices): def combine_hemi(left, right): """Combine left and right hemisphere time series into a single text file """ - import os - from nibabel import load - import numpy as np - lh_data = load(left).get_data() - rh_data = load(right).get_data() + lh_data = nb.load(left).get_data() + rh_data = 
nb.load(right).get_data() indices = np.vstack((1000000 + np.arange(0, lh_data.shape[0])[:, None], 2000000 + np.arange(0, rh_data.shape[0])[:, None])) @@ -675,6 +661,42 @@ def create_workflow(files, datasink2, 'resting.parcellations.grayo.@surface') return wf + +""" +Creates the full workflow including getting information from dicom files +""" + +def create_resting_workflow(args): + TR = args.TR + slice_times = args.slice_times + slice_thickness = None + if args.dicom_file: + TR, slice_times, slice_thickness = get_info(args.dicom_file) + + if slice_thickness is None: + from nibabel import load + img = load(args.files[0]) + slice_thickness = max(img.get_header().get_zooms()[:3]) + + kwargs = dict(files =[os.path.abspath(filename) for + filename in args.files], + subject_id=args.subject_id, + n_vol=args.n_vol, + despike=args.despike, + TR=TR, + slice_times=slice_times, + slice_thickness=slice_thickness, + lowpass_freq=args.lowpass_freq, + highpass_freq=args.highpass_freq, + sink_directory=os.path.abspath(args.sink)) + if args.field_maps: + kwargs.update(**dict(fieldmap_images=args.field_maps, + FM_TEdiff=args.TE_diff, + FM_echo_spacing=args.echo_spacing, + FM_sigma=args.sigma)) + wf = create_workflow(**kwargs) + return wf + if __name__ == "__main__": from argparse import ArgumentParser parser = ArgumentParser(description=__doc__) @@ -714,52 +736,13 @@ def create_workflow(files, help="field map sigma value") args = parser.parse_args() - TR = args.TR - slice_times = args.slice_times - slice_thickness = None - if args.dicom_file: - TR, slice_times, slice_thickness = get_info(args.dicom_file) - - if slice_thickness is None: - from nibabel import load - img = load(args.files[0]) - slice_thickness = max(img.get_header().get_zooms()[:3]) - - if args.field_maps: - wf = create_workflow([os.path.abspath(filename) for - filename in args.files], - subject_id=args.subject_id, - n_vol=args.n_vol, - despike=args.despike, - TR=TR, - slice_times=slice_times, - slice_thickness=slice_thickness, - lowpass_freq=args.lowpass_freq, - highpass_freq=args.highpass_freq, - sink_directory=os.path.abspath(args.sink), - fieldmap_images=args.field_maps, - FM_TEdiff=args.TE_diff, - FM_echo_spacing=args.echo_spacing, - FM_sigma=args.sigma) - else: - wf = create_workflow([os.path.abspath(filename) for - filename in args.files], - subject_id=args.subject_id, - n_vol=args.n_vol, - despike=args.despike, - TR=TR, - slice_times=slice_times, - slice_thickness=slice_thickness, - lowpass_freq=args.lowpass_freq, - highpass_freq=args.highpass_freq, - sink_directory=os.path.abspath(args.sink)) + wf = create_resting_workflow(args) if args.work_dir: work_dir = os.path.abspath(args.work_dir) else: work_dir = os.getcwd() - wf.config['execution'].update(**{'remove_unnecessary_outputs': False}) wf.base_dir = work_dir exec args.plugin_args wf.run(**plugin_args) From cc2dd07705647ddae7dab257d2545da6693d8b0f Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 15:52:37 -0500 Subject: [PATCH 53/60] fix: add imports to the functions --- examples/rsfmri_preprocessing.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index e0f88cfa88..d9b0cf99bb 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -294,7 +294,8 @@ def create_workflow(files, # Compute the median image across runs calc_median = Node(Function(input_names=['in_files'], output_names=['median_file'], - function=median), + 
function=median, + imports=imports), name='median') wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') @@ -387,7 +388,8 @@ def create_workflow(files, motreg = Node(Function(input_names=['motion_params', 'order', 'derivatives'], output_names=['out_files'], - function=motion_regressors), + function=motion_regressors, + imports=imports), name='getmotionregress') wf.connect(realign, 'par_file', motreg, 'motion_params') @@ -395,7 +397,8 @@ def create_workflow(files, createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm', 'outliers'], output_names=['out_files'], - function=build_filter1), + function=build_filter1, + imports=imports), name='makemotionbasedfilter') wf.connect(motreg, 'out_files', createfilter1, 'motion_params') wf.connect(art, 'norm_files', createfilter1, 'comp_norm') @@ -417,7 +420,8 @@ def create_workflow(files, createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file', 'num_components'], output_names=['out_files'], - function=extract_noise_components), + function=extract_noise_components, + imports=imports), iterfield=['realigned_file'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components @@ -514,7 +518,8 @@ def create_workflow(files, # Combine left and right hemisphere to text file combiner = MapNode(Function(input_names=['left', 'right'], output_names=['out_file'], - function=combine_hemi), + function=combine_hemi, + imports=imports), iterfield=['left', 'right'], name="combiner") wf.connect(samplerlh, 'out_file', combiner, 'left') @@ -601,7 +606,8 @@ def create_workflow(files, ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file', 'indices'], output_names=['out_file'], - function=extract_subrois), + function=extract_subrois, + imports=imports), iterfield=['timeseries_file'], name='getsubcortts') ts2txt.inputs.indices = dict(zip([8] + range(10, 14) + [17, 18, 26, 47] + From 080137629d8880f399be6c18c1444190ccb44ae4 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 16:08:37 -0500 Subject: [PATCH 54/60] fix: use ceiling of slice thickness for erosion purposes --- examples/rsfmri_preprocessing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index d9b0cf99bb..f945031fcb 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -344,7 +344,7 @@ def create_workflow(files, wmcsf.inputs.wm_ven_csf = True wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63] wmcsf.inputs.binary_file = 'wmcsf.nii.gz' - wmcsf.inputs.erode = int(slice_thickness) + wmcsf.inputs.erode = int(np.ceil(slice_thickness)) wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file') if fieldmap_images: wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file') @@ -354,8 +354,8 @@ def create_workflow(files, wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file') mask.inputs.binary_file = 'mask.nii.gz' - mask.inputs.dilate = int(slice_thickness) + 1 - mask.inputs.erode = int(slice_thickness) + mask.inputs.dilate = int(np.ceil(slice_thickness)) + 1 + mask.inputs.erode = int(np.ceil(slice_thickness)) mask.inputs.min = 0.5 wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file') masktransform = wmcsftransform.clone("masktransform") From f4c5282a9a8e3d7c708d4e7ec3b56725549a2f6f Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 16:25:51 -0500 Subject: [PATCH 55/60] fix: moved imports for dicominfo outside of the function --- 
examples/rsfmri_preprocessing.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index f945031fcb..404214f888 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -33,6 +33,8 @@ import numpy as np import scipy as sp import nibabel as nb +from dcmstack.extract import default_extractor +from dicom import read_file imports = ['import os', 'import nibabel as nb', @@ -87,9 +89,6 @@ def get_info(dicom_files): Slice Acquisition Times Spacing between slices """ - from dcmstack.extract import default_extractor - from dicom import read_file - from nipype.utils.filemanip import filename_to_list meta = default_extractor(read_file(filename_to_list(dicom_files)[0], stop_before_pixels=True, force=True)) From c2031ca3dcff83fb4de865c02e7348ebddc3cda6 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 16:40:29 -0500 Subject: [PATCH 56/60] api: always run datasink and let copyfiles decide if things need to be copied or not. in case inputs are directories, this will allow refreshing outputs. --- nipype/interfaces/io.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 07a53b831b..e70c1349c6 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -214,7 +214,7 @@ class DataSink(IOBase): input_spec = DataSinkInputSpec output_spec = DataSinkOutputSpec - def __init__(self, infields=None, **kwargs): + def __init__(self, infields=None, force_run=True, **kwargs): """ Parameters ---------- @@ -232,6 +232,8 @@ def __init__(self, infields=None, **kwargs): self.inputs._outputs[key] = Undefined undefined_traits[key] = Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) + if force_run: + self._always_run = True def _get_dst(self, src): ## If path is directory with trailing os.path.sep, From 90608b0e5da0f0044f70837cc51c0fb39216a150 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 17:52:23 -0500 Subject: [PATCH 57/60] enh: added SpaceTimeRealign from nipy 0.4.dev --- nipype/interfaces/nipy/__init__.py | 2 +- nipype/interfaces/nipy/preprocess.py | 130 ++++++++++++++++++++++++++- 2 files changed, 130 insertions(+), 2 deletions(-) diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index ec541b443f..d78ae64447 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,3 +1,3 @@ from .model import FitGLM, EstimateContrast -from .preprocess import ComputeMask, FmriRealign4d +from .preprocess import ComputeMask, FmriRealign4d, SpaceTimeRealigner from .utils import Similarity diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 46f514f9da..b0c12ea9e0 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -24,6 +24,8 @@ from nipy.labs.mask import compute_mask from nipy.algorithms.registration import FmriRealign4d as FR4d from nipy import save_image, load_image + from nipy.algorithms.registration import SpaceTimeRealign + from nipy.algorithms.registration.groupwise_registration import SpaceRealign from ..base import (TraitedSpec, BaseInterface, traits, BaseInterfaceInputSpec, isdefined, File, @@ -82,7 +84,7 @@ class FmriRealign4dInputSpec(BaseInterfaceInputSpec): desc="File to realign") tr = traits.Float(desc="TR in seconds", mandatory=True) - slice_order = traits.List(traits.Int(), maxver=0.3, + slice_order = 
traits.List(traits.Int(), desc=('0 based slice order. This would be equivalent to entering' 'np.argsort(spm_slice_order) for this field. This effects' 'interleaved acquisition. This field will be deprecated in' @@ -117,6 +119,7 @@ class FmriRealign4dOutputSpec(TraitedSpec): desc="Motion parameter files") +@np.deprecate_with_doc('Please use SpaceTimeRealign instead') class FmriRealign4d(BaseInterface): """Simultaneous motion and slice timing correction algorithm @@ -196,6 +199,131 @@ def _list_outputs(self): return outputs +class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec): + + in_file = InputMultiPath(exists=True, + mandatory=True, + desc="File to realign") + tr = traits.Float(desc="TR in seconds", requires=['slice_times']) + slice_times = traits.Either(traits.List(traits.Float()), + traits.Enum('asc_alt_2', 'asc_alt_2_1', + 'asc_alt_half', 'asc_alt_siemens', + 'ascending', 'desc_alt_2', + 'desc_alt_half', 'descending'), + desc=('Actual slice acquisition times.')) + slice_info = traits.Either(traits.Int, + traits.List(min_len=2, max_len=2), + desc=('Single integer or length 2 sequence ' + 'If int, the axis in `images` that is the ' + 'slice axis. In a 4D image, this will ' + 'often be axis = 2. If a 2 sequence, then' + ' elements are ``(slice_axis, ' + 'slice_direction)``, where ``slice_axis`` ' + 'is the slice axis in the image as above, ' + 'and ``slice_direction`` is 1 if the ' + 'slices were acquired slice 0 first, slice' + ' -1 last, or -1 if acquired slice -1 ' + 'first, slice 0 last. If `slice_info` is ' + 'an int, assume ' + '``slice_direction`` == 1.'), + requires=['slice_times'], + ) + + +class SpaceTimeRealignerOutputSpec(TraitedSpec): + out_file = OutputMultiPath(File(exists=True), + desc="Realigned files") + par_file = OutputMultiPath(File(exists=True), + desc=("Motion parameter files. Angles are not " + "euler angles")) + + +class SpaceTimeRealigner(BaseInterface): + """Simultaneous motion and slice timing correction algorithm + + If slice_times is not specified, this algorithm performs spatial motion + correction + + This interface wraps nipy's SpaceTimeRealign algorithm [1]_ or simply the + SpatialRealign algorithm when timing info is not provided. + + Examples + -------- + >>> from nipype.interfaces.nipy import SpaceTimeRealigner + >>> #Run spatial realignment only + >>> realigner = SpaceTimeRealigner() + >>> realigner.inputs.in_file = ['functional.nii'] + >>> res = realigner.run() # doctest: +SKIP + + >>> realigner = SpaceTimeRealigner() + >>> realigner.inputs.in_file = ['functional.nii'] + >>> realigner.inputs.tr = 2 + >>> realigner.inputs.slice_times = range(0, 3, 67) + >>> realigner.inputs.slice_info = 2 + >>> res = realigner.run() # doctest: +SKIP + + + References + ---------- + .. [1] Roche A. A four-dimensional registration algorithm with \ + application to joint correction of motion and slice timing \ + in fMRI. IEEE Trans Med Imaging. 2011 Aug;30(8):1546-54. DOI_. + + .. 
_DOI: http://dx.doi.org/10.1109/TMI.2011.2131152 + + """ + + input_spec = SpaceTimeRealignerInputSpec + output_spec = SpaceTimeRealignerOutputSpec + keywords = ['slice timing', 'motion correction'] + + def _run_interface(self, runtime): + + all_ims = [load_image(fname) for fname in self.inputs.in_file] + + if not isdefined(self.inputs.slice_times): + R = SpaceRealign(all_ims) + else: + R = SpaceTimeRealign(all_ims, + tr=self.inputs.tr, + slice_times=self.inputs.slice_times, + slice_info=self.inputs.slice_info, + ) + + R.estimate(refscan=None) + + corr_run = R.resample() + self._out_file_path = [] + self._par_file_path = [] + + for j, corr in enumerate(corr_run): + self._out_file_path.append(os.path.abspath('corr_%s.nii.gz' % + (split_filename(self.inputs.in_file[j])[1]))) + save_image(corr, self._out_file_path[j]) + + self._par_file_path.append(os.path.abspath('%s.par' % + (os.path.split(self.inputs.in_file[j])[1]))) + mfile = open(self._par_file_path[j], 'w') + motion = R._transforms[j] + # nipy does not encode euler angles. return in original form of + # translation followed by rotation vector see: + # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula + for i, mo in enumerate(motion): + params = ['%.10f' % item for item in np.hstack((mo.translation, + mo.rotation))] + string = ' '.join(params) + '\n' + mfile.write(string) + mfile.close() + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['out_file'] = self._out_file_path + outputs['par_file'] = self._par_file_path + return outputs + + class TrimInputSpec(BaseInterfaceInputSpec): in_file = File( exists=True, mandatory=True, From f51b95146f268146574efd86725360e4859ab879 Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 18:00:27 -0500 Subject: [PATCH 58/60] enh: replace realignment routine with newer Nipype interface for Nipy's newer class --- examples/rsfmri_preprocessing.py | 55 ++++++++++++++++---------------- 1 file changed, 28 insertions(+), 27 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 404214f888..9dadf30a7c 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -14,9 +14,11 @@ For example: -python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii -s subj001 -n 2 --despike -o output -p "plugin_args=dict(plugin='PBS', plugin_args=dict(qsub_args='-q many'))" - +python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii + -s subj001 -n 2 --despike -o output + -p "plugin_args=dict(plugin='PBS', plugin_args=dict(qsub_args='-q many'))" """ + import os from nipype import (ants, afni, fsl, freesurfer, nipy, Function, DataSink) @@ -43,6 +45,23 @@ 'from nipype.utils.filemanip import filename_to_list' ] + +def get_info(dicom_files): + """Given a Siemens dicom file return metadata + + Returns + ------- + RepetitionTime + Slice Acquisition Times + Spacing between slices + """ + meta = default_extractor(read_file(filename_to_list(dicom_files)[0], + stop_before_pixels=True, + force=True)) + return (meta['RepetitionTime']/1000., meta['CsaImage.MosaicRefAcqTimes'], + meta['SpacingBetweenSlices']) + + def median(in_files): """Computes an average of the median of each realigned timeseries @@ -80,22 +99,6 @@ def get_aparc_aseg(files): raise ValueError('aparc+aseg.mgz not found') -def get_info(dicom_files): - """Given a Siemens dicom file return metadata - - Returns - ------- - RepetitionTime - Slice Acquisition Times - Spacing between slices - """ - meta = 
default_extractor(read_file(filename_to_list(dicom_files)[0], - stop_before_pixels=True, - force=True)) - return (meta['RepetitionTime']/1000., meta['CsaImage.MosaicRefAcqTimes'], - meta['SpacingBetweenSlices']) - - def motion_regressors(motion_params, order=2, derivatives=2): """Compute motion regressors upto given order and derivative @@ -273,14 +276,11 @@ def create_workflow(files, wf.connect(remove_vol, 'roi_file', despiker, 'in_file') # Run Nipy joint slice timing and realignment algorithm - realign = Node(nipy.FmriRealign4d(), name='realign') + realign = Node(nipy.SpaceTimeRealigner(), name='realign') realign.inputs.tr = TR - realign.inputs.time_interp = True - # FIX # dbg - # This double argsort is necessary to convert slice order to space order - # that's required by nipy realign currently. This will change in the next - # release of Nipy. - realign.inputs.slice_order = np.argsort(np.argsort(slice_times)).tolist() + realign.inputs.slice_times = (np.array(slice_times)/1000.).tolist() + realign.inputs.slice_info = 2 + if despike: wf.connect(despiker, 'out_file', realign, 'in_file') else: @@ -671,6 +671,7 @@ def create_workflow(files, Creates the full workflow including getting information from dicom files """ + def create_resting_workflow(args): TR = args.TR slice_times = args.slice_times @@ -683,8 +684,8 @@ def create_resting_workflow(args): img = load(args.files[0]) slice_thickness = max(img.get_header().get_zooms()[:3]) - kwargs = dict(files =[os.path.abspath(filename) for - filename in args.files], + kwargs = dict(files=[os.path.abspath(filename) for + filename in args.files], subject_id=args.subject_id, n_vol=args.n_vol, despike=args.despike, From 7b18731f22b9bc6b10d3ffd02344d9613a4c33ae Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 20:02:22 -0500 Subject: [PATCH 59/60] fix: replaced plugin_args with proper arguments --- examples/rsfmri_preprocessing.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 9dadf30a7c..9d401fb24d 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -16,7 +16,7 @@ python rsfmri_preprocessing.py -d /data/12345-34-1.dcm -f /data/Resting.nii -s subj001 -n 2 --despike -o output - -p "plugin_args=dict(plugin='PBS', plugin_args=dict(qsub_args='-q many'))" + -p PBS --plugin_args "dict(qsub_args='-q many')" """ import os @@ -471,7 +471,8 @@ def create_workflow(files, else: wf.connect(calc_median, 'median_file', aparctransform, 'source_file') wf.connect(register, 'out_reg_file', aparctransform, 'reg_file') - wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparctransform, 'target_file') + wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), + aparctransform, 'target_file') # Sample the average time series in aparc ROIs sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True, @@ -729,9 +730,11 @@ def create_resting_workflow(args): help="Output directory base") parser.add_argument("-w", "--work_dir", dest="work_dir", help="Output directory base") - parser.add_argument("-p", "--plugin_args", dest="plugin_args", - default='plugin_args=dict()', - help="Plugin args") + parser.add_argument("-p", "--plugin", dest="plugin", + default='Linear', + help="Plugin to use") + parser.add_argument("--plugin_args", dest="plugin_args", + help="Plugin arguments") parser.add_argument("--field_maps", dest="field_maps", nargs="+", help="field map niftis") parser.add_argument("--fm_echospacing", 
dest="echo_spacing", type=float, @@ -750,5 +753,7 @@ def create_resting_workflow(args): work_dir = os.getcwd() wf.base_dir = work_dir - exec args.plugin_args - wf.run(**plugin_args) + if args.plugin_args: + wf.run(args.plugin, plugin_args=eval(args.plugin_args)) + else: + wf.run(args.plugin) From 489f65691ca536680b0811fff86c9c7dc7d422af Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Mon, 9 Sep 2013 20:15:58 -0500 Subject: [PATCH 60/60] fix: set slice_times as seconds outside workflow generating function --- examples/rsfmri_preprocessing.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/examples/rsfmri_preprocessing.py b/examples/rsfmri_preprocessing.py index 9d401fb24d..1cc8360ab6 100644 --- a/examples/rsfmri_preprocessing.py +++ b/examples/rsfmri_preprocessing.py @@ -278,7 +278,7 @@ def create_workflow(files, # Run Nipy joint slice timing and realignment algorithm realign = Node(nipy.SpaceTimeRealigner(), name='realign') realign.inputs.tr = TR - realign.inputs.slice_times = (np.array(slice_times)/1000.).tolist() + realign.inputs.slice_times = slice_times realign.inputs.slice_info = 2 if despike: @@ -679,6 +679,7 @@ def create_resting_workflow(args): slice_thickness = None if args.dicom_file: TR, slice_times, slice_thickness = get_info(args.dicom_file) + slice_times = (np.array(slice_times)/1000.).tolist() if slice_thickness is None: from nibabel import load @@ -719,9 +720,9 @@ def create_resting_workflow(args): parser.add_argument("--despike", dest="despike", default=False, action="store_true", help="Use despiked data") parser.add_argument("--TR", dest="TR", default=None, - help="TR if dicom not provided") + help="TR if dicom not provided in seconds") parser.add_argument("--slice_times", dest="slice_times", nargs="+", - help="Slice times") + type=float, help="Slice times in seconds") parser.add_argument("-l", "--lowpass_freq", dest="lowpass_freq", default=-1, help="Low pass frequency (Hz)") parser.add_argument("-u", "--highpass_freq", dest="highpass_freq",