Merge pull request #223 from chrisfilo/enh/mem_mangement
[RTM] memory consumption driven scheduling
chrisgorgo committed Dec 6, 2016
2 parents aba85a4 + 45315d6 commit b8714c8
Showing 6 changed files with 31 additions and 4 deletions.
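In outline, the change works like this: the size of the subject's largest EPI file is measured once, stored in the shared settings dict, and every node that must hold a full 4D series in memory advertises an estimated footprint of three times that size. A resource-aware scheduler can then throttle concurrency by memory rather than by CPU count alone. A minimal sketch of how such estimates get consumed, assuming nipype's MultiProc plugin of this era (the budget argument name is an assumption; it varied across nipype releases, e.g. 'memory' vs. 'memory_gb'):

    # Hedged sketch, not part of this diff: run with a memory budget so the
    # sum of running nodes' estimated_memory_gb stays below 16 GB.
    workflow.run(plugin='MultiProc',
                 plugin_args={'n_procs': 8, 'memory_gb': 16})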
8 changes: 8 additions & 0 deletions fmriprep/utils/misc.py
@@ -114,5 +114,13 @@ def collect_bids_data(dataset, subject, session=None, run=None):
     return imaging_data
 
 
+def get_biggest_epi_file_size_gb(files):
+    max_size = 0
+    for file in files:
+        size = os.path.getsize(file)/(1024*1024*1024)
+        if size > max_size:
+            max_size = size
+    return max_size
+
 if __name__ == '__main__':
     pass
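The helper returns the size, in gigabytes, of the largest file in a list (strictly GiB, since it divides by 1024**3). A quick usage sketch with hypothetical paths:

    # Hypothetical BIDS paths; a ~1.3 GB on-disk run yields roughly 1.2.
    epi_files = ['/bids/sub-01/func/sub-01_task-rest_run-1_bold.nii.gz',
                 '/bids/sub-01/func/sub-01_task-rest_run-2_bold.nii.gz']
    largest_gb = get_biggest_epi_file_size_gb(epi_files)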
5 changes: 4 additions & 1 deletion fmriprep/workflows/base.py
@@ -11,7 +11,7 @@
 from nipype.interfaces import fsl
 
 from fmriprep.interfaces import BIDSDataGrabber
-from fmriprep.utils.misc import collect_bids_data
+from fmriprep.utils.misc import collect_bids_data, get_biggest_epi_file_size_gb
 from fmriprep.workflows import confounds
 
 from fmriprep.workflows.anatomical import t1w_preprocessing
@@ -36,6 +36,9 @@ def base_workflow_enumerator(subject_list, settings):
 
 def base_workflow_generator(subject_id, settings):
     subject_data = collect_bids_data(settings['bids_root'], subject_id)
+
+    settings["biggest_epi_file_size_gb"] = get_biggest_epi_file_size_gb(subject_data['func'])
+
     if subject_data['t1w'] != [] and subject_data['sbref'] != []:
         return wf_ds054_type(subject_data, settings, name=subject_id)
     if subject_data['t1w'] != [] and subject_data['sbref'] == []:
10 changes: 10 additions & 0 deletions fmriprep/workflows/confounds.py
@@ -40,17 +40,27 @@ def discover_wf(settings, name="ConfoundDiscoverer"):
     signals = pe.Node(nilearn.SignalExtraction(include_global=True, detrend=True,
                                                class_labels=FAST_DEFAULT_SEGS),
                       name="SignalExtraction")
+    signals.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
     # DVARS
     dvars = pe.Node(confounds.ComputeDVARS(save_all=True, remove_zerovariance=True),
                     name="ComputeDVARS")
+    dvars.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
     # Frame displacement
     frame_displace = pe.Node(confounds.FramewiseDisplacement(), name="FramewiseDisplacement")
+    frame_displace.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
     # CompCor
     tcompcor = pe.Node(confounds.TCompCor(components_file='tcompcor.tsv'), name="tCompCor")
+    tcompcor.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
     acompcor_roi = pe.Node(mask.BinarizeSegmentation(
         false_values=[FAST_DEFAULT_SEGS.index('GrayMatter') + 1, 0]),  # 0 denotes background
         name="CalcaCompCorROI")
     acompcor = pe.Node(confounds.ACompCor(components_file='acompcor.tsv'), name="aCompCor")
+    acompcor.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
 
     # misc utilities
     concat = pe.Node(utility.Function(function=_gather_confounds, input_names=['signals', 'dvars',
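The 3x multiplier looks like a rule of thumb: a decompressed 4D series plus the working copies an interface makes can easily need several times the on-disk size. Illustrative arithmetic only (the numbers below are assumed, not from this diff):

    # With the subject's largest EPI at 1.5 GB, each tagged node claims
    # 4.5 GB; a 16 GB budget then admits at most three such nodes at once
    # (3 * 4.5 = 13.5 <= 16 < 4 * 4.5).
    biggest_epi_gb = 1.5
    per_node_gb = biggest_epi_gb * 3         # 4.5
    max_concurrent = int(16 // per_node_gb)  # 3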
5 changes: 5 additions & 0 deletions fmriprep/workflows/epi.py
@@ -44,6 +44,7 @@ def epi_hmc(name='EPI_HMC', settings=None):
     # Head motion correction (hmc)
     hmc = pe.Node(fsl.MCFLIRT(
         save_mats=True, save_plots=True, mean_vol=True), name='EPI_hmc')
+    hmc.interface.estimated_memory_gb = settings["biggest_epi_file_size_gb"] * 3
 
     hcm2itk = pe.MapNode(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          iterfield=['transform_file'], name='hcm2itk')
@@ -353,6 +354,8 @@ def _aslist(in_value):
                               '1mm_T1.nii.gz')
 
     split = pe.Node(fsl.Split(dimension='t'), name='SplitEPI')
+    split.interface.estimated_memory_gb = settings["biggest_epi_file_size_gb"] * 3
+
     merge_transforms = pe.MapNode(niu.Merge(3),
                                   iterfield=['in3'], name='MergeTransforms')
     epi_to_mni_transform = pe.MapNode(
@@ -362,6 +365,8 @@ def _aslist(in_value):
     merge = pe.Node(niu.Function(input_names=["in_files"],
                                  output_names=["merged_file"],
                                  function=nii_concat), name='MergeEPI')
+    merge.interface.estimated_memory_gb = settings[
+        "biggest_epi_file_size_gb"] * 3
 
     mask_merge_tfms = pe.Node(niu.Merge(2), name='MaskMergeTfms')
     mask_mni_tfm = pe.Node(
5 changes: 3 additions & 2 deletions test/workflows/test_base.py
@@ -11,7 +11,7 @@ def test_wf_ds054_type(self, _):
         # set up
         mock_subject_data = {'t1w': ['um'], 'sbref': ['um'], 'func': 'um'}
         mock_settings = {'output_dir': '.', 'work_dir': '.',
-                         'ants_nthreads': 1}
+                         'ants_nthreads': 1, 'biggest_epi_file_size_gb': 1}
 
         # run
         wf054 = wf_ds054_type(mock_subject_data, mock_settings)
@@ -38,7 +38,8 @@ def test_wf_ds054_type(self, _):
     def test_wf_ds005_type(self, _):
         # set up
         mock_subject_data = {'func': ''}
-        mock_settings = {'output_dir': '.', 'ants_nthreads': 1}
+        mock_settings = {'output_dir': '.', 'ants_nthreads': 1,
+                         'biggest_epi_file_size_gb': 1}
 
         # run
         wf005 = wf_ds005_type(mock_subject_data, mock_settings)
2 changes: 1 addition & 1 deletion test/workflows/test_confounds.py
@@ -17,7 +17,7 @@ class TestConfounds(TestWorkflow):
 
     def test_discover_wf(self):
         # run
-        workflow = discover_wf(stub.settings())
+        workflow = discover_wf(stub.settings({'biggest_epi_file_size_gb': 1}))
         workflow.write_hierarchical_dotfile()
 
         # assert
