From 43e8bffd3b12f9ab597f0e9f1cd5b01cc63a1de7 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 15 Dec 2021 12:02:38 -0500 Subject: [PATCH 01/18] WIP: Anatomical derivatives --- nibabies/data/anat-io-spec.json | 84 +++++++++++++++++++++++++++++++++ nibabies/utils/bids.py | 82 ++++++++++++++++++++++++++++++++ 2 files changed, 166 insertions(+) create mode 100644 nibabies/data/anat-io-spec.json diff --git a/nibabies/data/anat-io-spec.json b/nibabies/data/anat-io-spec.json new file mode 100644 index 00000000..6994ad7f --- /dev/null +++ b/nibabies/data/anat-io-spec.json @@ -0,0 +1,84 @@ +{ + "queries": { + "baseline": { + "preproc": { + "datatype": "anat", + "desc": "preproc", + "suffix": "T1w" + }, + "mask": { + "datatype": "anat", + "desc": "brain", + "suffix": "mask" + }, + "dseg": { + "datatype": "anat", + "suffix": "dseg" + }, + "tpms": { + "datatype": "anat", + "label": ["GM", "WM", "CSF"], + "suffix": "probseg" + } + }, + "std_xfms": { + "anat2std_xfm": { + "datatype": "anat", + "extension": "h5", + "from": "T1w", + "to": [], + "suffix": "xfm", + "mode": "image" + }, + "std2anat_xfm": { + "datatype": "anat", + "extension": "h5", + "from": [], + "to": "T1w", + "suffix": "xfm", + "mode": "image" + } + }, + "surfaces": { + "t1w_aseg": { + "datatype": "anat", + "desc": "aseg", + "suffix": "dseg" + }, + "t1w_aparc": { + "datatype": "anat", + "desc": "aparcaseg", + "suffix": "dseg" + }, + "t1w2fsnative_xfm": { + "datatype": "anat", + "from": "T1w", + "to": "fsnative", + "extension": "txt", + "suffix": "xfm", + "mode": "image" + }, + "fsnative2t1w_xfm": { + "datatype": "anat", + "from": "fsnative", + "to": "T1w", + "extension": "txt", + "suffix": "xfm", + "mode": "image" + }, + "surfaces": { + "datatype": "anat", + "hemi": ["L", "R"], + "extension": "surf.gii", + "suffix": [ "inflated", "midthickness", "pial", "smoothwm"] + } + } + }, + "patterns": [ + 
"sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_hemi-{hemi}_{suffix}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_desc-{desc}_{suffix|mask}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_label-{label}[_desc-{desc}]_{suffix|probseg}.{extension|nii.gz}" + ] +} diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 88ee23f4..f76fec53 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -304,3 +304,85 @@ def validate_input_dir(exec_env, bids_dir, participant_label): subprocess.check_call(["bids-validator", str(bids_dir), "-c", temp.name]) except FileNotFoundError: print("bids-validator does not appear to be installed", file=sys.stderr) + + +def collect_derivatives( + derivatives_dir, subject_id, std_spaces, freesurfer, spec=None, patterns=None, +): + """Gather existing derivatives and compose a cache.""" + from collections import defaultdict + import json + from pkg_resources import resource_filename as pkgrf + + from bids.layout.writing import build_path + + + if spec is None or patterns is None: + _spec, _patterns = tuple( + json.loads(Path(pkgrf("nibabies", "data/io_spec.json")).read_text()).values() + ) + + if spec is None: + spec = _spec + if patterns is None: + patterns = _patterns + + derivs_cache = 
defaultdict(list, {}) + derivatives_dir = Path(derivatives_dir) + + def _check_item(item): + if not item: + return None + + if isinstance(item, str): + item = [item] + + result = [] + for i in item: + if not (derivatives_dir / i).exists(): + i = i[:-3] if i.endswith('.gz') else i + if not (derivatives_dir / i).exists(): + return None + result.append(str(derivatives_dir / i)) + + return result + + for space in [None] + std_spaces: + for k, q in spec["baseline"].items(): + q["subject"] = subject_id + if space is not None: + q["space"] = space + item = _check_item(build_path(q, patterns, strict=True)) + if not item: + return None + + if space: + derivs_cache["std_%s" % k] += item if len(item) == 1 else [item] + else: + derivs_cache["t1w_%s" % k] = item[0] if len(item) == 1 else item + + for space in std_spaces: + for k, q in spec["std_xfms"].items(): + q["subject"] = subject_id + q["from"] = q["from"] or space + q["to"] = q["to"] or space + item = _check_item(build_path(q, patterns)) + if not item: + return None + derivs_cache[k] += item + + derivs_cache = dict(derivs_cache) # Back to a standard dictionary + + if freesurfer: + for k, q in spec["surfaces"].items(): + q["subject"] = subject_id + item = _check_item(build_path(q, patterns)) + if not item: + return None + + if len(item) == 1: + item = item[0] + derivs_cache[k] = item + + derivs_cache["template"] = std_spaces + return derivs_cache From 0d3c28fc0b377894f7e41aa4e2244b88bf28765c Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 4 Jan 2022 13:38:52 -0500 Subject: [PATCH 02/18] RF: Ensure derivatives are not selected in existing queries --- nibabies/cli/parser.py | 2 +- nibabies/utils/bids.py | 2 +- nibabies/workflows/bold/base.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index c49c3f0a..1a4428d9 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -758,7 +758,7 @@ def parse_args(args=None, namespace=None): # Force 
initialization of the BIDSLayout config.execution.init() - all_subjects = config.execution.layout.get_subjects() + all_subjects = config.execution.layout.get_subjects(scope="raw") if config.execution.participant_label is None: config.execution.participant_label = all_subjects diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index f76fec53..338f3838 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -158,7 +158,7 @@ def group_bolds_ref(*, layout, subject): for ses, suffix in sorted( product( - layout.get_sessions(subject=subject) or (None,), + layout.get_sessions(subject=subject, scope="raw") or (None,), { "bold", }, diff --git a/nibabies/workflows/bold/base.py b/nibabies/workflows/bold/base.py index bc721d81..18105bd0 100644 --- a/nibabies/workflows/bold/base.py +++ b/nibabies/workflows/bold/base.py @@ -233,7 +233,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): entities.pop("echo", None) # reorder echoes from shortest to largest tes, bold_file = zip( - *sorted([(layout.get_metadata(bf)["EchoTime"], bf) for bf in bold_file]) + *sorted([(layout.get_metadata(bf, scope="raw")["EchoTime"], bf) for bf in bold_file]) ) ref_file = bold_file[0] # Reset reference to be the shortest TE @@ -250,7 +250,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False): # Find associated sbref, if possible entities["suffix"] = "sbref" entities["extension"] = [".nii", ".nii.gz"] # Overwrite extensions - sbref_files = layout.get(return_type="file", **entities) + sbref_files = layout.get(scope="raw", return_type="file", **entities) sbref_msg = f"No single-band-reference found for {os.path.basename(ref_file)}." 
if sbref_files and "sbref" in config.workflow.ignore: From 835a3b31fb8b7146bcfacdf48c0fe9b1212971f0 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 4 Jan 2022 19:55:49 -0500 Subject: [PATCH 03/18] ENH: Allow the use of precalculated derivatives --- nibabies/cli/parser.py | 8 +++- nibabies/config.py | 4 ++ nibabies/data/anat-io-spec.json | 84 --------------------------------- nibabies/utils/bids.py | 48 +++++++++++++++++++ nibabies/workflows/base.py | 33 ++++--------- nibabies/workflows/bold/base.py | 2 +- 6 files changed, 69 insertions(+), 110 deletions(-) delete mode 100644 nibabies/data/anat-io-spec.json diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index 1a4428d9..c7e2c551 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -625,7 +625,13 @@ def _slice_time_ref(value, parser): "--fd-radius", type=float, default=45, - help="Head radius in mm for framewise displacement calculation", + help="Head radius in mm for framewise displacement calculation.", + ) + g_baby.add_argument( + "--derivatives", + type=PathExists, + nargs="+", + help="One or more directory containing pre-computed derivatives.", ) return parser diff --git a/nibabies/config.py b/nibabies/config.py index d3ea60c2..95ea44ec 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -356,6 +356,8 @@ class execution(_Config): """Run in sloppy mode (meaning, suboptimal parameters that minimize run-time).""" debug = [] """Debug mode(s).""" + derivatives = None + """One or more paths where pre-computed derivatives are found.""" echo_idx = None """Select a particular echo for multi-echo EPI datasets.""" fs_license_file = _fs_license @@ -408,6 +410,7 @@ class execution(_Config): "anat_derivatives", "bids_dir", "bids_database_dir", + "derivatives", "fs_license_file", "fs_subjects_dir", "layout", @@ -451,6 +454,7 @@ def init(cls): database_path=_db_path, reset_database=cls.bids_database_dir is None, indexer=_indexer, + derivatives=cls.derivatives or False, ) 
cls.bids_database_dir = _db_path cls.layout = cls._layout diff --git a/nibabies/data/anat-io-spec.json b/nibabies/data/anat-io-spec.json deleted file mode 100644 index 6994ad7f..00000000 --- a/nibabies/data/anat-io-spec.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "queries": { - "baseline": { - "preproc": { - "datatype": "anat", - "desc": "preproc", - "suffix": "T1w" - }, - "mask": { - "datatype": "anat", - "desc": "brain", - "suffix": "mask" - }, - "dseg": { - "datatype": "anat", - "suffix": "dseg" - }, - "tpms": { - "datatype": "anat", - "label": ["GM", "WM", "CSF"], - "suffix": "probseg" - } - }, - "std_xfms": { - "anat2std_xfm": { - "datatype": "anat", - "extension": "h5", - "from": "T1w", - "to": [], - "suffix": "xfm", - "mode": "image" - }, - "std2anat_xfm": { - "datatype": "anat", - "extension": "h5", - "from": [], - "to": "T1w", - "suffix": "xfm", - "mode": "image" - } - }, - "surfaces": { - "t1w_aseg": { - "datatype": "anat", - "desc": "aseg", - "suffix": "dseg" - }, - "t1w_aparc": { - "datatype": "anat", - "desc": "aparcaseg", - "suffix": "dseg" - }, - "t1w2fsnative_xfm": { - "datatype": "anat", - "from": "T1w", - "to": "fsnative", - "extension": "txt", - "suffix": "xfm", - "mode": "image" - }, - "fsnative2t1w_xfm": { - "datatype": "anat", - "from": "fsnative", - "to": "T1w", - "extension": "txt", - "suffix": "xfm", - "mode": "image" - }, - "surfaces": { - "datatype": "anat", - "hemi": ["L", "R"], - "extension": "surf.gii", - "suffix": [ "inflated", "midthickness", "pial", "smoothwm"] - } - } - }, - "patterns": [ - "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", - "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", - 
"sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_hemi-{hemi}_{suffix}.{extension}", - "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_desc-{desc}_{suffix|mask}.{extension|nii.gz}", - "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_label-{label}[_desc-{desc}]_{suffix|probseg}.{extension|nii.gz}" - ] -} diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 338f3838..190e2c49 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -386,3 +386,51 @@ def _check_item(item): derivs_cache["template"] = std_spaces return derivs_cache + + +def collect_precomputed_derivatives(layout, subject_id, session_id=None): + """ + Query and collect precomputed derivatives. + + This function is used to determine which workflow steps can be skipped, + based on the files found. 
+ """ + + deriv_queries = { + 'anat_mask': { + 'datatype': 'anat', + 'desc': 'brain', + 'space': 'orig', + 'suffix': 'mask', + }, + 'anat_aseg': { + 'datatype': 'anat', + 'desc': 'aseg', + 'space': 'orig', + 'suffix': 'dseg', + }, + 'bold_mask': { + 'datatype': 'func', + 'desc': 'brain', + 'space': 'orig', + 'suffix': 'mask', + }, + } + derivatives = {} + + for deriv, query in deriv_queries: + res = layout.get( + scope='derivatives', + subject=subject_id, + session=session_id, + extension=['nii', 'nii.gz'], + **query, + ) + if not res: + continue + if len(res) > 1: + raise Exception( + f"When searching for <{deriv}>, found multiple results: {[f.path for f in res]}" + ) + derivatives[deriv] = res[0] + return derivatives diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 06bc99cf..f359831f 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -156,29 +156,12 @@ def init_single_subject_wf(subject_id): ) ) - if anat_derivatives: - from smriprep.utils.bids import collect_derivatives - - std_spaces = spaces.get_spaces(nonstandard=False, dim=(3,)) - anat_derivatives = collect_derivatives( - anat_derivatives.absolute(), + if config.execution.derivatives is not None: + from ..utils.bids import collect_precomputed_derivatives + derivatives = collect_precomputed_derivatives( + config.execution.layout, subject_id, - std_spaces, - config.workflow.run_reconall, - ) - if anat_derivatives is None: - config.loggers.workflow.warning( - f"""\ -Attempted to access pre-existing anatomical derivatives at \ -<{config.execution.anat_derivatives}>, however not all expectations of NiBabies \ -were met (for participant <{subject_id}>, spaces <{', '.join(std_spaces)}>, \ -reconall <{config.workflow.run_reconall}>).""" - ) - - if not anat_derivatives and not subject_data[anat_modality]: - raise Exception( - f"No {anat_modality} images found for participant {subject_id}. " - "All workflows require T1w images." 
+ session_id=None, # TODO: Ensure session is visible at workflow level ) workflow = Workflow(name=name) @@ -222,11 +205,12 @@ def init_single_subject_wf(subject_id): inputnode = pe.Node(niu.IdentityInterface(fields=["subjects_dir"]), name="inputnode") + # TODO: Revisit T1w/T2w restrictions for BIDSDataGrabber bidssrc = pe.Node( BIDSDataGrabber( subject_data=subject_data, anat_only=anat_only, - anat_derivatives=anat_derivatives, + anat_derivatives=False, subject_id=subject_id, ), name="bidssrc", @@ -282,7 +266,7 @@ def init_single_subject_wf(subject_id): t1w=subject_data["t1w"], t2w=subject_data["t2w"], bids_root=config.execution.bids_dir, - existing_derivatives=anat_derivatives, + existing_derivatives=derivatives, freesurfer=config.workflow.run_reconall, longitudinal=config.workflow.longitudinal, omp_nthreads=config.nipype.omp_nthreads, @@ -419,6 +403,7 @@ def init_single_subject_wf(subject_id): func_preproc_wf = init_func_preproc_wf( bold_file, has_fieldmap=has_fieldmap, + existing_derivatives=derivatives, ) # fmt: off workflow.connect([ diff --git a/nibabies/workflows/bold/base.py b/nibabies/workflows/bold/base.py index 18105bd0..ebb6f02d 100644 --- a/nibabies/workflows/bold/base.py +++ b/nibabies/workflows/bold/base.py @@ -74,7 +74,7 @@ from .outputs import init_func_derivatives_wf -def init_func_preproc_wf(bold_file, has_fieldmap=False): +def init_func_preproc_wf(bold_file, has_fieldmap=False, existing_derivatives=None): """ This workflow controls the functional preprocessing stages of *NiBabies*. 
From 8cbefa374538dbb297e9ed88d5e155701a3b0345 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 5 Jan 2022 09:30:38 -0500 Subject: [PATCH 04/18] STY: black --- nibabies/utils/bids.py | 82 -------------------------------------- nibabies/workflows/base.py | 1 + 2 files changed, 1 insertion(+), 82 deletions(-) diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 190e2c49..0142ba2a 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -306,88 +306,6 @@ def validate_input_dir(exec_env, bids_dir, participant_label): print("bids-validator does not appear to be installed", file=sys.stderr) -def collect_derivatives( - derivatives_dir, subject_id, std_spaces, freesurfer, spec=None, patterns=None, -): - """Gather existing derivatives and compose a cache.""" - from collections import defaultdict - import json - from pkg_resources import resource_filename as pkgrf - - from bids.layout.writing import build_path - - - if spec is None or patterns is None: - _spec, _patterns = tuple( - json.loads(Path(pkgrf("nibabies", "data/io_spec.json")).read_text()).values() - ) - - if spec is None: - spec = _spec - if patterns is None: - patterns = _patterns - - derivs_cache = defaultdict(list, {}) - derivatives_dir = Path(derivatives_dir) - - def _check_item(item): - if not item: - return None - - if isinstance(item, str): - item = [item] - - result = [] - for i in item: - if not (derivatives_dir / i).exists(): - i = i[:-3] if i.endswith('.gz') else i - if not (derivatives_dir / i).exists(): - return None - result.append(str(derivatives_dir / i)) - - return result - - for space in [None] + std_spaces: - for k, q in spec["baseline"].items(): - q["subject"] = subject_id - if space is not None: - q["space"] = space - item = _check_item(build_path(q, patterns, strict=True)) - if not item: - return None - - if space: - derivs_cache["std_%s" % k] += item if len(item) == 1 else [item] - else: - derivs_cache["t1w_%s" % k] = item[0] if len(item) == 1 else item - - for 
space in std_spaces: - for k, q in spec["std_xfms"].items(): - q["subject"] = subject_id - q["from"] = q["from"] or space - q["to"] = q["to"] or space - item = _check_item(build_path(q, patterns)) - if not item: - return None - derivs_cache[k] += item - - derivs_cache = dict(derivs_cache) # Back to a standard dictionary - - if freesurfer: - for k, q in spec["surfaces"].items(): - q["subject"] = subject_id - item = _check_item(build_path(q, patterns)) - if not item: - return None - - if len(item) == 1: - item = item[0] - derivs_cache[k] = item - - derivs_cache["template"] = std_spaces - return derivs_cache - - def collect_precomputed_derivatives(layout, subject_id, session_id=None): """ Query and collect precomputed derivatives. diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index f359831f..ad69981e 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -158,6 +158,7 @@ def init_single_subject_wf(subject_id): if config.execution.derivatives is not None: from ..utils.bids import collect_precomputed_derivatives + derivatives = collect_precomputed_derivatives( config.execution.layout, subject_id, From 1ab319e2d777e16e13f42bd490def8a81afece9d Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 5 Jan 2022 19:01:26 -0500 Subject: [PATCH 05/18] FIX: Syntax errors, remove anatomical derivatives --- nibabies/config.py | 1 - nibabies/utils/bids.py | 2 +- nibabies/workflows/base.py | 50 +++++++++++++------------------------- 3 files changed, 18 insertions(+), 35 deletions(-) diff --git a/nibabies/config.py b/nibabies/config.py index 95ea44ec..38405e18 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -410,7 +410,6 @@ class execution(_Config): "anat_derivatives", "bids_dir", "bids_database_dir", - "derivatives", "fs_license_file", "fs_subjects_dir", "layout", diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 0142ba2a..9b9a7924 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -336,7 +336,7 @@ 
def collect_precomputed_derivatives(layout, subject_id, session_id=None): } derivatives = {} - for deriv, query in deriv_queries: + for deriv, query in deriv_queries.items(): res = layout.get( scope='derivatives', subject=subject_id, diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index ad69981e..eee36814 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -143,7 +143,7 @@ def init_single_subject_wf(subject_id): subject_data["t2w"] = [] anat_only = config.workflow.anat_only - anat_derivatives = config.execution.anat_derivatives + derivatives = config.execution.derivatives anat_modality = "t1w" if subject_data["t1w"] else "t2w" spaces = config.workflow.spaces # Make sure we always go through these two checks @@ -156,7 +156,7 @@ def init_single_subject_wf(subject_id): ) ) - if config.execution.derivatives is not None: + if derivatives is not None: from ..utils.bids import collect_precomputed_derivatives derivatives = collect_precomputed_derivatives( @@ -308,37 +308,21 @@ def init_single_subject_wf(subject_id): ]), ]) - if not anat_derivatives: - workflow.connect([ - (bidssrc, bids_info, [ - (('t1w', fix_multi_source_name), 'in_file'), - ]), - (bidssrc, summary, [ - ('t1w', 't1w'), - ('t2w', 't2w'), - ]), - (bidssrc, ds_report_summary, [ - (('t1w', fix_multi_source_name), 'source_file'), - ]), - (bidssrc, ds_report_about, [ - (('t1w', fix_multi_source_name), 'source_file'), - ]), - ]) - else: - workflow.connect([ - (bidssrc, bids_info, [ - (('bold', fix_multi_source_name), 'in_file'), - ]), - (anat_preproc_wf, summary, [ - ('outputnode.t1w_preproc', 't1w'), - ]), - (anat_preproc_wf, ds_report_summary, [ - ('outputnode.t1w_preproc', 'source_file'), - ]), - (anat_preproc_wf, ds_report_about, [ - ('outputnode.t1w_preproc', 'source_file'), - ]), - ]) + workflow.connect([ + (bidssrc, bids_info, [ + (('t1w', fix_multi_source_name), 'in_file'), + ]), + (bidssrc, summary, [ + ('t1w', 't1w'), + ('t2w', 't2w'), + ]), + (bidssrc, 
ds_report_summary, [ + (('t1w', fix_multi_source_name), 'source_file'), + ]), + (bidssrc, ds_report_about, [ + (('t1w', fix_multi_source_name), 'source_file'), + ]), + ]) # fmt: on # Overwrite ``out_path_base`` of smriprep's DataSinks From f51390485db37017bbbdd3de170934b6fad9c434 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 5 Jan 2022 19:02:08 -0500 Subject: [PATCH 06/18] ENH: Port `--derivatives` to wrapper script --- wrapper/nibabies_wrapper.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/wrapper/nibabies_wrapper.py b/wrapper/nibabies_wrapper.py index e770f800..27393f7c 100755 --- a/wrapper/nibabies_wrapper.py +++ b/wrapper/nibabies_wrapper.py @@ -466,6 +466,13 @@ def _is_file(path, parser): type=os.path.abspath, help="Directory containing prelabeled segmentations to use for JointLabelFusion." ) + g_wrap.add_argument( + "--derivatives", + nargs="+", + metavar="PATH", + type=os.path.abspath, + help="One or more directory containing pre-computed derivatives" + ) # Developer patch/shell options g_dev = parser.add_argument_group( @@ -650,6 +657,13 @@ def main(): if opts.segmentation_atlases_dir: container.add_mount(opts.segmentation_atlases_dir, "/opt/segmentations") unknown_args.extend(["--segmentation-atlases-dir", "/opt/segmentations"]) + if opts.derivatives: + derivative_args = ["--derivatives"] + for derivative in opts.derivatives: + derivative_target = "/opt/derivatives/%s" % os.path.basename(derivative) + container.add_mount(derivative, derivative_target, read_only=False) + derivative_args.append(derivative_target) + unknown_args.extend(derivative_args) # Check that work_dir is not a child of bids_dir if opts.work_dir and opts.bids_dir: From 2964055da4ded7ef79eb486d8da8c7238bf00cf7 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 5 Jan 2022 19:03:59 -0500 Subject: [PATCH 07/18] ENH: Fast-track if precomputed mask is available --- nibabies/workflows/anatomical/base.py | 156 ++++++++++-------- 
.../workflows/anatomical/brain_extraction.py | 50 +++++- 2 files changed, 138 insertions(+), 68 deletions(-) diff --git a/nibabies/workflows/anatomical/base.py b/nibabies/workflows/anatomical/base.py index 45701d1e..8d14e4b9 100644 --- a/nibabies/workflows/anatomical/base.py +++ b/nibabies/workflows/anatomical/base.py @@ -78,7 +78,7 @@ def init_infant_anat_wf( from niworkflows.engine.workflows import LiterateWorkflow as Workflow from ...utils.misc import fix_multi_source_name - from .brain_extraction import init_infant_brain_extraction_wf + from .brain_extraction import init_infant_brain_extraction_wf, init_precomputed_mask_wf from .norm import init_anat_norm_wf from .outputs import init_anat_reports_wf, init_coreg_report_wf, init_anat_derivatives_wf from .preproc import init_anat_average_wf @@ -90,6 +90,9 @@ def init_infant_anat_wf( num_t1w = len(t1w) if t1w else 0 num_t2w = len(t2w) if t2w else 0 + precomp_mask = existing_derivatives.get("anat_mask") + precomp_aseg = existing_derivatives.get("anat_aseg") + wf = Workflow(name=name) desc = f"""\n Anatomical data preprocessing @@ -129,28 +132,30 @@ def init_infant_anat_wf( name="outputnode", ) - if existing_derivatives: - # TODO: verify reuse anat - raise NotImplementedError("Reusing derivatives is not yet supported.") - desc += ( - """ -All of the T1-weighted images were corrected for intensity non-uniformity (INU) -""" + """\ +All of the T1-weighted images were corrected for intensity non-uniformity (INU)""" if num_t1w > 1 else """\ -The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU) -""" +The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)""" ) + desc += """\ with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \ [@ants, RRID:SCR_004757]""" desc += ".\n" if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n" - desc += """\ + desc += ( + "A previously computed mask was used to skull-strip the anatomical image." 
+ if precomp_mask + else """\ The T1w-reference was then skull-stripped with a modified implementation of the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl} as target template. +""" + ) + + desc += """\ Brain tissue segmentation of cerebrospinal fluid (CSF), white-matter (WM) and gray-matter (GM) was performed on the brain-extracted T1w using ANTs JointFusion, distributed with ANTs {ants_ver}. @@ -174,8 +179,6 @@ def init_infant_anat_wf( output_dir=output_dir, spaces=spaces, ) - # # HACK: remove resolution from TFSelect - # anat_derivatives_wf.get_node("select_tpl").inputs.resolution = Undefined # Multiple T1w files -> generate average reference t1w_template_wf = init_anat_average_wf( @@ -196,24 +199,29 @@ def init_infant_anat_wf( if skull_strip_mode != "force": raise NotImplementedError("Skull stripping is currently required.") - brain_extraction_wf = init_infant_brain_extraction_wf( - age_months=age_months, - ants_affine_init=ants_affine_init, - skull_strip_template=skull_strip_template.space, - template_specs=skull_strip_template.spec, - omp_nthreads=omp_nthreads, - sloppy=sloppy, - debug="registration" in config.execution.debug, - ) + if precomp_mask: + precomp_mask_wf = init_precomputed_mask_wf(omp_nthreads=omp_nthreads) + precomp_mask_wf.inputs.inputnode.t1w_mask = precomp_mask - coregistration_wf = init_coregistration_wf( - omp_nthreads=omp_nthreads, - sloppy=sloppy, - debug="registration" in config.execution.debug, - ) - coreg_report_wf = init_coreg_report_wf( - output_dir=output_dir, - ) + else: + brain_extraction_wf = init_infant_brain_extraction_wf( + age_months=age_months, + ants_affine_init=ants_affine_init, + skull_strip_template=skull_strip_template.space, + template_specs=skull_strip_template.spec, + omp_nthreads=omp_nthreads, + sloppy=sloppy, + debug="registration" in config.execution.debug, + ) + coregistration_wf = init_coregistration_wf( + omp_nthreads=omp_nthreads, + sloppy=sloppy, + debug="registration" in 
config.execution.debug, + ) + coreg_report_wf = init_coreg_report_wf( + output_dir=output_dir, + ) + t1w_preproc_wf = precomp_mask_wf if precomp_mask else coregistration_wf # Segmentation - initial implementation should be simple: JLF anat_seg_wf = init_anat_seg_wf( @@ -234,33 +242,9 @@ def init_infant_anat_wf( # fmt: off wf.connect([ (inputnode, t1w_template_wf, [("t1w", "inputnode.in_files")]), - (inputnode, t2w_template_wf, [("t2w", "inputnode.in_files")]), (t1w_template_wf, outputnode, [ ("outputnode.realign_xfms", "anat_ref_xfms"), ]), - (t2w_template_wf, brain_extraction_wf, [ - ("outputnode.out_file", "inputnode.in_t2w"), - ]), - (t1w_template_wf, coregistration_wf, [ - ("outputnode.out_file", "inputnode.in_t1w"), - ]), - (brain_extraction_wf, coregistration_wf, [ - ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"), - ("outputnode.out_mask", "inputnode.in_mask"), - ("outputnode.out_probmap", "inputnode.in_probmap"), - ]), - (coregistration_wf, anat_norm_wf, [ - ("outputnode.t1w_preproc", "inputnode.moving_image"), - ("outputnode.t1w_mask", "inputnode.moving_mask"), - ]), - (coregistration_wf, outputnode, [ - ("outputnode.t1w_preproc", "anat_preproc"), - ("outputnode.t1w_brain", "anat_brain"), - ("outputnode.t1w_mask", "anat_mask"), - ]), - (coregistration_wf, anat_seg_wf, [ - ("outputnode.t1w_brain", "inputnode.anat_brain") - ]), (anat_seg_wf, outputnode, [ ("outputnode.anat_dseg", "anat_dseg"), ("outputnode.anat_tpms", "anat_tpms"), @@ -281,21 +265,63 @@ def init_infant_anat_wf( (inputnode, anat_norm_wf, [ (("t1w", fix_multi_source_name), "inputnode.orig_t1w"), # anat_validate? not used... 
]), + (t1w_preproc_wf, anat_norm_wf, [ + ("outputnode.t1w_preproc", "inputnode.moving_image"), + ("outputnode.t1w_mask", "inputnode.moving_mask"), + ]), + (t1w_preproc_wf, anat_seg_wf, [ + ("outputnode.t1w_brain", "inputnode.anat_brain"), + ]), + (t1w_preproc_wf, anat_derivatives_wf, [ + ("outputnode.t1w_mask", "inputnode.t1w_mask"), + ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), + ]), + (t1w_preproc_wf, outputnode, [ + ("outputnode.t1w_preproc", "anat_preproc"), + ("outputnode.t1w_brain", "anat_brain"), + ("outputnode.t1w_mask", "anat_mask"), + ]), ]) + if precomp_mask: + wf.connect([ + (t1w_template_wf, precomp_mask_wf, [ + ("outputnode.out_file", "inputnode.t1w"), + ]), + ]) + else: + wf.connect([ + (inputnode, t2w_template_wf, [("t2w", "inputnode.in_files")]), + (t2w_template_wf, brain_extraction_wf, [ + ("outputnode.out_file", "inputnode.in_t2w"), + ]), + (t1w_template_wf, coregistration_wf, [ + ("outputnode.out_file", "inputnode.in_t1w"), + ]), + (brain_extraction_wf, coregistration_wf, [ + ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"), + ("outputnode.out_mask", "inputnode.in_mask"), + ("outputnode.out_probmap", "inputnode.in_probmap"), + ]), + (inputnode, coreg_report_wf, [ + ("t1w", "inputnode.source_file"), + ]), + (t1w_preproc_wf, coreg_report_wf, [ + ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), + ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), + ("outputnode.t1w_mask", "inputnode.in_mask"), + ]), + (t1w_preproc_wf, anat_derivatives_wf, [ + ("outputnode.t1w_mask", "inputnode.t1w_mask"), + ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), + ]), + ]) + wf.connect([ # reports (inputnode, anat_reports_wf, [ ("t1w", "inputnode.source_file"), ]), - (inputnode, coreg_report_wf, [ - ("t1w", "inputnode.source_file"), - ]), - (coregistration_wf, coreg_report_wf, [ - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), - ("outputnode.t1w_mask", "inputnode.in_mask"), - ]), 
(outputnode, anat_reports_wf, [ ("anat_preproc", "inputnode.t1w_preproc"), ("anat_mask", "inputnode.t1w_mask"), @@ -314,10 +340,6 @@ def init_infant_anat_wf( ("outputnode.valid_list", "inputnode.source_files"), ("outputnode.realign_xfms", "inputnode.t1w_ref_xfms"), ]), - (coregistration_wf, anat_derivatives_wf, [ - ("outputnode.t1w_mask", "inputnode.t1w_mask"), - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ]), (anat_norm_wf, anat_derivatives_wf, [ ("outputnode.template", "inputnode.template"), ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), @@ -352,7 +374,7 @@ def init_infant_anat_wf( (t1w_template_wf, surface_recon_wf, [ ("outputnode.out_file", "inputnode.anat_orig"), ]), - (coregistration_wf, surface_recon_wf, [ + (t1w_preproc_wf, surface_recon_wf, [ ("outputnode.t1w_brain", "inputnode.anat_skullstripped"), ("outputnode.t1w_preproc", "inputnode.anat_preproc"), ]), diff --git a/nibabies/workflows/anatomical/brain_extraction.py b/nibabies/workflows/anatomical/brain_extraction.py index 8783cb44..b163c47e 100644 --- a/nibabies/workflows/anatomical/brain_extraction.py +++ b/nibabies/workflows/anatomical/brain_extraction.py @@ -9,7 +9,7 @@ def init_infant_brain_extraction_wf( age_months=None, - ants_affine_init=False, + ants_affine_init=True, bspline_fitting_distance=200, sloppy=False, skull_strip_template="UNCInfant", @@ -268,6 +268,54 @@ def init_infant_brain_extraction_wf( return workflow +def init_precomputed_mask_wf( + bspline_fitting_distance=200, omp_nthreads=None, name="precomputed_mask_wf" +): + from nipype.interfaces.ants import N4BiasFieldCorrection + from niworkflows.interfaces.header import ValidateImage + from niworkflows.interfaces.nibabel import ApplyMask, IntensityClip + + workflow = pe.Workflow(name=name) + inputnode = pe.Node(niu.IdentityInterface(fields=["t1w", "t1w_mask"]), name="inputnode") + outputnode = pe.Node( + niu.IdentityInterface(fields=["t1w_preproc", "t1w_mask", "t1w_brain"]), + name="outputnode", + ) + + validate_mask 
= pe.Node(ValidateImage(), name="validate_mask") + final_n4 = pe.Node( + N4BiasFieldCorrection( + dimension=3, + bspline_fitting_distance=bspline_fitting_distance, + save_bias=True, + copy_header=True, + n_iterations=[50] * 5, + convergence_threshold=1e-7, + rescale_intensities=True, + shrink_factor=4, + ), + n_procs=omp_nthreads, + name="final_n4", + ) + final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name="final_clip") + apply_mask = pe.Node(ApplyMask(), name="apply_mask") + + # fmt:off + workflow.connect([ + (inputnode, validate_mask, [("t1w_mask", "in_file")]), + (inputnode, final_n4, [("t1w", "input_image")]), + (validate_mask, final_n4, [("out_file", "weight_image")]), + (validate_mask, apply_mask, [("out_file", "in_mask")]), + (final_n4, apply_mask, [("output_image", "in_file")]), + (final_n4, final_clip, [("output_image", "in_file")]), + (validate_mask, outputnode, [("out_file", "t1w_mask")]), + (final_clip, outputnode, [("out_file", "t1w_preproc")]), + (apply_mask, outputnode, [("out_file", "t1w_brain")]), + ]) + # fmt:on + return workflow + + def _pop(in_files): if isinstance(in_files, (list, tuple)): return in_files[0] From 63f6d03bfd36bce8f976ec9195c4e07aa1fced30 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 6 Jan 2022 00:35:57 -0500 Subject: [PATCH 08/18] ENH: Fast-track if precomputed aseg is available --- nibabies/workflows/anatomical/base.py | 16 +- nibabies/workflows/anatomical/segmentation.py | 218 +++++++++--------- 2 files changed, 114 insertions(+), 120 deletions(-) diff --git a/nibabies/workflows/anatomical/base.py b/nibabies/workflows/anatomical/base.py index 8d14e4b9..17865833 100644 --- a/nibabies/workflows/anatomical/base.py +++ b/nibabies/workflows/anatomical/base.py @@ -83,7 +83,7 @@ def init_infant_anat_wf( from .outputs import init_anat_reports_wf, init_coreg_report_wf, init_anat_derivatives_wf from .preproc import init_anat_average_wf from .registration import init_coregistration_wf - from .segmentation import 
init_anat_seg_wf + from .segmentation import init_anat_segmentations_wf from .surfaces import init_infant_surface_recon_wf # for now, T1w only @@ -224,12 +224,12 @@ def init_infant_anat_wf( t1w_preproc_wf = precomp_mask_wf if precomp_mask else coregistration_wf # Segmentation - initial implementation should be simple: JLF - anat_seg_wf = init_anat_seg_wf( - age_months=age_months, + anat_seg_wf = init_anat_segmentations_wf( anat_modality=anat_modality.capitalize(), template_dir=segmentation_atlases, sloppy=sloppy, omp_nthreads=omp_nthreads, + precomp_aseg=precomp_aseg, ) # Spatial normalization (requires segmentation) @@ -239,7 +239,7 @@ def init_infant_anat_wf( templates=spaces.get_spaces(nonstandard=False, dim=(3,)), ) - # fmt: off + # fmt:off wf.connect([ (inputnode, t1w_template_wf, [("t1w", "inputnode.in_files")]), (t1w_template_wf, outputnode, [ @@ -269,9 +269,6 @@ def init_infant_anat_wf( ("outputnode.t1w_preproc", "inputnode.moving_image"), ("outputnode.t1w_mask", "inputnode.moving_mask"), ]), - (t1w_preproc_wf, anat_seg_wf, [ - ("outputnode.t1w_brain", "inputnode.anat_brain"), - ]), (t1w_preproc_wf, anat_derivatives_wf, [ ("outputnode.t1w_mask", "inputnode.t1w_mask"), ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), @@ -283,6 +280,11 @@ def init_infant_anat_wf( ]), ]) + if not precomp_aseg: + wf.connect([ + (t1w_preproc_wf, anat_seg_wf, [("outputnode.t1w_brain", "inputnode.anat_brain")]), + ]) + if precomp_mask: wf.connect([ (t1w_template_wf, precomp_mask_wf, [ diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index 64e48819..df30b3de 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -1,4 +1,6 @@ from pkg_resources import resource_filename as pkgr_fn +import sys + from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu, fsl from nipype.interfaces.ants.segmentation import JointFusion @@ -12,27 +14,39 @@ 
from ...config import DEFAULT_MEMORY_MIN_GB -def init_anat_seg_wf( - age_months=None, +def init_anat_segmentations_wf( anat_modality="T1w", template_dir=None, sloppy=False, omp_nthreads=1, - name="anat_seg_wf", + precomp_aseg=None, + name="anat_segmentations_wf", ): """ - Calculate brain tissue segmentations from either: - A) a collection of manually segmented templates - B) FSL's FAST + Create discrete and probabilistic segmentations from an anatomical image. + + There are a number of heuristics used to calculate the aseg, based on what is available. + 1) A precomputed aseg is provided (via `precomp_aseg`) + 2) Two or more segmentation templates are provided (via `template_dir`) + 3) Otherwise, fallback to FSL FAST """ + from niworkflows.engine.workflows import LiterateWorkflow as Workflow + from niworkflows.utils.connections import listify if anat_modality != "T1w": raise NotImplementedError( "Only T1w images are currently accepted for the segmentation workflow." ) - wf = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=["anat_brain"]), name="inputnode") + if precomp_aseg and template_dir: + print("Found precomputed aseg; skipping JointLabelFusion", file=sys.stderr) + template_dir = None + + wf = Workflow(name=name) + inputnode = pe.Node( + niu.IdentityInterface(fields=["anat_brain", "anat_aseg"]), + name="inputnode", + ) outputnode = pe.Node( niu.IdentityInterface(fields=["anat_aseg", "anat_dseg", "anat_tpms"]), name="outputnode", @@ -41,7 +55,7 @@ def init_anat_seg_wf( # Coerce segmentation labels to BIDS lut_anat_dseg = pe.Node(niu.Function(function=_apply_bids_lut), name="lut_anat_dseg") - if template_dir is None: + if not any((precomp_aseg, template_dir)): # Use FSL FAST for segmentations anat_dseg = pe.Node( fsl.FAST(segments=True, no_bias=True, probability_maps=True), @@ -55,119 +69,101 @@ def init_anat_seg_wf( run_without_submitting=True, ) - wf.connect( - [ - ( - inputnode, - anat_dseg, - [ - ("anat_brain", "in_files"), - ], - ), 
- ( - anat_dseg, - lut_anat_dseg, - [ - ("partial_volume_map", "in_dseg"), - ], - ), - ( - lut_anat_dseg, - outputnode, - [ - ("out", "anat_dseg"), - ], - ), - ( - anat_dseg, - fast2bids, - [ - ("partial_volume_files", "inlist"), - ], - ), - ( - fast2bids, - outputnode, - [ - ("out", "anat_tpms"), - ], - ), - ] - ) + # fmt:off + wf.connect([ + (inputnode, anat_dseg, [("anat_brain", "in_files")]), + (anat_dseg, lut_anat_dseg, [("partial_volume_map", "in_dseg")]), + (lut_anat_dseg, outputnode, [("out", "anat_dseg")]), + (anat_dseg, fast2bids, [("partial_volume_files", "inlist")]), + (fast2bids, outputnode, [("out", "anat_tpms")]), + ]) + # fmt:on return wf - # Otherwise, register to templates and run ANTs JointFusion - lut_anat_dseg.inputs.lut = _aseg_to_three() - tmpl_anats, tmpl_segs = _parse_segmentation_atlases(anat_modality, template_dir) - - # register to templates - ants_params = "testing" if sloppy else "precise" - # Register to each subject space - norm = pe.MapNode( - Registration( - from_file=pkgr_fn("niworkflows.data", f"antsBrainExtraction_{ants_params}.json") - ), - name="norm", - iterfield=["moving_image"], - n_procs=omp_nthreads, - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - norm.inputs.moving_image = tmpl_anats - norm.inputs.float = True - - apply_atlas = pe.MapNode( - ApplyTransforms( - dimension=3, - interpolation="NearestNeighbor", - float=True, - ), - iterfield=["transforms", "input_image"], - name="apply_atlas", - ) - apply_atlas.inputs.input_image = tmpl_anats + # Joint Label Fusion + if template_dir: + tmpl_anats, tmpl_segs = _parse_segmentation_atlases(anat_modality, template_dir) + + # register to templates + ants_params = "testing" if sloppy else "precise" + # Register to each subject space + norm = pe.MapNode( + Registration( + from_file=pkgr_fn("niworkflows.data", f"antsBrainExtraction_{ants_params}.json") + ), + name="norm", + iterfield=["moving_image"], + n_procs=omp_nthreads, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + norm.inputs.moving_image = 
tmpl_anats + norm.inputs.float = True + + apply_atlas = pe.MapNode( + ApplyTransforms( + dimension=3, + interpolation="NearestNeighbor", + float=True, + ), + iterfield=["transforms", "input_image"], + name="apply_atlas", + ) + apply_atlas.inputs.input_image = tmpl_anats - apply_seg = pe.MapNode( - ApplyTransforms(dimension=3, interpolation="MultiLabel"), # NearestNeighbor? - name="apply_seg", - iterfield=["transforms", "input_image"], - ) - apply_seg.inputs.input_image = tmpl_segs - - jointfusion = pe.Node( - JointFusion( - dimension=3, - out_label_fusion="fusion_labels.nii.gz", - num_threads=omp_nthreads, - ), - name="jointfusion", - ) + apply_seg = pe.MapNode( + ApplyTransforms(dimension=3, interpolation="MultiLabel"), # NearestNeighbor? + name="apply_seg", + iterfield=["transforms", "input_image"], + ) + apply_seg.inputs.input_image = tmpl_segs + + jointfusion = pe.Node( + JointFusion( + dimension=3, + out_label_fusion="fusion_labels.nii.gz", + num_threads=omp_nthreads, + ), + name="jointfusion", + ) - jf_label = pe.Node(niu.Function(function=_to_dtype), name="jf_label") + jf_label = pe.Node( + niu.Function(function=_to_dtype, output_names=["out_file"]), + name="jf_label" + ) - # split each tissue into individual masks + wf.connect([ + (inputnode, norm, [('anat_brain', 'fixed_image')]), + (norm, apply_atlas, [('forward_transforms', 'transforms')]), + (inputnode, apply_atlas, [('anat_brain', 'reference_image')]), + (norm, apply_seg, [('forward_transforms', 'transforms')]), + (inputnode, apply_seg, [('anat_brain', 'reference_image')]), + (inputnode, jointfusion, [(('anat_brain', listify), 'target_image')]), + (apply_atlas, jointfusion, [('output_image', 'atlas_image')]), + (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]), + (jointfusion, jf_label, [('out_label_fusion', 'in_file')]), + ]) + + elif precomp_aseg: + from niworkflows.interfaces.header import ValidateImage + + inputnode.inputs.anat_aseg = precomp_aseg + validate_aseg = 
pe.Node(ValidateImage(), name="validate_aseg") + wf.connect(inputnode, "anat_aseg", validate_aseg, "in_file") + + # Otherwise, the final aseg will be split into three tissue classes + # regardless if it was precomputed or generated via JLF + lut_anat_dseg.inputs.lut = _aseg_to_three() split_seg = pe.Node(niu.Function(function=_split_segments), name="split_seg") - to_list = pe.Node(niu.Function(function=_to_list), name="to_list") - + out_aseg = validate_aseg if precomp_aseg else jf_label # fmt: off wf.connect([ - (inputnode, norm, [('anat_brain', 'fixed_image')]), - (norm, apply_atlas, [('forward_transforms', 'transforms')]), - (inputnode, apply_atlas, [('anat_brain', 'reference_image')]), - (norm, apply_seg, [('forward_transforms', 'transforms')]), - (inputnode, apply_seg, [('anat_brain', 'reference_image')]), - (inputnode, to_list, [('anat_brain', 'in_file')]), - (to_list, jointfusion, [('out', 'target_image')]), - (apply_atlas, jointfusion, [('output_image', 'atlas_image')]), - (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]), - (jointfusion, jf_label, [('out_label_fusion', 'in_file')]), - (jf_label, outputnode, [('out', 'anat_aseg')]), - (jf_label, lut_anat_dseg, [('out', 'in_dseg')]), + (out_aseg, outputnode, [('out_file', 'anat_aseg')]), + (out_aseg, lut_anat_dseg, [('out_file', 'in_dseg')]), (lut_anat_dseg, outputnode, [('out', 'anat_dseg')]), (lut_anat_dseg, split_seg, [('out', 'in_file')]), (split_seg, outputnode, [('out', 'anat_tpms')]), ]) # fmt: on - return wf @@ -196,10 +192,6 @@ def _parse_segmentation_atlases(anat_modality, template_dir): return sorted(anats), sorted(segs) -def _to_list(in_file): - return [in_file] - - def _to_dtype(in_file, dtype="uint8"): """ Freesurfer's ``mri_convert`` complains about unsigned 32-bit integers. 
From 94b20935025bf0dced2d6d120b58974ee0206ce5 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 6 Jan 2022 09:02:54 -0500 Subject: [PATCH 09/18] STY: black --- nibabies/workflows/anatomical/segmentation.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index df30b3de..14d8fc27 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -127,10 +127,10 @@ def init_anat_segmentations_wf( ) jf_label = pe.Node( - niu.Function(function=_to_dtype, output_names=["out_file"]), - name="jf_label" + niu.Function(function=_to_dtype, output_names=["out_file"]), name="jf_label" ) + # fmt:off wf.connect([ (inputnode, norm, [('anat_brain', 'fixed_image')]), (norm, apply_atlas, [('forward_transforms', 'transforms')]), @@ -142,6 +142,7 @@ def init_anat_segmentations_wf( (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]), (jointfusion, jf_label, [('out_label_fusion', 'in_file')]), ]) + # fmt:on elif precomp_aseg: from niworkflows.interfaces.header import ValidateImage From a7cb1d12b657775b1a1c1a28af4d11b281cfa580 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 6 Jan 2022 09:37:24 -0500 Subject: [PATCH 10/18] DOC: Add section on `--derivatives` usage --- docs/faqs.md | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/docs/faqs.md b/docs/faqs.md index 26c73598..156c16fa 100644 --- a/docs/faqs.md +++ b/docs/faqs.md @@ -1,6 +1,29 @@ # Tips and FAQs -## Providing your own segmentations +## Leveraging precomputed results + +Whether to allow for manual intervention for tough cases, or simply to reduce processing time, *NiBabies* can allow the use of certain pre-computed files during processing. 
+Initial support is limited to the following files:
+- Anatomical mask in T1w space
+- Anatomical segmentation (aseg) in T1w space
+
+To use pre-computed results, one or more [BIDS Derivatives](https://bids-specification.readthedocs.io/en/stable/05-derivatives/01-introduction.html#bids-derivatives) directories must be passed in to *NiBabies* using the `--derivatives` flag.
+Derivative directories must include a [`dataset_description.json` and the required fields](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#derived-dataset-and-pipeline-description).
+Additionally, files must include the `space-orig` key-value pair in the name.
+
+A sample layout of a derivatives directory can be found below:
+
+```
+my_precomputed/
+├── dataset_description.json
+└── sub-01
+    └── anat
+        ├── sub-01_space-orig_desc-aseg_dseg.nii.gz
+        ├── sub-01_space-orig_desc-brain_mask.json
+        └── sub-01_space-orig_desc-brain_mask.nii.gz
+```
+
+## Multi-atlas segmentation with joint label fusion
 
 By default, *NiBabies* will run [FSL FAST](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST) for tissue segmentation, and Infant FreeSurfer for segmentation labels. However, you can instead use ANTs Joint Label Fusion to generate both, granted you provide multiple atlases with anatomicals / segmentations via the `--segmentation-atlases-dir` flag. 
From 9f180a7f5827a7d4c8af330d258de9f8d9ca4838 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 6 Jan 2022 11:00:32 -0500 Subject: [PATCH 11/18] Apply suggestions from code review Co-authored-by: Chris Markiewicz --- nibabies/cli/parser.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index c7e2c551..99da599c 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -628,6 +628,7 @@ def _slice_time_ref(value, parser): help="Head radius in mm for framewise displacement calculation.", ) g_baby.add_argument( + "-d", "--derivatives", type=PathExists, nargs="+", From 11190e08523d439c5c801890b543753a72079904 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 13 Jan 2022 13:45:21 -0500 Subject: [PATCH 12/18] FIX: Remove session restriction for now, add info log --- nibabies/utils/bids.py | 10 +--------- nibabies/workflows/base.py | 1 + 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 9b9a7924..a0134a32 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -327,26 +327,18 @@ def collect_precomputed_derivatives(layout, subject_id, session_id=None): 'space': 'orig', 'suffix': 'dseg', }, - 'bold_mask': { - 'datatype': 'func', - 'desc': 'brain', - 'space': 'orig', - 'suffix': 'mask', - }, } derivatives = {} - for deriv, query in deriv_queries.items(): res = layout.get( scope='derivatives', subject=subject_id, - session=session_id, extension=['nii', 'nii.gz'], **query, ) if not res: continue - if len(res) > 1: + if len(res) > 1: # Some queries may want multiple results raise Exception( f"When searching for <{deriv}>, found multiple results: {[f.path for f in res]}" ) diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index eee36814..014944ea 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -164,6 +164,7 @@ def init_single_subject_wf(subject_id): subject_id, session_id=None, # TODO: 
Ensure session is visible at workflow level ) + config.loggers.workflow.info(f"Found precomputed derivatives: {derivatives}") workflow = Workflow(name=name) workflow.__desc__ = """ From 150a3a5fea21e60a37205788b6ad8050f55b60f8 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 14 Jan 2022 12:09:42 -0500 Subject: [PATCH 13/18] FIX: Remove duplicate connection --- nibabies/workflows/anatomical/base.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/nibabies/workflows/anatomical/base.py b/nibabies/workflows/anatomical/base.py index 17865833..7c0e6232 100644 --- a/nibabies/workflows/anatomical/base.py +++ b/nibabies/workflows/anatomical/base.py @@ -313,10 +313,6 @@ def init_infant_anat_wf( ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), ("outputnode.t1w_mask", "inputnode.in_mask"), ]), - (t1w_preproc_wf, anat_derivatives_wf, [ - ("outputnode.t1w_mask", "inputnode.t1w_mask"), - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ]), ]) wf.connect([ From 7cf5fb80d722a43e28e21e91a037be091ea02f0f Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 14 Jan 2022 12:10:35 -0500 Subject: [PATCH 14/18] ENH: Add flag to control derivatives filtering --- nibabies/cli/parser.py | 7 +++++++ nibabies/config.py | 16 ++++++++++++---- nibabies/utils/bids.py | 6 +++++- nibabies/workflows/base.py | 7 ++++--- 4 files changed, 28 insertions(+), 8 deletions(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index 99da599c..253bef9b 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -634,6 +634,13 @@ def _slice_time_ref(value, parser): nargs="+", help="One or more directory containing pre-computed derivatives.", ) + g_baby.add_argument( + "--deriv-filter-file", + dest="derivatives_filters", + type=_bids_filter, + metavar="FILE", + help="A JSON file for customizing the derivatives queries." 
+ ) return parser diff --git a/nibabies/config.py b/nibabies/config.py index 38405e18..9f767e1a 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -358,6 +358,8 @@ class execution(_Config): """Debug mode(s).""" derivatives = None """One or more paths where pre-computed derivatives are found.""" + derivatives_filters = None + """A dictionary of BIDS selection filters""" echo_idx = None """Select a particular echo for multi-echo EPI datasets.""" fs_license_file = _fs_license @@ -457,15 +459,21 @@ def init(cls): ) cls.bids_database_dir = _db_path cls.layout = cls._layout - if cls.bids_filters: + + def _unserialize_bids_queries(queries): from bids.layout import Query - # unserialize pybids Query enum values - for acq, filters in cls.bids_filters.items(): - cls.bids_filters[acq] = { + for acq, filters in queries.items(): + queries[acq] = { k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and "Query" in v else v for k, v in filters.items() } + return queries + + if cls.bids_filters: + cls.bids_filters = _unserialize_bids_queries(cls.bids_filters) + if cls.derivatives_filters: + cls.derivatives_filters = _unserialize_bids_queries(cls.derivatives_filters) if "all" in cls.debug: cls.debug = list(DEBUG_MODES) diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index a0134a32..65460d67 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -306,7 +306,7 @@ def validate_input_dir(exec_env, bids_dir, participant_label): print("bids-validator does not appear to be installed", file=sys.stderr) -def collect_precomputed_derivatives(layout, subject_id, session_id=None): +def collect_precomputed_derivatives(layout, subject_id, derivatives_filters=None): """ Query and collect precomputed derivatives. 
@@ -328,6 +328,10 @@ def collect_precomputed_derivatives(layout, subject_id, session_id=None): 'suffix': 'dseg', }, } + if derivatives_filters is not None: + deriv_queries.update(derivatives_filters) + print(deriv_queries) + derivatives = {} for deriv, query in deriv_queries.items(): res = layout.get( diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 014944ea..4fcf252a 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -143,7 +143,7 @@ def init_single_subject_wf(subject_id): subject_data["t2w"] = [] anat_only = config.workflow.anat_only - derivatives = config.execution.derivatives + derivatives = config.execution.derivatives or {} anat_modality = "t1w" if subject_data["t1w"] else "t2w" spaces = config.workflow.spaces # Make sure we always go through these two checks @@ -156,13 +156,14 @@ def init_single_subject_wf(subject_id): ) ) - if derivatives is not None: + if derivatives: from ..utils.bids import collect_precomputed_derivatives derivatives = collect_precomputed_derivatives( config.execution.layout, subject_id, - session_id=None, # TODO: Ensure session is visible at workflow level + derivatives_filters=config.execution.derivatives_filters, + # session_id=None, # TODO: Ensure session is visible at workflow level ) config.loggers.workflow.info(f"Found precomputed derivatives: {derivatives}") From 9585d14309916ebb4cc7ad501b827d909fa21598 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 14 Jan 2022 12:11:14 -0500 Subject: [PATCH 15/18] ENH: Add filtering options to wrapper arguments --- wrapper/nibabies_wrapper.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/wrapper/nibabies_wrapper.py b/wrapper/nibabies_wrapper.py index 27393f7c..dd38e51a 100755 --- a/wrapper/nibabies_wrapper.py +++ b/wrapper/nibabies_wrapper.py @@ -268,6 +268,9 @@ def _get_posargs(usage): expected_overlap = { "anat-derivatives", "bids-database-dir", + "bids-filter-file", + "derivatives", + "deriv-filter-file", 
"fs-license-file", "fs-subjects-dir", "config-file", @@ -473,6 +476,18 @@ def _is_file(path, parser): type=os.path.abspath, help="One or more directory containing pre-computed derivatives" ) + g_wrap.add_argument( + "--bids-filter-file", + metavar="PATH", + type=os.path.abspath, + help="Filter file", + ) + g_wrap.add_argument( + "--deriv-filter-file", + metavar="PATH", + type=os.path.abspath, + help="Filter file", + ) # Developer patch/shell options g_dev = parser.add_argument_group( @@ -657,6 +672,12 @@ def main(): if opts.segmentation_atlases_dir: container.add_mount(opts.segmentation_atlases_dir, "/opt/segmentations") unknown_args.extend(["--segmentation-atlases-dir", "/opt/segmentations"]) + if opts.bids_filter_file: + container.add_mount(opts.bids_filter_file, "/opt/bids_filters.json") + unknown_args.extend(["--bids-filter-file", "/opt/bids_filters.json"]) + if opts.deriv_filter_file: + container.add_mount(opts.deriv_filter_file, "/opt/derivative_filters.json") + unknown_args.extend(["--deriv-filter-file", "/opt/derivative_filters.json"]) if opts.derivatives: derivative_args = ["--derivatives"] for derivative in opts.derivatives: From ac632f300210285b273621e91d4906a01d6c3d85 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 14 Jan 2022 13:01:22 -0500 Subject: [PATCH 16/18] STY: black this is the final straw --- nibabies/cli/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index 253bef9b..1dc8c0b1 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -639,7 +639,7 @@ def _slice_time_ref(value, parser): dest="derivatives_filters", type=_bids_filter, metavar="FILE", - help="A JSON file for customizing the derivatives queries." 
+ help="A JSON file for customizing the derivatives queries.", ) return parser From ad8092766226f58728cf022ecb0b963bec6efd84 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 18 Jan 2022 13:18:27 -0500 Subject: [PATCH 17/18] FIX: Remove excess print --- nibabies/utils/bids.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 65460d67..6b9222fb 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -330,7 +330,6 @@ def collect_precomputed_derivatives(layout, subject_id, derivatives_filters=None } if derivatives_filters is not None: deriv_queries.update(derivatives_filters) - print(deriv_queries) derivatives = {} for deriv, query in deriv_queries.items(): From 708f7360e500766230f5e9d36631018c42b8aea6 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 24 Jan 2022 16:19:26 -0500 Subject: [PATCH 18/18] Update nibabies/utils/bids.py Co-authored-by: Chris Markiewicz --- nibabies/utils/bids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py index 6b9222fb..4fa9d08b 100644 --- a/nibabies/utils/bids.py +++ b/nibabies/utils/bids.py @@ -336,7 +336,7 @@ def collect_precomputed_derivatives(layout, subject_id, derivatives_filters=None res = layout.get( scope='derivatives', subject=subject_id, - extension=['nii', 'nii.gz'], + extension=['.nii', '.nii.gz'], **query, ) if not res: