diff --git a/docs/faqs.md b/docs/faqs.md
index 26c73598..156c16fa 100644
--- a/docs/faqs.md
+++ b/docs/faqs.md
@@ -1,6 +1,29 @@
 # Tips and FAQs
 
-## Providing your own segmentations
+## Leveraging precomputed results
+
+Whether to allow manual intervention in tough cases, or simply to reduce processing time, *NiBabies* can make use of certain pre-computed files during processing.
+Initial support is limited to the following files:
+- Anatomical mask in T1w space
+- Anatomical segmentation (aseg) in T1w space
+
+To use pre-computed results, one or more [BIDS Derivatives](https://bids-specification.readthedocs.io/en/stable/05-derivatives/01-introduction.html#bids-derivatives) directories must be passed to *NiBabies* with the `--derivatives` flag.
+Each derivatives directory must include a [`dataset_description.json` with the required fields](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#derived-dataset-and-pipeline-description).
+Additionally, file names must include the `space-orig` key-value pair.
+
+A sample layout of a derivatives directory can be found below:
+
+```
+my_precomputed/
+├── dataset_description.json
+└── sub-01
+    └── anat
+        ├── sub-01_space-orig_desc-aseg_dseg.nii.gz
+        ├── sub-01_space-orig_desc-brain_mask.json
+        └── sub-01_space-orig_desc-brain_mask.nii.gz
+```
+
+## Multi-atlas segmentation with joint label fusion
 
 By default, *NiBabies* will run [FSL FAST](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST) for tissue segmentation, and Infant FreeSurfer for segmentation labels.
 However, you can instead use ANTs Joint Label Fusion to generate both, granted you provide multiple atlases with anatomicals / segmentations via the `--segmentation-atlases-dir` flag.
diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py
index c49c3f0a..1dc8c0b1 100644
--- a/nibabies/cli/parser.py
+++ b/nibabies/cli/parser.py
@@ -625,7 +625,21 @@ def _slice_time_ref(value, parser):
         "--fd-radius",
         type=float,
         default=45,
-        help="Head radius in mm for framewise displacement calculation",
+        help="Head radius in mm for framewise displacement calculation.",
+    )
+    g_baby.add_argument(
+        "-d",
+        "--derivatives",
+        type=PathExists,
+        nargs="+",
+        help="One or more directories containing pre-computed derivatives.",
+    )
+    g_baby.add_argument(
+        "--deriv-filter-file",
+        dest="derivatives_filters",
+        type=_bids_filter,
+        metavar="FILE",
+        help="A JSON file for customizing the derivatives queries.",
     )
     return parser
 
@@ -758,7 +772,7 @@ def parse_args(args=None, namespace=None):
 
     # Force initialization of the BIDSLayout
     config.execution.init()
-    all_subjects = config.execution.layout.get_subjects()
+    all_subjects = config.execution.layout.get_subjects(scope="raw")
     if config.execution.participant_label is None:
         config.execution.participant_label = all_subjects
 
diff --git a/nibabies/config.py b/nibabies/config.py
index d3ea60c2..9f767e1a 100644
--- a/nibabies/config.py
+++ b/nibabies/config.py
@@ -356,6 +356,10 @@ class execution(_Config):
     """Run in sloppy mode (meaning, suboptimal parameters that minimize run-time)."""
     debug = []
     """Debug mode(s)."""
+    derivatives = None
+    """One or more paths where pre-computed derivatives are found."""
+    derivatives_filters = None
+    """A dictionary of BIDS selection filters."""
     echo_idx = None
     """Select a particular echo for multi-echo EPI datasets."""
     fs_license_file = _fs_license
@@ -451,18 +455,25 @@ def init(cls):
                 database_path=_db_path,
                 reset_database=cls.bids_database_dir is None,
                 indexer=_indexer,
+                derivatives=cls.derivatives or False,
             )
             cls.bids_database_dir = _db_path
         cls.layout = cls._layout
-        if cls.bids_filters:
+
+        def _unserialize_bids_queries(queries):
             from bids.layout import Query
 
-            # unserialize pybids Query enum values
-            for acq, filters in cls.bids_filters.items():
-                cls.bids_filters[acq] = {
+            for acq, filters in queries.items():
+                queries[acq] = {
                     k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and "Query" in v else v
                     for k, v in filters.items()
                 }
+            return queries
+
+        if cls.bids_filters:
+            cls.bids_filters = _unserialize_bids_queries(cls.bids_filters)
+        if cls.derivatives_filters:
+            cls.derivatives_filters = _unserialize_bids_queries(cls.derivatives_filters)
 
         if "all" in cls.debug:
             cls.debug = list(DEBUG_MODES)
diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py
index 88ee23f4..4fa9d08b 100644
--- a/nibabies/utils/bids.py
+++ b/nibabies/utils/bids.py
@@ -158,7 +158,7 @@ def group_bolds_ref(*, layout, subject):
 
     for ses, suffix in sorted(
         product(
-            layout.get_sessions(subject=subject) or (None,),
+            layout.get_sessions(subject=subject, scope="raw") or (None,),
             {
                 "bold",
             },
@@ -304,3 +304,46 @@ def validate_input_dir(exec_env, bids_dir, participant_label):
             subprocess.check_call(["bids-validator", str(bids_dir), "-c", temp.name])
         except FileNotFoundError:
             print("bids-validator does not appear to be installed", file=sys.stderr)
+
+
+def collect_precomputed_derivatives(layout, subject_id, derivatives_filters=None):
+    """
+    Query and collect precomputed derivatives.
+
+    This function is used to determine which workflow steps can be skipped,
+    based on the files found.
+    """
+
+    deriv_queries = {
+        'anat_mask': {
+            'datatype': 'anat',
+            'desc': 'brain',
+            'space': 'orig',
+            'suffix': 'mask',
+        },
+        'anat_aseg': {
+            'datatype': 'anat',
+            'desc': 'aseg',
+            'space': 'orig',
+            'suffix': 'dseg',
+        },
+    }
+    if derivatives_filters is not None:
+        deriv_queries.update(derivatives_filters)
+
+    derivatives = {}
+    for deriv, query in deriv_queries.items():
+        res = layout.get(
+            scope='derivatives',
+            subject=subject_id,
+            extension=['.nii', '.nii.gz'],
+            **query,
+        )
+        if not res:
+            continue
+        if len(res) > 1:  # TODO: some queries may eventually support multiple results
+            raise Exception(
+                f"When searching for <{deriv}>, found multiple results: {[f.path for f in res]}"
+            )
+        derivatives[deriv] = res[0]
+    return derivatives
diff --git a/nibabies/workflows/anatomical/base.py b/nibabies/workflows/anatomical/base.py
index 45701d1e..7c0e6232 100644
--- a/nibabies/workflows/anatomical/base.py
+++ b/nibabies/workflows/anatomical/base.py
@@ -78,18 +78,21 @@ def init_infant_anat_wf(
     from niworkflows.engine.workflows import LiterateWorkflow as Workflow
 
     from ...utils.misc import fix_multi_source_name
-    from .brain_extraction import init_infant_brain_extraction_wf
+    from .brain_extraction import init_infant_brain_extraction_wf, init_precomputed_mask_wf
     from .norm import init_anat_norm_wf
     from .outputs import init_anat_reports_wf, init_coreg_report_wf, init_anat_derivatives_wf
     from .preproc import init_anat_average_wf
     from .registration import init_coregistration_wf
-    from .segmentation import init_anat_seg_wf
+    from .segmentation import init_anat_segmentations_wf
     from .surfaces import init_infant_surface_recon_wf
 
     # for now, T1w only
     num_t1w = len(t1w) if t1w else 0
     num_t2w = len(t2w) if t2w else 0
 
+    precomp_mask = existing_derivatives.get("anat_mask")
+    precomp_aseg = existing_derivatives.get("anat_aseg")
+
     wf = Workflow(name=name)
     desc = f"""\n
Anatomical data preprocessing
@@ -129,28 +132,30 @@ def init_infant_anat_wf(
         name="outputnode",
     )
 
-    if existing_derivatives:
-        # TODO: verify reuse anat
-        raise NotImplementedError("Reusing derivatives is not yet supported.")
-
     desc += (
-        """
-All of the T1-weighted images were corrected for intensity non-uniformity (INU)
-"""
+        """\
+All of the T1-weighted images were corrected for intensity non-uniformity (INU)"""
         if num_t1w > 1
         else """\
-The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)
-"""
+The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)"""
     )
+
     desc += """\
 with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \
 [@ants, RRID:SCR_004757]"""
     desc += ".\n" if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n"
 
-    desc += """\
+    desc += (
+        "A previously computed mask was used to skull-strip the anatomical image.\n"
+        if precomp_mask
+        else """\
 The T1w-reference was then skull-stripped with a modified implementation of
 the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl}
 as target template.
+"""
+    )
+
+    desc += """\
 Brain tissue segmentation of cerebrospinal fluid (CSF), white-matter (WM) and
 gray-matter (GM) was performed on the brain-extracted T1w using ANTs JointFusion,
 distributed with ANTs {ants_ver}.
@@ -174,8 +179,6 @@ def init_infant_anat_wf(
         output_dir=output_dir,
         spaces=spaces,
     )
-    # # HACK: remove resolution from TFSelect
-    # anat_derivatives_wf.get_node("select_tpl").inputs.resolution = Undefined
 
     # Multiple T1w files -> generate average reference
     t1w_template_wf = init_anat_average_wf(
@@ -196,32 +199,37 @@ def init_infant_anat_wf(
     if skull_strip_mode != "force":
         raise NotImplementedError("Skull stripping is currently required.")
 
-    brain_extraction_wf = init_infant_brain_extraction_wf(
-        age_months=age_months,
-        ants_affine_init=ants_affine_init,
-        skull_strip_template=skull_strip_template.space,
-        template_specs=skull_strip_template.spec,
-        omp_nthreads=omp_nthreads,
-        sloppy=sloppy,
-        debug="registration" in config.execution.debug,
-    )
+    if precomp_mask:
+        precomp_mask_wf = init_precomputed_mask_wf(omp_nthreads=omp_nthreads)
+        precomp_mask_wf.inputs.inputnode.t1w_mask = precomp_mask
 
-    coregistration_wf = init_coregistration_wf(
-        omp_nthreads=omp_nthreads,
-        sloppy=sloppy,
-        debug="registration" in config.execution.debug,
-    )
-    coreg_report_wf = init_coreg_report_wf(
-        output_dir=output_dir,
-    )
+    else:
+        brain_extraction_wf = init_infant_brain_extraction_wf(
+            age_months=age_months,
+            ants_affine_init=ants_affine_init,
+            skull_strip_template=skull_strip_template.space,
+            template_specs=skull_strip_template.spec,
+            omp_nthreads=omp_nthreads,
+            sloppy=sloppy,
+            debug="registration" in config.execution.debug,
+        )
+        coregistration_wf = init_coregistration_wf(
+            omp_nthreads=omp_nthreads,
+            sloppy=sloppy,
+            debug="registration" in config.execution.debug,
+        )
+        coreg_report_wf = init_coreg_report_wf(
+            output_dir=output_dir,
+        )
+    t1w_preproc_wf = precomp_mask_wf if precomp_mask else coregistration_wf
 
     # Segmentation - initial implementation should be simple: JLF
-    anat_seg_wf = init_anat_seg_wf(
-        age_months=age_months,
+    anat_seg_wf = init_anat_segmentations_wf(
         anat_modality=anat_modality.capitalize(),
         template_dir=segmentation_atlases,
         sloppy=sloppy,
         omp_nthreads=omp_nthreads,
+        precomp_aseg=precomp_aseg,
     )
 
     # Spatial normalization (requires segmentation)
@@ -231,36 +239,12 @@ def init_infant_anat_wf(
         templates=spaces.get_spaces(nonstandard=False, dim=(3,)),
     )
 
-    # fmt: off
+    # fmt:off
    wf.connect([
t1w_template_wf, [("t1w", "inputnode.in_files")]), - (inputnode, t2w_template_wf, [("t2w", "inputnode.in_files")]), (t1w_template_wf, outputnode, [ ("outputnode.realign_xfms", "anat_ref_xfms"), ]), - (t2w_template_wf, brain_extraction_wf, [ - ("outputnode.out_file", "inputnode.in_t2w"), - ]), - (t1w_template_wf, coregistration_wf, [ - ("outputnode.out_file", "inputnode.in_t1w"), - ]), - (brain_extraction_wf, coregistration_wf, [ - ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"), - ("outputnode.out_mask", "inputnode.in_mask"), - ("outputnode.out_probmap", "inputnode.in_probmap"), - ]), - (coregistration_wf, anat_norm_wf, [ - ("outputnode.t1w_preproc", "inputnode.moving_image"), - ("outputnode.t1w_mask", "inputnode.moving_mask"), - ]), - (coregistration_wf, outputnode, [ - ("outputnode.t1w_preproc", "anat_preproc"), - ("outputnode.t1w_brain", "anat_brain"), - ("outputnode.t1w_mask", "anat_mask"), - ]), - (coregistration_wf, anat_seg_wf, [ - ("outputnode.t1w_brain", "inputnode.anat_brain") - ]), (anat_seg_wf, outputnode, [ ("outputnode.anat_dseg", "anat_dseg"), ("outputnode.anat_tpms", "anat_tpms"), @@ -281,21 +265,61 @@ def init_infant_anat_wf( (inputnode, anat_norm_wf, [ (("t1w", fix_multi_source_name), "inputnode.orig_t1w"), # anat_validate? not used... ]), + (t1w_preproc_wf, anat_norm_wf, [ + ("outputnode.t1w_preproc", "inputnode.moving_image"), + ("outputnode.t1w_mask", "inputnode.moving_mask"), + ]), + (t1w_preproc_wf, anat_derivatives_wf, [ + ("outputnode.t1w_mask", "inputnode.t1w_mask"), + ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), + ]), + (t1w_preproc_wf, outputnode, [ + ("outputnode.t1w_preproc", "anat_preproc"), + ("outputnode.t1w_brain", "anat_brain"), + ("outputnode.t1w_mask", "anat_mask"), + ]), ]) + if not precomp_aseg: + wf.connect([ + (t1w_preproc_wf, anat_seg_wf, [("outputnode.t1w_brain", "inputnode.anat_brain")]), + ]) + + if precomp_mask: + wf.connect([ + (t1w_template_wf, precomp_mask_wf, [ + ("outputnode.out_file", "inputnode.t1w"), + ]), + ]) + else: + wf.connect([ + (inputnode, t2w_template_wf, [("t2w", "inputnode.in_files")]), + (t2w_template_wf, brain_extraction_wf, [ + ("outputnode.out_file", "inputnode.in_t2w"), + ]), + (t1w_template_wf, coregistration_wf, [ + ("outputnode.out_file", "inputnode.in_t1w"), + ]), + (brain_extraction_wf, coregistration_wf, [ + ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"), + ("outputnode.out_mask", "inputnode.in_mask"), + ("outputnode.out_probmap", "inputnode.in_probmap"), + ]), + (inputnode, coreg_report_wf, [ + ("t1w", "inputnode.source_file"), + ]), + (t1w_preproc_wf, coreg_report_wf, [ + ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), + ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), + ("outputnode.t1w_mask", "inputnode.in_mask"), + ]), + ]) + wf.connect([ # reports (inputnode, anat_reports_wf, [ ("t1w", "inputnode.source_file"), ]), - (inputnode, coreg_report_wf, [ - ("t1w", "inputnode.source_file"), - ]), - (coregistration_wf, coreg_report_wf, [ - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), - ("outputnode.t1w_mask", "inputnode.in_mask"), - ]), (outputnode, anat_reports_wf, [ ("anat_preproc", "inputnode.t1w_preproc"), ("anat_mask", "inputnode.t1w_mask"), @@ -314,10 +338,6 @@ def init_infant_anat_wf( ("outputnode.valid_list", "inputnode.source_files"), ("outputnode.realign_xfms", "inputnode.t1w_ref_xfms"), ]), - (coregistration_wf, anat_derivatives_wf, [ - ("outputnode.t1w_mask", "inputnode.t1w_mask"), - ("outputnode.t1w_preproc", 
"inputnode.t1w_preproc"), - ]), (anat_norm_wf, anat_derivatives_wf, [ ("outputnode.template", "inputnode.template"), ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), @@ -352,7 +372,7 @@ def init_infant_anat_wf( (t1w_template_wf, surface_recon_wf, [ ("outputnode.out_file", "inputnode.anat_orig"), ]), - (coregistration_wf, surface_recon_wf, [ + (t1w_preproc_wf, surface_recon_wf, [ ("outputnode.t1w_brain", "inputnode.anat_skullstripped"), ("outputnode.t1w_preproc", "inputnode.anat_preproc"), ]), diff --git a/nibabies/workflows/anatomical/brain_extraction.py b/nibabies/workflows/anatomical/brain_extraction.py index 8783cb44..b163c47e 100644 --- a/nibabies/workflows/anatomical/brain_extraction.py +++ b/nibabies/workflows/anatomical/brain_extraction.py @@ -9,7 +9,7 @@ def init_infant_brain_extraction_wf( age_months=None, - ants_affine_init=False, + ants_affine_init=True, bspline_fitting_distance=200, sloppy=False, skull_strip_template="UNCInfant", @@ -268,6 +268,54 @@ def init_infant_brain_extraction_wf( return workflow +def init_precomputed_mask_wf( + bspline_fitting_distance=200, omp_nthreads=None, name="precomputed_mask_wf" +): + from nipype.interfaces.ants import N4BiasFieldCorrection + from niworkflows.interfaces.header import ValidateImage + from niworkflows.interfaces.nibabel import ApplyMask, IntensityClip + + workflow = pe.Workflow(name=name) + inputnode = pe.Node(niu.IdentityInterface(fields=["t1w", "t1w_mask"]), name="inputnode") + outputnode = pe.Node( + niu.IdentityInterface(fields=["t1w_preproc", "t1w_mask", "t1w_brain"]), + name="outputnode", + ) + + validate_mask = pe.Node(ValidateImage(), name="validate_mask") + final_n4 = pe.Node( + N4BiasFieldCorrection( + dimension=3, + bspline_fitting_distance=bspline_fitting_distance, + save_bias=True, + copy_header=True, + n_iterations=[50] * 5, + convergence_threshold=1e-7, + rescale_intensities=True, + shrink_factor=4, + ), + n_procs=omp_nthreads, + name="final_n4", + ) + final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name="final_clip") + apply_mask = pe.Node(ApplyMask(), name="apply_mask") + + # fmt:off + workflow.connect([ + (inputnode, validate_mask, [("t1w_mask", "in_file")]), + (inputnode, final_n4, [("t1w", "input_image")]), + (validate_mask, final_n4, [("out_file", "weight_image")]), + (validate_mask, apply_mask, [("out_file", "in_mask")]), + (final_n4, apply_mask, [("output_image", "in_file")]), + (final_n4, final_clip, [("output_image", "in_file")]), + (validate_mask, outputnode, [("out_file", "t1w_mask")]), + (final_clip, outputnode, [("out_file", "t1w_preproc")]), + (apply_mask, outputnode, [("out_file", "t1w_brain")]), + ]) + # fmt:on + return workflow + + def _pop(in_files): if isinstance(in_files, (list, tuple)): return in_files[0] diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index 64e48819..14d8fc27 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -1,4 +1,6 @@ from pkg_resources import resource_filename as pkgr_fn +import sys + from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu, fsl from nipype.interfaces.ants.segmentation import JointFusion @@ -12,27 +14,39 @@ from ...config import DEFAULT_MEMORY_MIN_GB -def init_anat_seg_wf( - age_months=None, +def init_anat_segmentations_wf( anat_modality="T1w", template_dir=None, sloppy=False, omp_nthreads=1, - name="anat_seg_wf", + precomp_aseg=None, + name="anat_segmentations_wf", ): """ - Calculate brain 
-      A) a collection of manually segmented templates
-      B) FSL's FAST
+    Create discrete and probabilistic segmentations from an anatomical image.
+
+    There are a number of heuristics used to calculate the aseg, based on what is available:
+    1) A precomputed aseg is provided (via `precomp_aseg`)
+    2) Two or more segmentation templates are provided (via `template_dir`)
+    3) Otherwise, fall back to FSL FAST
     """
+    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
+    from niworkflows.utils.connections import listify
 
     if anat_modality != "T1w":
         raise NotImplementedError(
             "Only T1w images are currently accepted for the segmentation workflow."
         )
 
-    wf = pe.Workflow(name=name)
-    inputnode = pe.Node(niu.IdentityInterface(fields=["anat_brain"]), name="inputnode")
+    if precomp_aseg and template_dir:
+        print("Found precomputed aseg; skipping JointLabelFusion", file=sys.stderr)
+        template_dir = None
+
+    wf = Workflow(name=name)
+    inputnode = pe.Node(
+        niu.IdentityInterface(fields=["anat_brain", "anat_aseg"]),
+        name="inputnode",
+    )
     outputnode = pe.Node(
         niu.IdentityInterface(fields=["anat_aseg", "anat_dseg", "anat_tpms"]),
         name="outputnode",
@@ -41,7 +55,7 @@ def init_anat_seg_wf(
     # Coerce segmentation labels to BIDS
     lut_anat_dseg = pe.Node(niu.Function(function=_apply_bids_lut), name="lut_anat_dseg")
 
-    if template_dir is None:
+    if not any((precomp_aseg, template_dir)):
         # Use FSL FAST for segmentations
         anat_dseg = pe.Node(
             fsl.FAST(segments=True, no_bias=True, probability_maps=True),
@@ -55,119 +69,102 @@ def init_anat_seg_wf(
             run_without_submitting=True,
         )
 
-        wf.connect(
-            [
-                (
-                    inputnode,
-                    anat_dseg,
-                    [
-                        ("anat_brain", "in_files"),
-                    ],
-                ),
-                (
-                    anat_dseg,
-                    lut_anat_dseg,
-                    [
-                        ("partial_volume_map", "in_dseg"),
-                    ],
-                ),
-                (
-                    lut_anat_dseg,
-                    outputnode,
-                    [
-                        ("out", "anat_dseg"),
-                    ],
-                ),
-                (
-                    anat_dseg,
-                    fast2bids,
-                    [
-                        ("partial_volume_files", "inlist"),
-                    ],
-                ),
-                (
-                    fast2bids,
-                    outputnode,
-                    [
-                        ("out", "anat_tpms"),
-                    ],
-                ),
-            ]
-        )
+        # fmt:off
+        wf.connect([
+            (inputnode, anat_dseg, [("anat_brain", "in_files")]),
+            (anat_dseg, lut_anat_dseg, [("partial_volume_map", "in_dseg")]),
+            (lut_anat_dseg, outputnode, [("out", "anat_dseg")]),
+            (anat_dseg, fast2bids, [("partial_volume_files", "inlist")]),
+            (fast2bids, outputnode, [("out", "anat_tpms")]),
+        ])
+        # fmt:on
         return wf
 
-    # Otherwise, register to templates and run ANTs JointFusion
-    lut_anat_dseg.inputs.lut = _aseg_to_three()
-    tmpl_anats, tmpl_segs = _parse_segmentation_atlases(anat_modality, template_dir)
-
-    # register to templates
-    ants_params = "testing" if sloppy else "precise"
-    # Register to each subject space
-    norm = pe.MapNode(
-        Registration(
-            from_file=pkgr_fn("niworkflows.data", f"antsBrainExtraction_{ants_params}.json")
-        ),
-        name="norm",
-        iterfield=["moving_image"],
-        n_procs=omp_nthreads,
-        mem_gb=DEFAULT_MEMORY_MIN_GB,
-    )
-    norm.inputs.moving_image = tmpl_anats
-    norm.inputs.float = True
-
-    apply_atlas = pe.MapNode(
-        ApplyTransforms(
-            dimension=3,
-            interpolation="NearestNeighbor",
-            float=True,
-        ),
-        iterfield=["transforms", "input_image"],
-        name="apply_atlas",
-    )
-    apply_atlas.inputs.input_image = tmpl_anats
+    # Joint Label Fusion
+    if template_dir:
+        tmpl_anats, tmpl_segs = _parse_segmentation_atlases(anat_modality, template_dir)
+
+        # register to templates
+        ants_params = "testing" if sloppy else "precise"
+        # Register to each subject space
+        norm = pe.MapNode(
+            Registration(
f"antsBrainExtraction_{ants_params}.json") + ), + name="norm", + iterfield=["moving_image"], + n_procs=omp_nthreads, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + norm.inputs.moving_image = tmpl_anats + norm.inputs.float = True + + apply_atlas = pe.MapNode( + ApplyTransforms( + dimension=3, + interpolation="NearestNeighbor", + float=True, + ), + iterfield=["transforms", "input_image"], + name="apply_atlas", + ) + apply_atlas.inputs.input_image = tmpl_anats - apply_seg = pe.MapNode( - ApplyTransforms(dimension=3, interpolation="MultiLabel"), # NearestNeighbor? - name="apply_seg", - iterfield=["transforms", "input_image"], - ) - apply_seg.inputs.input_image = tmpl_segs - - jointfusion = pe.Node( - JointFusion( - dimension=3, - out_label_fusion="fusion_labels.nii.gz", - num_threads=omp_nthreads, - ), - name="jointfusion", - ) + apply_seg = pe.MapNode( + ApplyTransforms(dimension=3, interpolation="MultiLabel"), # NearestNeighbor? + name="apply_seg", + iterfield=["transforms", "input_image"], + ) + apply_seg.inputs.input_image = tmpl_segs + + jointfusion = pe.Node( + JointFusion( + dimension=3, + out_label_fusion="fusion_labels.nii.gz", + num_threads=omp_nthreads, + ), + name="jointfusion", + ) - jf_label = pe.Node(niu.Function(function=_to_dtype), name="jf_label") + jf_label = pe.Node( + niu.Function(function=_to_dtype, output_names=["out_file"]), name="jf_label" + ) - # split each tissue into individual masks + # fmt:off + wf.connect([ + (inputnode, norm, [('anat_brain', 'fixed_image')]), + (norm, apply_atlas, [('forward_transforms', 'transforms')]), + (inputnode, apply_atlas, [('anat_brain', 'reference_image')]), + (norm, apply_seg, [('forward_transforms', 'transforms')]), + (inputnode, apply_seg, [('anat_brain', 'reference_image')]), + (inputnode, jointfusion, [(('anat_brain', listify), 'target_image')]), + (apply_atlas, jointfusion, [('output_image', 'atlas_image')]), + (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]), + (jointfusion, jf_label, [('out_label_fusion', 'in_file')]), + ]) + # fmt:on + + elif precomp_aseg: + from niworkflows.interfaces.header import ValidateImage + + inputnode.inputs.anat_aseg = precomp_aseg + validate_aseg = pe.Node(ValidateImage(), name="validate_aseg") + wf.connect(inputnode, "anat_aseg", validate_aseg, "in_file") + + # Otherwise, the final aseg will be split into three tissue classes + # regardless if it was precomputed or generated via JLF + lut_anat_dseg.inputs.lut = _aseg_to_three() split_seg = pe.Node(niu.Function(function=_split_segments), name="split_seg") - to_list = pe.Node(niu.Function(function=_to_list), name="to_list") - + out_aseg = validate_aseg if precomp_aseg else jf_label # fmt: off wf.connect([ - (inputnode, norm, [('anat_brain', 'fixed_image')]), - (norm, apply_atlas, [('forward_transforms', 'transforms')]), - (inputnode, apply_atlas, [('anat_brain', 'reference_image')]), - (norm, apply_seg, [('forward_transforms', 'transforms')]), - (inputnode, apply_seg, [('anat_brain', 'reference_image')]), - (inputnode, to_list, [('anat_brain', 'in_file')]), - (to_list, jointfusion, [('out', 'target_image')]), - (apply_atlas, jointfusion, [('output_image', 'atlas_image')]), - (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]), - (jointfusion, jf_label, [('out_label_fusion', 'in_file')]), - (jf_label, outputnode, [('out', 'anat_aseg')]), - (jf_label, lut_anat_dseg, [('out', 'in_dseg')]), + (out_aseg, outputnode, [('out_file', 'anat_aseg')]), + (out_aseg, lut_anat_dseg, [('out_file', 'in_dseg')]), (lut_anat_dseg, 
         (lut_anat_dseg, split_seg, [('out', 'in_file')]),
         (split_seg, outputnode, [('out', 'anat_tpms')]),
     ])
     # fmt: on
-
     return wf
 
 
@@ -196,10 +193,6 @@ def _parse_segmentation_atlases(anat_modality, template_dir):
     return sorted(anats), sorted(segs)
 
 
-def _to_list(in_file):
-    return [in_file]
-
-
 def _to_dtype(in_file, dtype="uint8"):
     """
     Freesurfer's ``mri_convert`` complains about unsigned 32-bit integers.
diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py
index 06bc99cf..4fcf252a 100644
--- a/nibabies/workflows/base.py
+++ b/nibabies/workflows/base.py
@@ -143,7 +143,7 @@ def init_single_subject_wf(subject_id):
         subject_data["t2w"] = []
 
     anat_only = config.workflow.anat_only
-    anat_derivatives = config.execution.anat_derivatives
+    derivatives = config.execution.derivatives or {}
     anat_modality = "t1w" if subject_data["t1w"] else "t2w"
     spaces = config.workflow.spaces
     # Make sure we always go through these two checks
@@ -156,30 +156,16 @@ def init_single_subject_wf(subject_id):
         )
     )
 
-    if anat_derivatives:
-        from smriprep.utils.bids import collect_derivatives
+    if derivatives:
+        from ..utils.bids import collect_precomputed_derivatives
 
-        std_spaces = spaces.get_spaces(nonstandard=False, dim=(3,))
-        anat_derivatives = collect_derivatives(
-            anat_derivatives.absolute(),
+        derivatives = collect_precomputed_derivatives(
+            config.execution.layout,
             subject_id,
-            std_spaces,
-            config.workflow.run_reconall,
-        )
-        if anat_derivatives is None:
-            config.loggers.workflow.warning(
-                f"""\
-Attempted to access pre-existing anatomical derivatives at \
-<{config.execution.anat_derivatives}>, however not all expectations of NiBabies \
-were met (for participant <{subject_id}>, spaces <{', '.join(std_spaces)}>, \
-reconall <{config.workflow.run_reconall}>)."""
-            )
-
-        if not anat_derivatives and not subject_data[anat_modality]:
-            raise Exception(
-                f"No {anat_modality} images found for participant {subject_id}. "
-                "All workflows require T1w images."
+            derivatives_filters=config.execution.derivatives_filters,
+            # session_id=None,  # TODO: Ensure session is visible at workflow level
         )
+        config.loggers.workflow.info(f"Found precomputed derivatives: {derivatives}")
 
     workflow = Workflow(name=name)
     workflow.__desc__ = """
@@ -222,11 +208,12 @@ def init_single_subject_wf(subject_id):
 
     inputnode = pe.Node(niu.IdentityInterface(fields=["subjects_dir"]), name="inputnode")
 
+    # TODO: Revisit T1w/T2w restrictions for BIDSDataGrabber
     bidssrc = pe.Node(
         BIDSDataGrabber(
             subject_data=subject_data,
             anat_only=anat_only,
-            anat_derivatives=anat_derivatives,
+            anat_derivatives=False,
             subject_id=subject_id,
         ),
         name="bidssrc",
@@ -282,7 +269,7 @@ def init_single_subject_wf(subject_id):
         t1w=subject_data["t1w"],
         t2w=subject_data["t2w"],
         bids_root=config.execution.bids_dir,
-        existing_derivatives=anat_derivatives,
+        existing_derivatives=derivatives,
         freesurfer=config.workflow.run_reconall,
         longitudinal=config.workflow.longitudinal,
         omp_nthreads=config.nipype.omp_nthreads,
@@ -323,37 +310,21 @@ def init_single_subject_wf(subject_id):
         ]),
     ])
 
-    if not anat_derivatives:
-        workflow.connect([
-            (bidssrc, bids_info, [
-                (('t1w', fix_multi_source_name), 'in_file'),
-            ]),
-            (bidssrc, summary, [
-                ('t1w', 't1w'),
-                ('t2w', 't2w'),
-            ]),
-            (bidssrc, ds_report_summary, [
-                (('t1w', fix_multi_source_name), 'source_file'),
-            ]),
-            (bidssrc, ds_report_about, [
-                (('t1w', fix_multi_source_name), 'source_file'),
-            ]),
-        ])
-    else:
-        workflow.connect([
-            (bidssrc, bids_info, [
-                (('bold', fix_multi_source_name), 'in_file'),
-            ]),
-            (anat_preproc_wf, summary, [
-                ('outputnode.t1w_preproc', 't1w'),
-            ]),
-            (anat_preproc_wf, ds_report_summary, [
-                ('outputnode.t1w_preproc', 'source_file'),
-            ]),
-            (anat_preproc_wf, ds_report_about, [
-                ('outputnode.t1w_preproc', 'source_file'),
-            ]),
-        ])
+    workflow.connect([
+        (bidssrc, bids_info, [
+            (('t1w', fix_multi_source_name), 'in_file'),
+        ]),
+        (bidssrc, summary, [
+            ('t1w', 't1w'),
+            ('t2w', 't2w'),
+        ]),
+        (bidssrc, ds_report_summary, [
+            (('t1w', fix_multi_source_name), 'source_file'),
+        ]),
+        (bidssrc, ds_report_about, [
+            (('t1w', fix_multi_source_name), 'source_file'),
+        ]),
+    ])
     # fmt: on
 
     # Overwrite ``out_path_base`` of smriprep's DataSinks
@@ -419,6 +390,7 @@ def init_single_subject_wf(subject_id):
         func_preproc_wf = init_func_preproc_wf(
             bold_file,
             has_fieldmap=has_fieldmap,
+            existing_derivatives=derivatives,
         )
         # fmt: off
         workflow.connect([
diff --git a/nibabies/workflows/bold/base.py b/nibabies/workflows/bold/base.py
index bc721d81..ebb6f02d 100644
--- a/nibabies/workflows/bold/base.py
+++ b/nibabies/workflows/bold/base.py
@@ -74,7 +74,7 @@
 from .outputs import init_func_derivatives_wf
 
 
-def init_func_preproc_wf(bold_file, has_fieldmap=False):
+def init_func_preproc_wf(bold_file, has_fieldmap=False, existing_derivatives=None):
     """
     This workflow controls the functional preprocessing stages of *NiBabies*.
@@ -233,7 +233,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
         entities.pop("echo", None)
         # reorder echoes from shortest to largest
         tes, bold_file = zip(
-            *sorted([(layout.get_metadata(bf)["EchoTime"], bf) for bf in bold_file])
+            *sorted([(layout.get_metadata(bf, scope="raw")["EchoTime"], bf) for bf in bold_file])
         )
 
         ref_file = bold_file[0]  # Reset reference to be the shortest TE
@@ -250,7 +250,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
     # Find associated sbref, if possible
     entities["suffix"] = "sbref"
     entities["extension"] = [".nii", ".nii.gz"]  # Overwrite extensions
-    sbref_files = layout.get(return_type="file", **entities)
+    sbref_files = layout.get(scope="raw", return_type="file", **entities)
 
     sbref_msg = f"No single-band-reference found for {os.path.basename(ref_file)}."
     if sbref_files and "sbref" in config.workflow.ignore:
diff --git a/wrapper/nibabies_wrapper.py b/wrapper/nibabies_wrapper.py
index e770f800..dd38e51a 100755
--- a/wrapper/nibabies_wrapper.py
+++ b/wrapper/nibabies_wrapper.py
@@ -268,6 +268,9 @@ def _get_posargs(usage):
         expected_overlap = {
             "anat-derivatives",
             "bids-database-dir",
+            "bids-filter-file",
+            "derivatives",
+            "deriv-filter-file",
             "fs-license-file",
             "fs-subjects-dir",
             "config-file",
@@ -466,6 +469,25 @@ def _is_file(path, parser):
         type=os.path.abspath,
         help="Directory containing prelabeled segmentations to use for JointLabelFusion."
     )
+    g_wrap.add_argument(
+        "--derivatives",
+        nargs="+",
+        metavar="PATH",
+        type=os.path.abspath,
+        help="One or more directories containing pre-computed derivatives.",
+    )
+    g_wrap.add_argument(
+        "--bids-filter-file",
+        metavar="PATH",
+        type=os.path.abspath,
+        help="A JSON file for customizing the BIDS input queries.",
+    )
+    g_wrap.add_argument(
+        "--deriv-filter-file",
+        metavar="PATH",
+        type=os.path.abspath,
+        help="A JSON file for customizing the derivatives queries.",
+    )
 
     # Developer patch/shell options
     g_dev = parser.add_argument_group(
@@ -650,6 +672,19 @@ def main():
     if opts.segmentation_atlases_dir:
         container.add_mount(opts.segmentation_atlases_dir, "/opt/segmentations")
         unknown_args.extend(["--segmentation-atlases-dir", "/opt/segmentations"])
+    if opts.bids_filter_file:
+        container.add_mount(opts.bids_filter_file, "/opt/bids_filters.json")
+        unknown_args.extend(["--bids-filter-file", "/opt/bids_filters.json"])
+    if opts.deriv_filter_file:
+        container.add_mount(opts.deriv_filter_file, "/opt/derivative_filters.json")
+        unknown_args.extend(["--deriv-filter-file", "/opt/derivative_filters.json"])
+    if opts.derivatives:
+        derivative_args = ["--derivatives"]
+        for derivative in opts.derivatives:
+            derivative_target = "/opt/derivatives/%s" % os.path.basename(derivative)
+            container.add_mount(derivative, derivative_target, read_only=False)
+            derivative_args.append(derivative_target)
+        unknown_args.extend(derivative_args)
 
     # Check that work_dir is not a child of bids_dir
     if opts.work_dir and opts.bids_dir:
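
Usage sketch (not part of the diff). The snippet below is a minimal, hypothetical illustration of how the pieces introduced above fit together outside the workflow code: the paths, subject label, and the extra `session` entity in the filter are invented for illustration. Only `collect_precomputed_derivatives()` and the derivatives-aware `BIDSLayout` indexing are taken from this changeset.

```python
from bids import BIDSLayout

from nibabies.utils.bids import collect_precomputed_derivatives

# Index the raw dataset together with the precomputed derivatives directory,
# mirroring what config.execution.init() now does via `derivatives=cls.derivatives or False`.
layout = BIDSLayout(
    "/data/bids",                          # hypothetical raw BIDS dataset
    derivatives=["/data/my_precomputed"],  # hypothetical --derivatives directory
    validate=False,
)

# Equivalent to passing a --deriv-filter-file with this content as JSON.
# The "session" entity is illustrative only -- it narrows the default
# anat_mask query defined in nibabies/utils/bids.py.
filters = {
    "anat_mask": {
        "datatype": "anat",
        "desc": "brain",
        "space": "orig",
        "suffix": "mask",
        "session": "1month",  # hypothetical extra entity, not in the default query
    }
}

precomputed = collect_precomputed_derivatives(layout, "01", derivatives_filters=filters)
print(precomputed)  # e.g. {'anat_mask': <BIDSImageFile ...>, 'anat_aseg': <BIDSImageFile ...>}
```

When a mask is found, `init_infant_anat_wf` swaps `init_precomputed_mask_wf` in for the brain-extraction/coregistration pair; when an aseg is found, `init_anat_segmentations_wf` validates and reuses it instead of running JointLabelFusion.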