diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6400602c..de31f1e0 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -251,6 +251,24 @@ jobs:
key: THP002-anat-v00-{{ .Branch }}-{{ .Revision }}-{{ epoch }}
paths:
- /tmp/THP002/work
+ - run:
+ name: Run full diffusion workflow on THP002
+ no_output_timeout: 2h
+ command: |
+ mkdir -p /tmp/THP002/work /tmp/THP002/derivatives
+ sudo setfacl -d -m group:$(id -gn):rwx /tmp/THP002/derivatives && \
+ sudo setfacl -m group:$(id -gn):rwx /tmp/THP002/derivatives
+ sudo setfacl -d -m group:$(id -gn):rwx /tmp/THP002/work && \
+ sudo setfacl -m group:$(id -gn):rwx /tmp/THP002/work
+ docker run -e FS_LICENSE=$FS_LICENSE --rm \
+ -v /tmp/data/THP002:/data \
+ -v /tmp/THP002/derivatives:/out \
+ -v /tmp/fslicense/license.txt:/tmp/fslicense/license.txt:ro \
+ -v /tmp/config/nipype.cfg:/home/dmriprep/.nipype/nipype.cfg \
+ -v /tmp/THP002/work:/work \
+ --user $(id -u):$(id -g) \
+ nipreps/dmriprep:latest /data /out participant -vv --sloppy \
+ --notrack --skip-bids-validation -w /work --omp-nthreads 2 --nprocs 2
- store_artifacts:
path: /tmp/THP002/derivatives/dmriprep
- run:
diff --git a/dmriprep/config/__init__.py b/dmriprep/config/__init__.py
index 8f7e8bdc..01273ce1 100644
--- a/dmriprep/config/__init__.py
+++ b/dmriprep/config/__init__.py
@@ -2,4 +2,5 @@
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Settings."""
+DEFAULT_MEMORY_MIN_GB = 0.01
NONSTANDARD_REFERENCES = ['anat', 'T1w', 'dwi', 'fsnative']
diff --git a/dmriprep/config/reports-spec.yml b/dmriprep/config/reports-spec.yml
index 176a0b4d..cc0cc62c 100644
--- a/dmriprep/config/reports-spec.yml
+++ b/dmriprep/config/reports-spec.yml
@@ -26,6 +26,16 @@ sections:
caption: Surfaces (white and pial) reconstructed with FreeSurfer (recon-all
)
overlaid on the participant's T1w template.
subtitle: Surface reconstruction
+- name: Diffusion
+ ordering: session,acquisition,run
+ reportlets:
+ - bids: {datatype: dwi, desc: validation, suffix: dwi}
+ - bids: {datatype: dwi, desc: brain, suffix: mask}
+ caption: Average b=0 that serves for reference in early preprocessing steps.
+    description: The reference b=0 is obtained as the voxel-wise median across
+ all b=0 found in the dataset, after accounting for signal drift.
+ The red contour shows the brain mask calculated using this reference b=0.
+ subtitle: Reference b=0 and brain mask
- name: About
reportlets:
- bids: {datatype: anat, desc: about, suffix: T1w}
diff --git a/dmriprep/data/tests/dwi_mask.nii.gz b/dmriprep/data/tests/dwi_mask.nii.gz
new file mode 100644
index 00000000..e69de29b
diff --git a/dmriprep/interfaces/images.py b/dmriprep/interfaces/images.py
new file mode 100644
index 00000000..a59fedfb
--- /dev/null
+++ b/dmriprep/interfaces/images.py
@@ -0,0 +1,145 @@
+"""Image tools interfaces."""
+import numpy as np
+import nibabel as nb
+from nipype.utils.filemanip import fname_presuffix
+from nipype import logging
+from nipype.interfaces.base import (
+ traits, TraitedSpec, BaseInterfaceInputSpec, SimpleInterface, File
+)
+
+LOGGER = logging.getLogger('nipype.interface')
+
+
+class _ExtractB0InputSpec(BaseInterfaceInputSpec):
+ in_file = File(exists=True, mandatory=True, desc='dwi file')
+ b0_ixs = traits.List(traits.Int, mandatory=True,
+ desc='Index of b0s')
+
+
+class _ExtractB0OutputSpec(TraitedSpec):
+ out_file = File(exists=True, desc='b0 file')
+
+
+class ExtractB0(SimpleInterface):
+ """
+ Extract all b=0 volumes from a dwi series.
+
+ Example
+ -------
+ >>> os.chdir(tmpdir)
+ >>> extract_b0 = ExtractB0()
+ >>> extract_b0.inputs.in_file = str(data_dir / 'dwi.nii.gz')
+ >>> extract_b0.inputs.b0_ixs = [0, 1, 2]
+ >>> res = extract_b0.run() # doctest: +SKIP
+
+ """
+
+ input_spec = _ExtractB0InputSpec
+ output_spec = _ExtractB0OutputSpec
+
+ def _run_interface(self, runtime):
+ self._results['out_file'] = extract_b0(
+ self.inputs.in_file,
+ self.inputs.b0_ixs,
+ newpath=runtime.cwd)
+ return runtime
+
+
+def extract_b0(in_file, b0_ixs, newpath=None):
+ """Extract the *b0* volumes from a DWI dataset."""
+ out_file = fname_presuffix(
+ in_file, suffix='_b0', newpath=newpath)
+
+ img = nb.load(in_file)
+ data = img.get_fdata(dtype='float32')
+
+ b0 = data[..., b0_ixs]
+
+ hdr = img.header.copy()
+ hdr.set_data_shape(b0.shape)
+ hdr.set_xyzt_units('mm')
+ hdr.set_data_dtype(np.float32)
+ nb.Nifti1Image(b0, img.affine, hdr).to_filename(out_file)
+ return out_file
+
+
+class _RescaleB0InputSpec(BaseInterfaceInputSpec):
+ in_file = File(exists=True, mandatory=True, desc='b0s file')
+ mask_file = File(exists=True, mandatory=True, desc='mask file')
+
+
+class _RescaleB0OutputSpec(TraitedSpec):
+ out_ref = File(exists=True, desc='One average b0 file')
+ out_b0s = File(exists=True, desc='series of rescaled b0 volumes')
+
+
+class RescaleB0(SimpleInterface):
+ """
+ Rescale the b0 volumes to deal with average signal decay over time.
+
+ Example
+ -------
+ >>> os.chdir(tmpdir)
+ >>> rescale_b0 = RescaleB0()
+ >>> rescale_b0.inputs.in_file = str(data_dir / 'dwi.nii.gz')
+ >>> rescale_b0.inputs.mask_file = str(data_dir / 'dwi_mask.nii.gz')
+ >>> res = rescale_b0.run() # doctest: +SKIP
+
+ """
+
+ input_spec = _RescaleB0InputSpec
+ output_spec = _RescaleB0OutputSpec
+
+ def _run_interface(self, runtime):
+ self._results['out_b0s'] = rescale_b0(
+ self.inputs.in_file,
+ self.inputs.mask_file,
+ newpath=runtime.cwd
+ )
+ self._results['out_ref'] = median(
+ self._results['out_b0s'],
+ newpath=runtime.cwd
+ )
+ return runtime
+
+
+def rescale_b0(in_file, mask_file, newpath=None):
+ """Rescale the input volumes using the median signal intensity."""
+ out_file = fname_presuffix(
+ in_file, suffix='_rescaled_b0', newpath=newpath)
+
+ img = nb.load(in_file)
+ if img.dataobj.ndim == 3:
+ return in_file
+
+ data = img.get_fdata(dtype='float32')
+ mask_img = nb.load(mask_file)
+ mask_data = mask_img.get_fdata(dtype='float32')
+
+ median_signal = np.median(data[mask_data > 0, ...], axis=0)
+ rescaled_data = 1000 * data / median_signal
+ hdr = img.header.copy()
+ nb.Nifti1Image(rescaled_data, img.affine, hdr).to_filename(out_file)
+ return out_file
+
+
+def median(in_file, newpath=None):
+ """Average a 4D dataset across the last dimension using median."""
+ out_file = fname_presuffix(
+ in_file, suffix='_b0ref', newpath=newpath)
+
+ img = nb.load(in_file)
+ if img.dataobj.ndim == 3:
+ return in_file
+ if img.shape[-1] == 1:
+ nb.squeeze_image(img).to_filename(out_file)
+ return out_file
+
+ median_data = np.median(img.get_fdata(dtype='float32'),
+ axis=-1)
+
+ hdr = img.header.copy()
+ hdr.set_xyzt_units('mm')
+ hdr.set_data_dtype(np.float32)
+ nb.Nifti1Image(median_data, img.affine, hdr).to_filename(out_file)
+ return out_file
diff --git a/dmriprep/interfaces/reports.py b/dmriprep/interfaces/reports.py
index c61b64bb..0e14c2e0 100644
--- a/dmriprep/interfaces/reports.py
+++ b/dmriprep/interfaces/reports.py
@@ -23,13 +23,6 @@
\t
"""
-DWI_TEMPLATE = """\t\t
{command}
flirt
with boundary-based registration'
- ' (BBR) metric - %d dof' % dof,
- 'FSL flirt
rigid registration - 6 dof'],
- 'FreeSurfer': [
- 'FreeSurfer bbregister
'
- '(boundary-based registration, BBR) - %d dof' % dof,
- 'FreeSurfer mri_coreg
- %d dof' % dof],
- }[self.inputs.registration][self.inputs.fallback]
- if self.inputs.pe_direction is None:
- pedir = 'MISSING - Assuming Anterior-Posterior'
- else:
- pedir = {'i': 'Left-Right', 'j': 'Anterior-Posterior'}[self.inputs.pe_direction[0]]
-
- return DWI_TEMPLATE.format(
- pedir=pedir, sdc=self.inputs.distortion_correction, registration=reg)
diff --git a/dmriprep/workflows/base.py b/dmriprep/workflows/base.py
index d4b9b587..38ee40a6 100755
--- a/dmriprep/workflows/base.py
+++ b/dmriprep/workflows/base.py
@@ -1,13 +1,4 @@
-# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
-# vi: set ft=python sts=4 ts=4 sw=4 et:
-"""
-dMRIPrep base processing workflows.
-
-.. autofunction:: init_dmriprep_wf
-.. autofunction:: init_single_subject_wf
-
-"""
-
+"""dMRIPrep base processing workflows."""
import sys
import os
from packaging.version import Version
@@ -29,7 +20,7 @@
from ..interfaces.reports import SubjectSummary, AboutSummary
from ..utils.bids import collect_data
from ..__about__ import __version__
-# from .dwi import init_dwi_preproc_wf
+from .dwi import init_dwi_preproc_wf
def init_dmriprep_wf(
@@ -435,59 +426,42 @@ def init_single_subject_wf(
if anat_only:
return workflow
- # for dwi_file in subject_data['dwi']:
- # dwi_preproc_wf = init_dwi_preproc_wf(
- # aroma_melodic_dim=aroma_melodic_dim,
- # bold2t1w_dof=bold2t1w_dof,
- # bold_file=bold_file,
- # cifti_output=cifti_output,
- # debug=debug,
- # dummy_scans=dummy_scans,
- # err_on_aroma_warn=err_on_aroma_warn,
- # fmap_bspline=fmap_bspline,
- # fmap_demean=fmap_demean,
- # force_syn=force_syn,
- # freesurfer=freesurfer,
- # ignore=ignore,
- # layout=layout,
- # low_mem=low_mem,
- # medial_surface_nan=medial_surface_nan,
- # num_bold=len(subject_data['bold']),
- # omp_nthreads=omp_nthreads,
- # output_dir=output_dir,
- # output_spaces=output_spaces,
- # reportlets_dir=reportlets_dir,
- # regressors_all_comps=regressors_all_comps,
- # regressors_fd_th=regressors_fd_th,
- # regressors_dvars_th=regressors_dvars_th,
- # t2s_coreg=t2s_coreg,
- # use_aroma=use_aroma,
- # use_syn=use_syn,
- # )
-
- # workflow.connect([
- # (anat_preproc_wf, dwi_preproc_wf,
- # [(('outputnode.t1_preproc', _pop), 'inputnode.t1_preproc'),
- # ('outputnode.t1_brain', 'inputnode.t1_brain'),
- # ('outputnode.t1_mask', 'inputnode.t1_mask'),
- # ('outputnode.t1_seg', 'inputnode.t1_seg'),
- # ('outputnode.t1_aseg', 'inputnode.t1_aseg'),
- # ('outputnode.t1_aparc', 'inputnode.t1_aparc'),
- # ('outputnode.t1_tpms', 'inputnode.t1_tpms'),
- # ('outputnode.template', 'inputnode.template'),
- # ('outputnode.forward_transform', 'inputnode.anat2std_xfm'),
- # ('outputnode.reverse_transform', 'inputnode.std2anat_xfm'),
- # ('outputnode.joint_template', 'inputnode.joint_template'),
- # ('outputnode.joint_forward_transform', 'inputnode.joint_anat2std_xfm'),
- # ('outputnode.joint_reverse_transform', 'inputnode.joint_std2anat_xfm'),
- # # Undefined if --no-freesurfer, but this is safe
- # ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
- # ('outputnode.subject_id', 'inputnode.subject_id'),
- # ('outputnode.t1_2_fsnative_forward_transform',
- # 'inputnode.t1_2_fsnative_forward_transform'),
- # ('outputnode.t1_2_fsnative_reverse_transform',
- # 'inputnode.t1_2_fsnative_reverse_transform')]),
- # ])
+ for dwi_file in subject_data['dwi']:
+ dwi_preproc_wf = init_dwi_preproc_wf(
+ dwi_file=dwi_file,
+ debug=debug,
+ force_syn=force_syn,
+ ignore=ignore,
+ layout=layout,
+ low_mem=low_mem,
+ num_dwi=len(subject_data['dwi']),
+ omp_nthreads=omp_nthreads,
+ output_dir=output_dir,
+ reportlets_dir=reportlets_dir,
+ use_syn=use_syn,
+ )
+
+ workflow.connect([
+ (anat_preproc_wf, dwi_preproc_wf,
+ [(('outputnode.t1w_preproc', _pop), 'inputnode.t1w_preproc'),
+ ('outputnode.t1w_brain', 'inputnode.t1w_brain'),
+ ('outputnode.t1w_mask', 'inputnode.t1w_mask'),
+ ('outputnode.t1w_dseg', 'inputnode.t1w_dseg'),
+ ('outputnode.t1w_aseg', 'inputnode.t1w_aseg'),
+ ('outputnode.t1w_aparc', 'inputnode.t1w_aparc'),
+ ('outputnode.t1w_tpms', 'inputnode.t1w_tpms'),
+ ('outputnode.template', 'inputnode.template'),
+ ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
+ ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
+ ('outputnode.joint_template', 'inputnode.joint_template'),
+ ('outputnode.joint_anat2std_xfm', 'inputnode.joint_anat2std_xfm'),
+ ('outputnode.joint_std2anat_xfm', 'inputnode.joint_std2anat_xfm'),
+ # Undefined if --fs-no-reconall, but this is safe
+ ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
+ ('outputnode.subject_id', 'inputnode.subject_id'),
+ ('outputnode.t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
+ ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
+ ])
return workflow
diff --git a/dmriprep/workflows/dwi/__init__.py b/dmriprep/workflows/dwi/__init__.py
index e69de29b..70f15d06 100644
--- a/dmriprep/workflows/dwi/__init__.py
+++ b/dmriprep/workflows/dwi/__init__.py
@@ -0,0 +1,9 @@
+"""Pre-processing dMRI workflows."""
+
+from .base import init_dwi_preproc_wf
+from .util import init_dwi_reference_wf
+
+__all__ = [
+ 'init_dwi_preproc_wf',
+ 'init_dwi_reference_wf',
+]
diff --git a/dmriprep/workflows/dwi/base.py b/dmriprep/workflows/dwi/base.py
new file mode 100644
index 00000000..315132aa
--- /dev/null
+++ b/dmriprep/workflows/dwi/base.py
@@ -0,0 +1,192 @@
+"""Orchestrating the dMRI-preprocessing workflow."""
+from nipype import logging
+from nipype.pipeline import engine as pe
+from nipype.interfaces import utility as niu
+
+from niworkflows.engine.workflows import LiterateWorkflow as Workflow
+from ...interfaces.vectors import CheckGradientTable
+from .util import init_dwi_reference_wf
+from .outputs import init_reportlets_wf
+
+
+LOGGER = logging.getLogger('nipype.workflow')
+
+
+def init_dwi_preproc_wf(
+ dwi_file,
+ debug,
+ force_syn,
+ ignore,
+ low_mem,
+ omp_nthreads,
+ output_dir,
+ reportlets_dir,
+ use_syn,
+ layout=None,
+ num_dwi=1,
+):
+ """
+ This workflow controls the diffusion preprocessing stages of *dMRIPrep*.
+
+ Workflow Graph
+ .. workflow::
+ :graph2use: orig
+ :simple_form: yes
+
+ from dmriprep.workflows.dwi import init_dwi_preproc_wf
+ from collections import namedtuple
+ BIDSLayout = namedtuple('BIDSLayout', ['root'])
+ wf = init_dwi_preproc_wf(
+ dwi_file='/completely/made/up/path/sub-01_dwi.nii.gz',
+ debug=False,
+ force_syn=True,
+ ignore=[],
+ low_mem=False,
+ omp_nthreads=1,
+ output_dir='.',
+ reportlets_dir='.',
+ use_syn=True,
+ layout=BIDSLayout('.'),
+ num_dwi=1,
+ )
+
+ Parameters
+ ----------
+ dwi_file : str
+ dwi NIfTI file
+ debug : bool
+ Enable debugging outputs
+ force_syn : bool
+ **Temporary**: Always run SyN-based SDC
+ ignore : list
+ Preprocessing steps to skip (may include "sdc")
+ low_mem : bool
+ Write uncompressed .nii files in some cases to reduce memory usage
+ omp_nthreads : int
+ Maximum number of threads an individual process may use
+ output_dir : str
+ Directory in which to save derivatives
+ reportlets_dir : str
+ Absolute path of a directory in which reportlets will be temporarily stored
+ use_syn : bool
+ **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC).
+ If fieldmaps are present and enabled, this is not run, by default.
+ layout : BIDSLayout
+ BIDSLayout structure to enable metadata retrieval
+ num_dwi : int
+ Total number of dwi files that have been set for preprocessing
+ (default is 1)
+
+ Inputs
+ ------
+ dwi_file
+ dwi NIfTI file
+    bvec_file
+        File path of the b-vectors
+    bval_file
+        File path of the b-values
+
+ Outputs
+ -------
+ dwi_file
+ dwi NIfTI file
+ dwi_mask
+ dwi mask
+
+ See also
+ --------
+ * :py:func:`~dmriprep.workflows.dwi.util.init_dwi_reference_wf`
+ * :py:func:`~dmriprep.workflows.dwi.outputs.init_reportlets_wf`
+
+ """
+ wf_name = _get_wf_name(dwi_file)
+
+ # Build workflow
+ workflow = Workflow(name=wf_name)
+ workflow.__desc__ = """
+
+Diffusion data preprocessing
+
+: For each of the {num_dwi} dwi scans found per subject (across all sessions),
+ the following preprocessing was performed.
+""".format(num_dwi=num_dwi)
+
+ workflow.__postdesc__ = """\
+ """
+
+ # For doc building purposes
+ if not hasattr(layout, 'parse_file_entities'):
+ LOGGER.log(25, 'No valid layout: building empty workflow.')
+ bvec_file = '/completely/made/up/path/sub-01_dwi.bvec'
+ bval_file = '/completely/made/up/path/sub-01_dwi.bval'
+ else:
+ bvec_file = layout.get_bvec(dwi_file)
+ bval_file = layout.get_bval(dwi_file)
+
+ inputnode = pe.Node(niu.IdentityInterface(
+ fields=['dwi_file', 'bvec_file', 'bval_file',
+ 'subjects_dir', 'subject_id',
+ 't1w_preproc', 't1w_brain', 't1w_mask', 't1w_dseg', 't1w_tpms',
+ 't1w_aseg', 't1w_aparc', 'anat2std_xfm', 'std2anat_xfm', 'template',
+ 'joint_anat2std_xfm', 'joint_std2anat_xfm', 'joint_template',
+ 't1w2fsnative_xfm', 'fsnative2t1w_xfm']),
+ name='inputnode')
+ inputnode.inputs.dwi_file = dwi_file
+ inputnode.inputs.bvec_file = bvec_file
+ inputnode.inputs.bval_file = bval_file
+
+ outputnode = pe.Node(niu.IdentityInterface(
+ fields=['out_dwi', 'out_bvec', 'out_bval', 'out_rasb',
+ 'out_dwi_mask']),
+ name='outputnode')
+
+ gradient_table = pe.Node(CheckGradientTable(), name='gradient_table')
+
+    dwi_reference_wf = init_dwi_reference_wf(omp_nthreads=omp_nthreads)
+
+ # MAIN WORKFLOW STRUCTURE
+ workflow.connect([
+ (inputnode, gradient_table, [
+ ('dwi_file', 'dwi_file'),
+ ('bvec_file', 'in_bvec'),
+ ('bval_file', 'in_bval')]),
+ (inputnode, dwi_reference_wf, [('dwi_file', 'inputnode.dwi_file')]),
+ (gradient_table, dwi_reference_wf, [('b0_ixs', 'inputnode.b0_ixs')]),
+ (dwi_reference_wf, outputnode, [
+ ('outputnode.ref_image', 'out_dwi'),
+ ('outputnode.dwi_mask', 'out_dwi_mask')]),
+ (gradient_table, outputnode, [
+ ('out_bvec', 'out_bvec'),
+ ('out_bval', 'out_bval'),
+ ('out_rasb', 'out_rasb')])
+ ])
+
+ # REPORTING
+ reportlets_wf = init_reportlets_wf(reportlets_dir)
+ workflow.connect([
+ (inputnode, reportlets_wf, [('dwi_file', 'inputnode.source_file')]),
+ (dwi_reference_wf, reportlets_wf, [
+ ('outputnode.ref_image', 'inputnode.dwi_ref'),
+ ('outputnode.dwi_mask', 'inputnode.dwi_mask'),
+ ('outputnode.validation_report', 'inputnode.validation_report')]),
+ ])
+ return workflow
+
+
+def _get_wf_name(dwi_fname):
+ """
+ Derive the workflow name for supplied dwi file.
+
+ >>> _get_wf_name('/completely/made/up/path/sub-01_dwi.nii.gz')
+ 'dwi_preproc_wf'
+ >>> _get_wf_name('/completely/made/up/path/sub-01_run-1_dwi.nii.gz')
+ 'dwi_preproc_run_1_wf'
+
+ """
+ from nipype.utils.filemanip import split_filename
+ fname = split_filename(dwi_fname)[1]
+ fname_nosub = '_'.join(fname.split("_")[1:])
+ name = "dwi_preproc_" + fname_nosub.replace(
+ ".", "_").replace(" ", "").replace("-", "_").replace("dwi", "wf")
+
+ return name
diff --git a/dmriprep/workflows/dwi/outputs.py b/dmriprep/workflows/dwi/outputs.py
new file mode 100644
index 00000000..64cde08e
--- /dev/null
+++ b/dmriprep/workflows/dwi/outputs.py
@@ -0,0 +1,36 @@
+"""Write outputs (derivatives and reportlets)."""
+from nipype.pipeline import engine as pe
+from nipype.interfaces import utility as niu
+from niworkflows.engine.workflows import LiterateWorkflow as Workflow
+from ...interfaces import DerivativesDataSink
+
+
+def init_reportlets_wf(reportlets_dir, name='reportlets_wf'):
+ """Set up a battery of datasinks to store reports in the right location."""
+ from niworkflows.interfaces.masks import SimpleShowMaskRPT
+ workflow = Workflow(name=name)
+
+ inputnode = pe.Node(niu.IdentityInterface(
+ fields=['source_file', 'dwi_ref', 'dwi_mask',
+ 'validation_report']),
+ name='inputnode')
+ mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
+
+ ds_report_mask = pe.Node(
+ DerivativesDataSink(base_directory=reportlets_dir,
+ desc='brain', suffix='mask'),
+ name='ds_report_mask', run_without_submitting=True)
+ ds_report_validation = pe.Node(
+ DerivativesDataSink(base_directory=reportlets_dir,
+ desc='validation', keep_dtype=True),
+ name='ds_report_validation', run_without_submitting=True)
+
+ workflow.connect([
+ (inputnode, mask_reportlet, [('dwi_ref', 'background_file'),
+ ('dwi_mask', 'mask_file')]),
+ (inputnode, ds_report_validation, [('source_file', 'source_file')]),
+ (inputnode, ds_report_mask, [('source_file', 'source_file')]),
+ (inputnode, ds_report_validation, [('validation_report', 'in_file')]),
+ (mask_reportlet, ds_report_mask, [('out_report', 'in_file')]),
+ ])
+ return workflow
diff --git a/dmriprep/workflows/dwi/util.py b/dmriprep/workflows/dwi/util.py
new file mode 100644
index 00000000..4e9ab603
--- /dev/null
+++ b/dmriprep/workflows/dwi/util.py
@@ -0,0 +1,238 @@
+"""Utility workflows for :abbr:`DWI (diffusion weighted imaging)` data."""
+
+from nipype.pipeline import engine as pe
+from nipype.interfaces import utility as niu, fsl, afni
+
+from niworkflows.engine.workflows import LiterateWorkflow as Workflow
+from niworkflows.interfaces.images import ValidateImage
+from niworkflows.interfaces.fixes import FixN4BiasFieldCorrection as N4BiasFieldCorrection
+from niworkflows.interfaces.utils import CopyXForm
+
+from ...interfaces.images import ExtractB0, RescaleB0
+
+DEFAULT_MEMORY_MIN_GB = 0.01
+
+
+def init_dwi_reference_wf(omp_nthreads, name='dwi_reference_wf'):
+ """
+ Build a workflow that generates a reference b0 image from a DWI dataset.
+
+ To generate the reference *b0*, this workflow takes in a DWI dataset,
+ extracts the b0s, registers them to each other, rescales the signal
+ intensity values, and calculates a median image.
+
+ Then, the reference *b0* and its skull-stripped version are generated using
+ a custom methodology adapted from *niworkflows*.
+
+ Workflow Graph
+ .. workflow::
+ :graph2use: orig
+ :simple_form: yes
+
+ from dmriprep.workflows.dwi.util import init_dwi_reference_wf
+ wf = init_dwi_reference_wf(omp_nthreads=1)
+ wf.inputs.inputnode.b0_ixs=[0]
+
+ Parameters
+ ----------
+ omp_nthreads : int
+ Maximum number of threads an individual process may use
+ name : str
+ Name of workflow (default: ``dwi_reference_wf``)
+
+ Inputs
+ ------
+ dwi_file
+ dwi NIfTI file
+ b0_ixs : list
+ index of b0s in dwi NIfTI file
+
+ Outputs
+ -------
+ dwi_file
+ Validated dwi NIfTI file
+ raw_ref_image
+ Reference image
+ ref_image
+ Contrast-enhanced reference image
+ ref_image_brain
+ Skull-stripped reference image
+ dwi_mask
+ Skull-stripping mask of reference image
+ validation_report
+ HTML reportlet indicating whether ``dwi_file`` had a valid affine
+
+ See also
+ --------
+    * :py:func:`~dmriprep.workflows.dwi.util.init_enhance_and_skullstrip_dwi_wf`
+
+ """
+ workflow = Workflow(name=name)
+
+ inputnode = pe.Node(niu.IdentityInterface(fields=['dwi_file', 'b0_ixs']),
+ name='inputnode')
+ outputnode = pe.Node(
+ niu.IdentityInterface(fields=['dwi_file', 'raw_ref_image', 'ref_image',
+ 'ref_image_brain',
+ 'dwi_mask', 'validation_report']),
+ name='outputnode')
+
+ validate = pe.Node(ValidateImage(), name='validate', mem_gb=DEFAULT_MEMORY_MIN_GB)
+
+ extract_b0 = pe.Node(ExtractB0(), name='extract_b0')
+
+ reg_b0 = pe.Node(fsl.MCFLIRT(ref_vol=0, interpolation='sinc'), name='reg_b0')
+
+ pre_mask = pe.Node(afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
+ name='pre_mask')
+
+ rescale_b0 = pe.Node(RescaleB0(), name='rescale_b0')
+
+ enhance_and_skullstrip_dwi_wf = init_enhance_and_skullstrip_dwi_wf(
+ omp_nthreads=omp_nthreads)
+
+ workflow.connect([
+ (inputnode, validate, [('dwi_file', 'in_file')]),
+ (validate, extract_b0, [('out_file', 'in_file')]),
+ (inputnode, extract_b0, [('b0_ixs', 'b0_ixs')]),
+ (extract_b0, reg_b0, [('out_file', 'in_file')]),
+ (reg_b0, pre_mask, [('out_file', 'in_file')]),
+ (reg_b0, rescale_b0, [('out_file', 'in_file')]),
+ (pre_mask, rescale_b0, [('out_file', 'mask_file')]),
+ (rescale_b0, enhance_and_skullstrip_dwi_wf, [('out_ref', 'inputnode.in_file')]),
+ (pre_mask, enhance_and_skullstrip_dwi_wf, [('out_file', 'inputnode.pre_mask')]),
+ (validate, outputnode, [('out_file', 'dwi_file'),
+ ('out_report', 'validation_report')]),
+ (rescale_b0, outputnode, [('out_ref', 'raw_ref_image')]),
+ (enhance_and_skullstrip_dwi_wf, outputnode, [
+ ('outputnode.bias_corrected_file', 'ref_image'),
+ ('outputnode.mask_file', 'dwi_mask'),
+ ('outputnode.skull_stripped_file', 'ref_image_brain')]),
+ ])
+ return workflow
+
+
+def init_enhance_and_skullstrip_dwi_wf(
+ name='enhance_and_skullstrip_dwi_wf',
+ omp_nthreads=1):
+ """
+ Enhance a *b0* reference and perform brain extraction.
+
+ This workflow takes in a *b0* reference image and sharpens the histogram
+ with the application of the N4 algorithm for removing the
+ :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
+ mask.
+
+ Steps of this workflow are:
+
+ 1. Run ANTs' ``N4BiasFieldCorrection`` on the input
+ dwi reference image and mask.
+ 2. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
+ dilation of one iteration and a sphere of 6mm as structuring element.
+    3. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
+       with the latest mask calculated in 2), then use AFNI's ``3dUnifize``
+       to *standardize* the T2* contrast distribution.
+    4. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
+       enhancement of 3).
+ 5. Calculate a final mask as the intersection of 2) and 4).
+ 6. Apply final mask on the enhanced reference.
+
+ Workflow Graph:
+ .. workflow ::
+ :graph2use: orig
+ :simple_form: yes
+
+ from dmriprep.workflows.dwi.util import init_enhance_and_skullstrip_dwi_wf
+ wf = init_enhance_and_skullstrip_dwi_wf(omp_nthreads=1)
+
+ .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
+
+ Parameters
+ ----------
+ name : str
+ Name of workflow (default: ``enhance_and_skullstrip_dwi_wf``)
+ omp_nthreads : int
+ number of threads available to parallel nodes
+
+ Inputs
+ ------
+ in_file
+ The *b0* reference (single volume)
+ pre_mask
+ initial mask
+
+ Outputs
+ -------
+ bias_corrected_file
+ the ``in_file`` after `N4BiasFieldCorrection`_
+ skull_stripped_file
+ the ``bias_corrected_file`` after skull-stripping
+ mask_file
+ mask of the skull-stripped input file
+ out_report
+ reportlet for the skull-stripping
+
+ """
+ workflow = Workflow(name=name)
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
+ name='inputnode')
+ outputnode = pe.Node(niu.IdentityInterface(fields=[
+ 'mask_file', 'skull_stripped_file', 'bias_corrected_file']), name='outputnode')
+
+ # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
+ n4_correct = pe.Node(N4BiasFieldCorrection(
+        dimension=3, copy_header=True, bspline_fitting_distance=200, shrink_factor=2),
+ name='n4_correct', n_procs=1)
+ n4_correct.inputs.rescale_intensities = True
+
+ # Create a generous BET mask out of the bias-corrected EPI
+ skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
+ name='skullstrip_first_pass')
+ bet_dilate = pe.Node(fsl.DilateImage(
+ operation='max', kernel_shape='sphere', kernel_size=6.0,
+ internal_datatype='char'), name='skullstrip_first_dilate')
+ bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')
+
+ # Use AFNI's unifize for T2 contrast & fix header
+ unifize = pe.Node(afni.Unifize(
+ t2=True, outputtype='NIFTI_GZ',
+ args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
+ out_file='uni.nii.gz'), name='unifize')
+ fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)
+
+ # Run AFNI's 3dAutomask to extract a refined brain mask
+ skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
+ outputtype='NIFTI_GZ'),
+ name='skullstrip_second_pass')
+ fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2', mem_gb=0.1)
+
+ # Take intersection of both masks
+ combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
+ name='combine_masks')
+
+ # Compute masked brain
+ apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
+
+ workflow.connect([
+ (inputnode, n4_correct, [('in_file', 'input_image'),
+ ('pre_mask', 'mask_image')]),
+ (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
+ (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
+ (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
+ (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
+ (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
+ (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
+ (bet_mask, unifize, [('out_file', 'in_file')]),
+ (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
+ (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
+ (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
+ (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')]),
+ (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
+ (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
+ (combine_masks, apply_mask, [('out_file', 'mask_file')]),
+ (combine_masks, outputnode, [('out_file', 'mask_file')]),
+ (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
+ (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
+ ])
+
+ return workflow