Commit

Merge pull request #21 from DCAN-Labs/develop
v0.0.2 release
ericearl committed Sep 2, 2019
2 parents ad43552 + 5d4f24e commit 0bf0a84
Showing 7 changed files with 123 additions and 37 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
.idea
.nfs*
__pycache__/
build_and_zip_my_image.sh
24 changes: 14 additions & 10 deletions Dockerfile
@@ -7,7 +7,7 @@
#
# Timestamp: 2018-03-15 20:22:57

FROM ubuntu:17.10
FROM ubuntu:18.04

ARG DEBIAN_FRONTEND=noninteractive

@@ -24,6 +24,7 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \
curl \
dirmngr\
locales \
gnupg2 \
python2.7 \
python-pip \
rsync \
@@ -34,10 +35,13 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \
build-essential \
libglib2.0-0 \
python3 \
python3-pip \
git \
bc \
dc \
libgomp1 \
libssl1.0.0 \
libssl-dev \
libxmu6 \
libxt6 \
libfontconfig1 \
@@ -55,10 +59,10 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN pip install setuptools wheel
RUN pip3 install setuptools wheel
RUN pip install pyyaml numpy pillow pandas
RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py && rm -f get-pip.py

RUN wget -O- http://neuro.debian.net/lists/artful.us-ca.full | tee /etc/apt/sources.list.d/neurodebian.sources.list
RUN wget -O- http://neuro.debian.net/lists/bionic.us-ca.full | tee /etc/apt/sources.list.d/neurodebian.sources.list
RUN apt-key adv --recv-keys --keyserver hkp://ha.pool.sks-keyservers.net 0xA5D32F012649A5A9 || apt-key adv --recv-keys --keyserver hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9
RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
@@ -157,13 +161,13 @@ ENV FSLDIR=/opt/fsl \
#---------------------
# Install MATLAB Compiler Runtime
#---------------------
RUN mkdir /opt/mcr /opt/mcr_download
WORKDIR /opt/mcr_download
RUN mkdir /opt/matlab /opt/matlab_download
WORKDIR /opt/matlab_download
RUN wget http://ssd.mathworks.com/supportfiles/downloads/R2016b/deployment_files/R2016b/installers/glnxa64/MCR_R2016b_glnxa64_installer.zip \
&& unzip MCR_R2016b_glnxa64_installer.zip \
&& ./install -agreeToLicense yes -mode silent -destinationFolder /opt/mcr \
&& rm -rf /opt/mcr_download
#ENV LD_LIBRARY_PATH=/opt/mcr/v91/bin/glnxa64:/opt/mcr/v91/glnxa64:/opt/mcr/v91/runtime/glnxa64:$LD_LIBRARY_PATH
&& ./install -agreeToLicense yes -mode silent -destinationFolder /opt/matlab \
&& rm -rf /opt/matlab_download
#ENV LD_LIBRARY_PATH=/opt/matlab/v91/bin/glnxa64:/opt/matlab/v91/glnxa64:/opt/matlab/v91/runtime/glnxa64:$LD_LIBRARY_PATH

#---------------------
# Install MSM Binaries
@@ -215,7 +219,7 @@ ENV WORKBENCHDIR=/opt/workbench \
RUN ln -s -f /lib/x86_64-linux-gnu/libz.so.1.2.11 /opt/workbench/libs_linux64/libz.so.1

# Fix libstdc++6 error
RUN ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6.0.24 /opt/mcr/v91/sys/os/glnxa64/libstdc++.so.6
RUN ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6.0.24 /opt/matlab/v91/sys/os/glnxa64/libstdc++.so.6

# add dcan dependencies
RUN mkdir /opt/dcan-tools
@@ -244,7 +248,7 @@ RUN mkdir /bids_input /output /atlases /config

# include bidsapp interface
COPY ["app", "/app"]
RUN python3 -m pip install -r /app/requirements.txt
RUN pip3 install -r /app/requirements.txt
# setup entrypoint
COPY ["./entrypoint.sh", "/entrypoint.sh"]
COPY ["LICENSE", "/LICENSE"]
8 changes: 5 additions & 3 deletions README.md
@@ -1,5 +1,7 @@
# ABCD-HCP BIDS fMRI Pipeline

[![DOI](https://zenodo.org/badge/171551109.svg)](https://zenodo.org/badge/latestdoi/171551109)

This software takes a BIDS folder as input and determines parameters
for the DCAN Labs' modified HCP pipeline, calling upon the proper code
to run the subject(s).
@@ -68,8 +70,6 @@ and the call to singularity is prefaced by "env -i"
### Options:

```{bash}
usage: abcd-hcp-pipeline bids\_dir output\_dir --freesurfer-license=<LICENSE>
The Developmental Cognition and Neuroimaging (DCAN) Labs fMRI Pipeline [1].
This BIDS application initiates a functional MRI processing pipeline built
upon the Human Connectome Project's minimal processing pipelines [2]. The
@@ -149,7 +149,9 @@ Runtime options:
References
----------
[1] abcd-hcp-pipeline (for now, please cite [3] in use of this software)
[1] Sturgeon, D., Perrone, A., Earl, E., & Snider, K.
DCAN_Labs/abcd-hcp-pipeline. DOI: 10.5281/zenodo.2587209. (check on
zenodo.org for a version-specific DOI/citation)
[2] Glasser, MF. et al. The minimal preprocessing pipelines for the Human
Connectome Project. Neuroimage. 2013 Oct 15;80:105-24.
10.1016/j.neuroimage.2013.04.127
2 changes: 1 addition & 1 deletion app/SetupEnv.sh
@@ -38,7 +38,7 @@ export MSMBINDIR=/opt/msm/Ubuntu


# Set up DCAN Environment Variables
export MCRROOT=/opt/mcr/v91
export MCRROOT=/opt/matlab/v91
export DCANBOLDPROCDIR=/opt/dcan-tools/dcan_bold_proc
export DCANBOLDPROCVER=DCANBOLDProc_v4.0.0
export EXECSUMDIR=/opt/dcan-tools/executivesummary
2 changes: 1 addition & 1 deletion app/helpers.py
@@ -116,7 +116,7 @@ def set_functionals(layout, subject, sessions):

spec = {
'func': [f.filename for f in func],
'func_metadata': func_metadata[0]
'func_metadata': func_metadata
}
return spec

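For context, a minimal sketch of what this one-line change preserves, using hypothetical file names and sidecar values (not from the repository): `spec['func_metadata']` is now a list aligned with `spec['func']`, so each functional run keeps its own sidecar dictionary instead of every run inheriting the first run's metadata.

```python
# Hypothetical illustration of the spec layout now returned by set_functionals:
# one metadata dict per functional run, aligned by position with 'func'.
func = ['sub-01_task-rest_run-01_bold.nii.gz',
        'sub-01_task-rest_run-02_bold.nii.gz']
func_metadata = [{'PhaseEncodingDirection': 'j-', 'RepetitionTime': 0.8},
                 {'PhaseEncodingDirection': 'j', 'RepetitionTime': 0.8}]

spec = {
    'func': func,
    'func_metadata': func_metadata,  # previously collapsed to func_metadata[0]
}

# downstream code can now look up run-specific values by position
assert len(spec['func']) == len(spec['func_metadata'])
```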
40 changes: 32 additions & 8 deletions app/pipelines.py
@@ -118,16 +118,20 @@ def __init__(self, bids_data, output_directory):
# distortion correction method: TOPUP, FIELDMAP, or NONE, inferred
# from files, defaults to spin echo (topup) if both field maps exist
self.unwarpdir = get_readoutdir(self.bids_data['t1w_metadata'])
fmap_types = {'magnitude', 'magnitude1', 'magnitude2', 'phasediff',
'phase1', 'phase2', 'fieldmap'}
if 'epi' in self.bids_data['types']:
self.dcmethod = 'TOPUP'
# spin echo field map spacing @TODO read during volume per fmap?
self.echospacing = self.bids_data['fmap_metadata']['positive'][0][
'EffectiveEchoSpacing']
self.echospacing = ('%.12f' % self.echospacing).rstrip('0')
# distortion correction phase encoding direction
# distortion correction phase encoding direction
# somewhat arbitrary in PreFreeSurfer
if self.bids_data['func']:
# take phase encoding direction from first functional.
self.seunwarpdir = ijk_to_xyz(
self.bids_data['func_metadata']['PhaseEncodingDirection'])
self.bids_data['func_metadata'][0]['PhaseEncodingDirection'])
else:
# if no functional data is provided, use positive spin echo
self.seunwarpdir = ijk_to_xyz(
@@ -138,13 +142,30 @@ def __init__(self, bids_data, output_directory):
self.fmapmag = self.fmapphase = self.fmapgeneralelectric = \
self.echodiff = self.gdcoeffs = None

elif 'magnitude' in self.bids_data['types']:
elif fmap_types.intersection(set(self.bids_data['types'])):
self.dcmethod = 'FIELDMAP'
types = self.bids_data['fmap'].keys()
# gradient field map delta TE
self.echodiff = None # @TODO
if 'magnitude1' in types and 'magnitude2' in types:
self.fmapmag = self.bids_data['fmap']['magnitude1']
self.echodiff = self.bids_data['fmap_metadata'][
'magnitude2']['EchoTime'] - self.bids_data[
'fmap_metadata']['magnitude1']['EchoTime']
self.echodiff = '%g' % (self.echodiff * 1000.) # milliseconds
self.fmapgeneralelectric = None
elif 'magnitude' in types:
raise NotImplementedError
else:
raise Exception('No FM magnitude image identified')

if 'phasediff' in types:
self.fmapphase = self.bids_data['fmap']['phasediff']
elif 'phase1' in types and 'phase2' in types:
raise NotImplementedError
else:
raise Exception('No FM phase image identified')
# set unused spin echo parameters to none
self.seunwarpdir = None
self.seunwarpdir = self.gdcoeffs = self.echospacing = None

else:
# all distortion correction parameters set to none
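As a side note, here is a small standalone sketch of the two formatting details in this hunk, using hypothetical sidecar values: `EffectiveEchoSpacing` is rendered with twelve decimals and trailing zeros stripped (TOPUP branch), and the new FIELDMAP branch takes `echodiff` as the magnitude2 minus magnitude1 `EchoTime`, converted to milliseconds and formatted with `%g`.

```python
# Hypothetical sidecar values (in seconds), for illustration only.
effective_echo_spacing = 0.00058
echospacing = ('%.12f' % effective_echo_spacing).rstrip('0')
print(echospacing)  # -> 0.00058

echo_time_mag1 = 0.00492  # magnitude1 EchoTime
echo_time_mag2 = 0.00738  # magnitude2 EchoTime
echodiff = '%g' % ((echo_time_mag2 - echo_time_mag1) * 1000.)  # milliseconds
print(echodiff)  # -> 2.46
```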
@@ -436,7 +457,7 @@ def check_expected_outputs(self):
dne_list = [f for i, f in enumerate(outputs) if not checklist[i]]
for f in dne_list:
print('file not found: %s' % f)
if self.ignore_expected_outputs:
if not self.ignore_expected_outputs:
return False

return True
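The one-character fix above inverts the flag test. A tiny standalone sketch of the intended semantics, using a hypothetical helper simplified from the surrounding method: missing outputs fail the check unless `ignore_expected_outputs` was requested.

```python
# Hypothetical stand-in for the corrected logic in check_expected_outputs.
def outputs_ok(missing, ignore_expected_outputs):
    for f in missing:
        print('file not found: %s' % f)
    if missing and not ignore_expected_outputs:
        return False
    return True

assert outputs_ok([], ignore_expected_outputs=False)
assert not outputs_ok(['T1w_acpc_dc.nii.gz'], ignore_expected_outputs=False)
assert outputs_ok(['T1w_acpc_dc.nii.gz'], ignore_expected_outputs=True)
```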
@@ -777,7 +798,7 @@ def _get_intended_sefmaps(self):
intended_idx[direction] = idx
break
else:
if idx != 1:
if idx != 0:
print('WARNING: the intended %s spin echo for anatomical '
'distortion correction is not explicitly defined in '
'the sidecar json.' % direction)
@@ -790,12 +811,15 @@

@property
def args(self):
for fmri in self.config.get_bids('func'):
for fmri, meta in zip(self.config.get_bids('func'),
self.config.get_bids('func_metadata')):
# set ts parameters
self.kwargs['fmritcs'] = fmri
self.kwargs['fmriname'] = get_fmriname(fmri)
self.kwargs['fmriscout'] = None # not implemented
if self.kwargs['dcmethod'] == 'TOPUP':
self.kwargs['seunwarpdir'] = ijk_to_xyz(
meta['PhaseEncodingDirection'])
self.kwargs['sephasepos'], self.kwargs['sephaseneg'] = \
self._get_intended_sefmaps()
else:
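To illustrate the per-run change above: with TOPUP, `seunwarpdir` is now derived from each run's own `PhaseEncodingDirection` rather than a single shared value. The sketch below uses a hypothetical stand-in for the repo's `ijk_to_xyz` helper (the real implementation may differ) and made-up metadata.

```python
# Hypothetical stand-in for ijk_to_xyz; the repository's helper may differ.
def ijk_to_xyz(direction):
    axis = {'i': 'x', 'j': 'y', 'k': 'z'}[direction[0]]
    return axis + direction[1:]  # keep a trailing '-' if present

func = ['task-rest_run-01_bold.nii.gz', 'task-rest_run-02_bold.nii.gz']
func_metadata = [{'PhaseEncodingDirection': 'j-'},
                 {'PhaseEncodingDirection': 'j'}]

for fmri, meta in zip(func, func_metadata):
    # run-01 -> y-, run-02 -> y
    print(fmri, ijk_to_xyz(meta['PhaseEncodingDirection']))
```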
83 changes: 69 additions & 14 deletions app/run.py
@@ -10,7 +10,9 @@
__references__ = \
"""References
----------
[1] abcd-hcp-pipeline (for now, please cite [3] in use of this software)
[1] Sturgeon, D., Perrone, A., Earl, E., & Snider, K.
DCAN_Labs/abcd-hcp-pipeline. DOI: 10.5281/zenodo.2587209. (check on
zenodo.org for a version-specific DOI/citation)
[2] Glasser, MF. et al. The minimal preprocessing pipelines for the Human
Connectome Project. Neuroimage. 2013 Oct 15;80:105-24.
10.1016/j.neuroimage.2013.04.127
@@ -23,7 +25,7 @@
[6] Avants, BB et al. The Insight ToolKit image registration framework. Front
Neuroinform. 2014 Apr 28;8:44. doi: 10.3389/fninf.2014.00044. eCollection 2014.
"""
__version__ = "1.0.1"
__version__ = "0.0.1"

import argparse
import os
@@ -50,7 +52,7 @@ def _cli():
'subject_list': args.subject_list,
'collect': args.collect,
'ncpus': args.ncpus,
'start_stage': args.stage,
'stages': args.stages,
'bandstop_params': args.bandstop,
'check_only': args.check_outputs_only,
'run_abcd_task': args.abcd_task,
@@ -102,7 +104,7 @@ def generate_parser(parser=None):
'does not include the "sub-" prefix'
)
parser.add_argument(
'--freesurfer-license', dest='freesurfer_license',
'--freesurfer-license', dest='freesurfer_license',
metavar='LICENSE_FILE',
help='If using docker or singularity, you will need to acquire and '
'provide your own FreeSurfer license. The license can be '
@@ -122,8 +124,33 @@ def generate_parser(parser=None):
'produce non-deterministic results.'
)
parser.add_argument(
'--stage',
help='Begin from a given stage, continuing through. Options: '
'--stage','--stages', dest='stages',
help='specify a subset of stages to run. '
'If a single stage name is given, the pipeline will be '
'started at that stage. If a string with a ":" is given, '
'a stage name before the ":" will tell run.py where to '
'start and a stage name after the ":" will tell it where '
'to stop. If no ":" is found, the pipeline will start '
'with the stage specified and run to the end. '
'Calling run.py with: \n'
' --stage="PreFreeSurfer:PreFreeSurfer" \n'
'or with: \n'
' --stage=":PreFreeSurfer" \n'
'will cause only PreFreeSurfer to be run. '
'(This can be useful to do optional processing between '
'PreFreeSurfer and FreeSurfer.) '
'Calling run.py with: \n'
' --stages="FreeSurfer:FMRISurface" \n'
'will start with stage FreeSurfer and stop after '
'FMRISurface (before DCANBOLDProcessing). '
'Default start is PreFreeSurfer and default '
'stop is ExecutiveSummary. The specifications: \n'
' --stages="PreFreeSurfer:ExecutiveSummary" \n'
' --stages=":ExecutiveSummary" \n'
' --stages="PreFreeSurfer:" \n'
'are exactly identical to each other and to sending '
'no --stage argument. '
'Valid stage names: '
'PreFreeSurfer, FreeSurfer, PostFreeSurfer, FMRIVolume, '
'FMRISurface, DCANBOLDProcessing, ExecutiveSummary, CustomClean'
)
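A small standalone sketch of the option aliasing added here (nothing repo-specific beyond the stage names): both spellings feed the same `dest`, so existing `--stage` command lines keep working while `--stages` becomes the documented name.

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--stage', '--stages', dest='stages')

print(parser.parse_args(['--stage', 'PreFreeSurfer:']).stages)      # -> PreFreeSurfer:
print(parser.parse_args(['--stages', ':ExecutiveSummary']).stages)  # -> :ExecutiveSummary
```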
@@ -189,9 +216,9 @@ def generate_parser(parser=None):


def interface(bids_dir, output_dir, subject_list=None, collect=False, ncpus=1,
start_stage=None, bandstop_params=None, check_only=False,
stages=None, bandstop_params=None, check_only=False,
run_abcd_task=False, study_template=None, cleaning_json=None,
print_commands=False, ignore_expected_outputs=False,
print_commands=False, ignore_expected_outputs=False,
ignore_modalities=[], freesurfer_license=None):
"""
main application interface
@@ -202,7 +229,7 @@ def interface(bids_dir, output_dir, subject_list=None, collect=False, ncpus=1,
"helpers.read_bids_dataset" for more information.
:param collect: treats each subject as having only one session.
:param ncpus: number of cores for parallelized processing.
:param start_stage: start from a given stage.
:param stages: only run a subset of stages.
:param bandstop_params: tuple of lower and upper bound for stop-band filter
:param check_only: check expected outputs for each stage then terminate
:return:
@@ -272,12 +299,40 @@ def interface(bids_dir, output_dir, subject_list=None, collect=False, ncpus=1,
cclean = CustomClean(session_spec, cleaning_json)
order.append(cclean)

if start_stage:
if stages:
# User can indicate start or end or both; default
# to entire list built above.
start_idx = 0
end_idx = len(order)

idx_colon = stages.find(":")
if idx_colon > -1:
# Start stage is everything before the colon.
start_stage = stages[:idx_colon]
# End stage is everything after the colon.
end_stage = stages[(idx_colon+1):]
else:
# No colon means no end stage.
start_stage = stages
end_stage = None

names = [x.__class__.__name__ for x in order]
assert start_stage in names, \
'"%s" is unknown, check class name and case for given stage' \
% start_stage
order = order[names.index(start_stage):]

if start_stage:
assert start_stage in names, \
'"%s" is unknown, check class name and case for given stage' \
% start_stage
start_idx = names.index(start_stage)

if end_stage:
assert end_stage in names, \
'"%s" is unknown, check class name and case for given stage' \
% end_stage
end_idx = names.index(end_stage)
end_idx += 1 # Include end stage.

# Slice the list.
order = order[start_idx:end_idx]

# special runtime options
if check_only:
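For reference, the start/end selection above condensed into a standalone sketch that uses plain stage names instead of the pipeline's stage objects (`select` is a hypothetical helper for illustration only):

```python
STAGE_NAMES = ['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', 'FMRIVolume',
               'FMRISurface', 'DCANBOLDProcessing', 'ExecutiveSummary']

def select(stages):
    # default to the full list, as in run.py
    start_idx, end_idx = 0, len(STAGE_NAMES)
    if ':' in stages:
        start_stage, _, end_stage = stages.partition(':')
    else:
        start_stage, end_stage = stages, None
    if start_stage:
        start_idx = STAGE_NAMES.index(start_stage)
    if end_stage:
        end_idx = STAGE_NAMES.index(end_stage) + 1  # include the end stage
    return STAGE_NAMES[start_idx:end_idx]

print(select('FreeSurfer:FMRISurface'))  # FreeSurfer through FMRISurface
print(select(':PreFreeSurfer'))          # only PreFreeSurfer
print(select('FMRIVolume'))              # FMRIVolume through ExecutiveSummary
```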
