Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[RTM] Code revision: confounds, utilities, etc. #641

Merged
merged 32 commits into from
Aug 4, 2017
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
8c609e8
remove some Function Nodes
oesteban Aug 3, 2017
8dca00d
fix error importing from nipype.utils.filemanip
oesteban Aug 3, 2017
daf1734
fix flake8
oesteban Aug 3, 2017
8b42b1f
clear up underused functions
oesteban Aug 3, 2017
7686ce6
add doctests
oesteban Aug 3, 2017
92999cd
remove futures
oesteban Aug 3, 2017
0df1c06
remove builtins
oesteban Aug 3, 2017
0772732
Merge branch 'enh/CodeRevision-fmap' into enh/CodeRevision-other
oesteban Aug 3, 2017
e25ea45
fix nonexistent connections
oesteban Aug 3, 2017
b23a111
fixing tests (wip)
oesteban Aug 4, 2017
6aad57c
Merge branch 'enh/CodeRevision-fmap' into enh/CodeRevision-other
oesteban Aug 4, 2017
7818569
inputs of GatherConfounds are now optional
oesteban Aug 4, 2017
1c380c7
add pthres to TPM2ROI interface
oesteban Aug 4, 2017
1240890
fix typo
oesteban Aug 4, 2017
deedd25
fix failing doctests
oesteban Aug 4, 2017
faa5913
fix failing doctest in confounds
oesteban Aug 4, 2017
7acd91a
fix building docs
oesteban Aug 4, 2017
f3795d9
Merge branch 'enh/CodeRevision-fmap' into enh/CodeRevision-other
oesteban Aug 4, 2017
0c7b3fc
fixing flake8 problems in tests
oesteban Aug 4, 2017
75e8c4a
remove mock from docs (autodoc has now an internal mock utility)
oesteban Aug 4, 2017
ce7d2d0
pacify flake8
oesteban Aug 4, 2017
891f2bc
refactor tests, also fixes #139 (even though this issue was already c…
oesteban Aug 4, 2017
eb0fe88
Merge branch 'enh/CodeRevision-fmap' into enh/CodeRevision-other
oesteban Aug 4, 2017
6b1b20a
fix up ICAConfounds interface
oesteban Aug 4, 2017
ce27889
remove test (does not exist anymore) from flake8 command in travis
oesteban Aug 4, 2017
3852301
remove some remaining absolute imports
oesteban Aug 4, 2017
220ea51
Merge remote-tracking branch 'upstream/master' into enh/CodeRevision-…
oesteban Aug 4, 2017
648465b
fix final problems
oesteban Aug 4, 2017
1d93c78
revise all suffices in fname_presuffix start with _
oesteban Aug 4, 2017
bec30ed
use `runtime.cwd` when posible for setting newpath of fname_presuffix
oesteban Aug 4, 2017
beeed3e
final fixes
oesteban Aug 4, 2017
36b818b
remove unnecessary import os
oesteban Aug 4, 2017
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion fmriprep/cli/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
fMRI preprocessing workflow
=====
"""
from __future__ import absolute_import, division, print_function, unicode_literals
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is the plan to completely discontinue Py2 support? At least a couple months ago, we were accepting patches from users who needed Py2, as long as they didn't complicate the code.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I would discontinue py2 for good at this point, simplify our lives. But you are right, we haven't talked about this. WDYT @chrisfilo @rwblair ?


import os
import os.path as op
Expand Down
1 change: 0 additions & 1 deletion fmriprep/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
"""
Base module variables
"""
from __future__ import unicode_literals

__version__ = '0.6.1-dev'
__author__ = 'The CRN developers'
Expand Down
3 changes: 2 additions & 1 deletion fmriprep/interfaces/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,6 @@
)
from .surf import NormalizeSurf, GiftiNameSource, GiftiSetAnatomicalStructure
from .reports import AnatomicalSummary
from .utils import ApplyMask
from .utils import ApplyMask, TPM2ROI, ConcatROIs, CombineROIs, AddTSVHeader
from .fmap import FieldEnhance
from .confounds import GatherConfounds, ICAConfounds
4 changes: 1 addition & 3 deletions fmriprep/interfaces/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
>>> os.chdir(data_root)

"""
from __future__ import print_function, division, absolute_import, unicode_literals

import os
import os.path as op
Expand All @@ -32,7 +31,6 @@
traits, isdefined, TraitedSpec, BaseInterfaceInputSpec,
File, Directory, InputMultiPath, OutputMultiPath, Str
)
from builtins import str, bytes

from niworkflows.interfaces.base import SimpleInterface

Expand Down Expand Up @@ -73,7 +71,7 @@ def _run_interface(self, runtime):


class BIDSDataGrabberInputSpec(BaseInterfaceInputSpec):
subject_data = traits.Dict((str, bytes), traits.Any)
subject_data = traits.Dict(Str, traits.Any)
subject_id = Str()


Expand Down
182 changes: 182 additions & 0 deletions fmriprep/interfaces/confounds.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,182 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:

import os
import shutil

import numpy as np
import pandas as pd

from niworkflows.nipype import logging
from niworkflows.nipype.interfaces.base import (
    traits, isdefined, TraitedSpec, BaseInterfaceInputSpec, File, Directory
)
from niworkflows.interfaces.base import SimpleInterface

LOGGER = logging.getLogger('interface')


class GatherConfoundsInputSpec(BaseInterfaceInputSpec):
    """Inputs for :class:`GatherConfounds`.

    None of these inputs is mandatory: any subset of confound sources may be
    produced by the workflow (ICA-AROMA, in particular, is optional), and
    ``_gather_confounds`` explicitly skips sources that are ``None``.
    """
    signals = File(exists=True, desc='input signals')
    dvars = File(exists=True, desc='file containing DVARS')
    fd = File(exists=True, desc='input framewise displacement')
    tcompcor = File(exists=True, desc='input tCompCorr')
    acompcor = File(exists=True, desc='input aCompCorr')
    cos_basis = File(exists=True, desc='input cosine basis')
    motion = File(exists=True, desc='input motion parameters')
    aroma = File(exists=True, desc='input ICA-AROMA')
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These are not mandatory as presently written. I believe the idea is that the individual inputs can fail or (in the case of aroma, at least) not be run, and the interface will still work.



class GatherConfoundsOutputSpec(TraitedSpec):
    # Path of the combined TSV written by _gather_confounds (confounds.tsv)
    confounds_file = File(exists=True, desc='output confounds file')
    # Human-readable names of the confound sources that were provided
    confounds_list = traits.List(traits.Str, desc='list of headers')


class GatherConfounds(SimpleInterface):
    """
    Combine various sources of confounds in one TSV file.

    Undefined inputs are forwarded to ``_gather_confounds`` as ``None`` so
    that missing confound sources (e.g. when ICA-AROMA is not run) are
    simply skipped rather than breaking the interface.
    """
    input_spec = GatherConfoundsInputSpec
    output_spec = GatherConfoundsOutputSpec

    def _run_interface(self, runtime):
        def _none_if_undefined(value):
            # _gather_confounds distinguishes "not provided" via None
            return value if isdefined(value) else None

        combined_out, confounds_list = _gather_confounds(
            _none_if_undefined(self.inputs.signals),
            _none_if_undefined(self.inputs.dvars),
            _none_if_undefined(self.inputs.fd),
            _none_if_undefined(self.inputs.tcompcor),
            _none_if_undefined(self.inputs.acompcor),
            _none_if_undefined(self.inputs.cos_basis),
            _none_if_undefined(self.inputs.motion),
            _none_if_undefined(self.inputs.aroma),
        )
        # BUGFIX: the original stored the TSV path under a nonexistent
        # 'combined_out' key and the header list under 'confounds_file';
        # store each result under its declared output name instead.
        self._results['confounds_file'] = combined_out
        self._results['confounds_list'] = confounds_list
        return runtime


class ICAConfoundsInputSpec(BaseInterfaceInputSpec):
    # Directory containing melodic.ica/melodic_mix and classified_motion_ICs.txt
    in_directory = Directory(mandatory=True, desc='directory where ICA derivatives are found')
    # When True, a failed/empty ICA-AROMA classification does not raise
    ignore_aroma_err = traits.Bool(False, usedefault=True, desc='ignore ICA-AROMA errors')


class ICAConfoundsOutputSpec(TraitedSpec):
    """Outputs of :class:`ICAConfounds`.

    BUGFIX: the original spec declared only ``out_file``, which
    ``ICAConfounds._run_interface`` never sets; declare the outputs the
    interface actually produces so they can be connected downstream.
    """
    # Only set when ICA-AROMA classified usable motion components
    aroma_confounds = File(exists=True, desc='output confounds file extracted from ICA-AROMA')
    motion_ics_out = File(exists=True, desc='ICA-AROMA motion components, copied for derivatives')
    melodic_mix_out = File(exists=True, desc='melodic mix file with tab separators')


class ICAConfounds(SimpleInterface):
    """Extract aggressive confound regressors from an ICA-AROMA output directory."""
    input_spec = ICAConfoundsInputSpec
    output_spec = ICAConfoundsOutputSpec

    def _run_interface(self, runtime):
        (aroma_confounds,
         motion_ics_out,
         melodic_mix_out) = _get_ica_confounds(self.inputs.in_directory)

        # A None confounds file means ICA-AROMA classified no usable
        # components; treat that as an error unless explicitly ignored.
        if aroma_confounds is None:
            if not self.inputs.ignore_aroma_err:
                raise RuntimeError('ICA-AROMA failed')
        else:
            self._results['aroma_confounds'] = aroma_confounds

        self._results['motion_ics_out'] = motion_ics_out
        self._results['melodic_mix_out'] = melodic_mix_out
        return runtime


def _gather_confounds(signals=None, dvars=None, fdisp=None,
tcompcor=None, acompcor=None, cos_basis=None,
motion=None, aroma=None):
''' load confounds from the filenames, concatenate together horizontally, and re-save '''

def less_breakable(a_string):
''' hardens the string to different envs (i.e. case insensitive, no whitespace, '#' '''
return ''.join(a_string.split()).strip('#')

def _adjust_indices(left_df, right_df):
# This forces missing values to appear at the beggining of the DataFrame
# instead of the end
index_diff = len(left_df.index) - len(right_df.index)
if index_diff > 0:
right_df.index = range(index_diff,
len(right_df.index) + index_diff)
elif index_diff < 0:
left_df.index = range(-index_diff,
len(left_df.index) - index_diff)

all_files = []
confounds_list = []
for confound, name in ((signals, 'Global signals'),
(dvars, 'DVARS'),
(fdisp, 'Framewise displacement'),
(tcompcor, 'tCompCor'),
(acompcor, 'aCompCor'),
(cos_basis, 'Cosine basis'),
(motion, 'Motion parameters'),
(aroma, 'ICA-AROMA')):
if confound is not None:
confounds_list.append(name)
if os.path.exists(confound) and os.stat(confound).st_size > 0:
all_files.append(confound)

confounds_data = pd.DataFrame()
for file_name in all_files: # assumes they all have headings already
new = pd.read_csv(file_name, sep="\t")
for column_name in new.columns:
new.rename(columns={column_name: less_breakable(column_name)},
inplace=True)

_adjust_indices(confounds_data, new)
confounds_data = pd.concat((confounds_data, new), axis=1)

combined_out = os.path.abspath('confounds.tsv')
confounds_data.to_csv(combined_out, sep=str("\t"), index=False,
na_rep="n/a")

return combined_out, confounds_list


def _get_ica_confounds(ica_out_dir):
# load the txt files from ICA-AROMA
melodic_mix = os.path.join(ica_out_dir, 'melodic.ica/melodic_mix')
motion_ics = os.path.join(ica_out_dir, 'classified_motion_ICs.txt')

# Change names of motion_ics and melodic_mix for output
melodic_mix_out = os.path.join(ica_out_dir, 'MELODICmix.tsv')
motion_ics_out = os.path.join(ica_out_dir, 'AROMAnoiseICs.csv')

# melodic_mix replace spaces with tabs
with open(melodic_mix, 'r') as melodic_file:
melodic_mix_out_char = melodic_file.read().replace(' ', '\t')
# write to output file
with open(melodic_mix_out, 'w+') as melodic_file_out:
melodic_file_out.write(melodic_mix_out_char)

# copy metion_ics file to derivatives name
shutil.copyfile(motion_ics, motion_ics_out)

# -1 since python lists start at index 0
motion_ic_indices = np.loadtxt(motion_ics, dtype=int, delimiter=',') - 1
melodic_mix_arr = np.loadtxt(melodic_mix, ndmin=2)

# Return dummy list of ones if no noise compnents were found
if motion_ic_indices.size == 0:
LOGGER.warning('No noise components were classified')
return None, motion_ics_out, melodic_mix_out

# the "good" ics, (e.g. not motion related)
good_ic_arr = np.delete(melodic_mix_arr, motion_ic_indices, 1).T

# return dummy lists of zeros if no signal components were found
if good_ic_arr.size == 0:
LOGGER.warning('No signal components were classified')
return None, motion_ics_out, melodic_mix_out

# transpose melodic_mix_arr so x refers to the correct dimension
aggr_confounds = np.asarray([melodic_mix_arr.T[x] for x in motion_ic_indices])

# add one to motion_ic_indices to match melodic report.
aroma_confounds = os.path.abspath("AROMAAggrCompAROMAConfounds.tsv")
pd.DataFrame(aggr_confounds.T,
columns=['AROMAAggrComp%02d' % (x + 1) for x in motion_ic_indices]).to_csv(
aroma_confounds, sep="\t", index=None)

return aroma_confounds, motion_ics_out, melodic_mix_out
8 changes: 3 additions & 5 deletions fmriprep/interfaces/fmap.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,8 @@
Interfaces to deal with the various types of fieldmap sources

"""
from __future__ import print_function, division, absolute_import, unicode_literals

import os
from builtins import range
import numpy as np
import nibabel as nb
from niworkflows.nipype import logging
Expand Down Expand Up @@ -139,11 +137,11 @@ def _despike2d(data, thres, neigh=None):
neigh = [-1, 0, 1]
nslices = data.shape[-1]

for k in range(nslices):
for k in list(range(nslices)):
data2d = data[..., k]

for i in range(data2d.shape[0]):
for j in range(data2d.shape[1]):
for i in list(range(data2d.shape[0])):
for j in list(range(data2d.shape[1])):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why list? You can iterate over range.

vals = []
thisval = data2d[i, j]
for ii in neigh:
Expand Down
1 change: 0 additions & 1 deletion fmriprep/interfaces/freesurfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~

"""
from __future__ import print_function, division, absolute_import, unicode_literals

import os.path as op
import nibabel as nb
Expand Down
45 changes: 16 additions & 29 deletions fmriprep/interfaces/images.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@


"""
from __future__ import print_function, division, absolute_import, unicode_literals

import os
import numpy as np
Expand All @@ -23,7 +22,6 @@
from niworkflows.nipype.interfaces import fsl
from niworkflows.interfaces.base import SimpleInterface

from ..utils.misc import genfname

LOGGER = logging.getLogger('interface')

Expand Down Expand Up @@ -77,8 +75,8 @@ def _run_interface(self, runtime):
in_files = [self.inputs.in_files]

# Generate output average name early
self._results['out_avg'] = genfname(self.inputs.in_files[0],
suffix='avg')
self._results['out_avg'] = fname_presuffix(self.inputs.in_files[0],
suffix='avg', newpath=os.getcwd())
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suffix='_avg', newpath=runtime.cwd


if self.inputs.to_ras:
in_files = [reorient(inf) for inf in in_files]
Expand All @@ -93,7 +91,8 @@ def _run_interface(self, runtime):
if sqdata.ndim == 5:
raise RuntimeError('Input image (%s) is 5D' % in_files[0])
else:
in_files = [genfname(in_files[0], suffix='squeezed')]
in_files = [fname_presuffix(in_files[0], suffix='squeezed',
newpath=os.getcwd())]
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suffix='_squeezed', newpath=runtime.cwd

nb.Nifti1Image(sqdata, filenii.get_affine(),
filenii.get_header()).to_filename(in_files[0])

Expand Down Expand Up @@ -366,24 +365,15 @@ def _run_interface(self, runtime):


def reorient(in_file, out_file=None):
import nibabel as nb
from fmriprep.utils.misc import genfname
from builtins import (str, bytes)

"""Reorient Nifti files to RAS"""
if out_file is None:
out_file = genfname(in_file, suffix='ras')

if isinstance(in_file, (str, bytes)):
nii = nb.load(in_file)
nii = nb.as_closest_canonical(nii)
nii.to_filename(out_file)
out_file = fname_presuffix(in_file, suffix='ras', newpath=os.getcwd())
nb.as_closest_canonical(nb.load(in_file)).to_filename(out_file)
return out_file


def _flatten_split_merge(in_files):
from builtins import bytes, str

if isinstance(in_files, (bytes, str)):
if isinstance(in_files, str):
in_files = [in_files]

nfiles = len(in_files)
Expand All @@ -398,13 +388,13 @@ def _flatten_split_merge(in_files):
all_nii.append(nii)

if len(all_nii) == 1:
LOGGER.warn('File %s cannot be split', all_nii[0])
LOGGER.warning('File %s cannot be split', all_nii[0])
return in_files[0], in_files

if len(all_nii) == nfiles:
flat_split = in_files
else:
splitname = genfname(in_files[0], suffix='split%04d')
splitname = fname_presuffix(in_files[0], suffix='split%04d', newpath=os.getcwd())
flat_split = []
for i, nii in enumerate(all_nii):
flat_split.append(splitname % i)
Expand All @@ -415,24 +405,21 @@ def _flatten_split_merge(in_files):
merged = in_files[0]
else:
# More that one in_files - need merge
merged = genfname(in_files[0], suffix='merged')
merged = fname_presuffix(in_files[0], suffix='merged', newpath=os.getcwd())
nb.concat_images(all_nii).to_filename(merged)

return merged, flat_split


def _gen_reference(fixed_image, moving_image, out_file=None):
import numpy
from nilearn.image import resample_img, load_img

if out_file is None:
out_file = genfname(fixed_image, suffix='reference')
new_zooms = load_img(moving_image).header.get_zooms()[:3]
out_file = fname_presuffix(fixed_image, suffix='reference', newpath=os.getcwd())
new_zooms = nli.load_img(moving_image).header.get_zooms()[:3]
# Avoid small differences in reported resolution to cause changes to
# FOV. See https://github.com/poldracklab/fmriprep/issues/512
new_zooms_round = numpy.round(new_zooms, 3)
resample_img(fixed_image, target_affine=numpy.diag(new_zooms_round),
interpolation='nearest').to_filename(out_file)
new_zooms_round = np.round(new_zooms, 3)
nli.resample_img(fixed_image, target_affine=np.diag(new_zooms_round),
interpolation='nearest').to_filename(out_file)
return out_file


Expand Down
Loading