Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

👽️ Resolve relative paths and links right at the beginning of a run #1551

Closed
wants to merge 23 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
34d84be
:alembic: Resolve symlinks in datasource
shnizzedy Aug 18, 2021
e76ebb4
:alembic: Debug KeyError
shnizzedy Aug 18, 2021
d2bf1c6
:goal_net: Handle inputs without 'in_file's
shnizzedy Aug 18, 2021
c8ac479
:goal_net: Catch non-dict resultsfiles
shnizzedy Aug 19, 2021
56f90d5
:pencil2: Operati~onal~^ing system
shnizzedy Aug 19, 2021
b6c1864
:necktie: Exit with failure code (1) on failure
shnizzedy Aug 19, 2021
f04a738
:art: Always dereference local_path
shnizzedy Aug 19, 2021
d50ccb5
:loud_sound: Include run command in displayed information
shnizzedy Aug 19, 2021
89203ac
fixup! :loud_sound: Include run command in displayed information
shnizzedy Aug 19, 2021
b8bdbf6
:art: Dereference relative paths right away
shnizzedy Aug 19, 2021
442395f
:zap: Use 'in_file_a' if available
shnizzedy Aug 19, 2021
239b90d
:speech_balloon: Clarify preconfig message (change "Running" to "Loading")
shnizzedy Aug 19, 2021
81925d7
:alembic: Return None if prov has no length
shnizzedy Aug 19, 2021
1017a4e
:zap: CopyImageHeaderInformation before n4_correct
shnizzedy Sep 1, 2021
6f87606
:adhesive_bandage: Force relative paths for FSL Merge
shnizzedy Sep 3, 2021
9278c47
:arrow_up: Upgrade NVM (0.35.2 → 0.38.0)
shnizzedy Sep 3, 2021
47e84ed
:arrow_up: Upgrade Node (NPM) to v16.8.0 (v7.5.3)
shnizzedy Sep 3, 2021
d4168a1
:twisted_rightwards_arrows: Merge develop into enh/allow-links
shnizzedy Sep 4, 2021
778c4ba
fixup! :adhesive_bandage: Force relative paths for FSL Merge
shnizzedy Sep 7, 2021
31bb02c
:twisted_rightwards_arrows: Merge branch 'fix/n4-imprecision' into enh/allow-links
shnizzedy Sep 7, 2021
8ca11e8
fixup! :adhesive_bandage: Force relative paths for FSL Merge
shnizzedy Sep 7, 2021
3fe3a5f
:bug: Set undefined imageout from imagetocopyrefimageinfoto
shnizzedy Sep 9, 2021
2b38667
:bug: Set outputs for CopyImageHeaderInformation
shnizzedy Sep 9, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CPAC/anat_preproc/anat_preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
wb_command, \
fslmaths_command, \
VolumeRemoveIslands
from CPAC.utils.interfaces.fsl import Merge as fslMerge

from CPAC.unet.function import predict_volumes

Expand Down Expand Up @@ -3147,7 +3148,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None
wf.connect(node, out, merge_t1_acpc_to_list, 'in2')
wf.connect(node, out, merge_t1_acpc_to_list, 'in3')

merge_t1_acpc = pe.Node(interface=fsl.Merge(),
merge_t1_acpc = pe.Node(interface=fslMerge(),
name='merge_t1_acpc')

merge_t1_acpc.inputs.dimension = 't'
Expand Down
20 changes: 16 additions & 4 deletions CPAC/func_preproc/func_preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@
from CPAC.utils.utils import check_prov_for_motion_tool

# niworkflows
from ..utils.interfaces.ants import AI
from ..utils.interfaces.ants import AI, \
CopyImageHeaderInformation # noqa: E402


def collect_arguments(*args):
command_args = []
Expand Down Expand Up @@ -1661,7 +1663,13 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
name=f"pre_mask_dilate_{pipe_num}",
)

# Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
# Fix precision errors
# https://github.com/ANTsX/ANTs/wiki/Inputs-do-not-occupy-the-same-physical-space#fixing-precision-errors
restore_header = pe.Node(CopyImageHeaderInformation(),
name=f'restore_header_{pipe_num}')

# Run N4 normally, force num_threads=1 for stability (images are
# small, no need for >1)
n4_correct = pe.Node(
ants.N4BiasFieldCorrection(
dimension=3, copy_header=True, bspline_fitting_distance=200
Expand Down Expand Up @@ -1704,14 +1712,18 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect([(node, init_aff, [(out, "moving_image")]),
(node, map_brainmask, [(out, "reference_image")]),
(node, norm, [(out, "moving_image")]),
(init_aff, norm, [("output_transform", "initial_moving_transform")]),
(node, restore_header, [(out, "refimage")]),
(init_aff, norm, [
("output_transform", "initial_moving_transform")]),
(norm, map_brainmask, [
("reverse_invert_flags", "invert_transform_flags"),
("reverse_transforms", "transforms"),
]),
(map_brainmask, binarize_mask, [("output_image", "in_file")]),
(binarize_mask, pre_dilate, [("out_file", "in_file")]),
(pre_dilate, n4_correct, [("out_file", "mask_image")]),
(pre_dilate, restore_header, [
("out_file", "imagetocopyrefimageinfoto")]),
(restore_header, n4_correct, [("imageout", "mask_image")]),
(node, n4_correct, [(out, "input_image")]),
(n4_correct, skullstrip_first_pass,
[('output_image', 'in_file')]),
Expand Down
2 changes: 1 addition & 1 deletion CPAC/group_analysis/group_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ def create_fsl_flame_wf(ftest=False, wf_name='groupAnalysis'):
name='outputspec')

'''
merge_to_4d = pe.Node(interface=fsl.Merge(),
merge_to_4d = pe.Node(interface=fslMerge(),
name='merge_to_4d')
merge_to_4d.inputs.dimension = 't'

Expand Down
3 changes: 2 additions & 1 deletion CPAC/nuisance/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

from CPAC.nuisance.utils.compcor import calc_compcor_components
from CPAC.nuisance.utils.crc import encode as crc_encode
from CPAC.utils.interfaces.fsl import Merge as fslMerge
from CPAC.utils.interfaces.function import Function
from CPAC.registration.utils import check_transforms, generate_inverse_transform_flags

Expand Down Expand Up @@ -254,7 +255,7 @@ def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1):
wf.connect(mapper, 'out_file', threshold_mask, 'in_file')
wf.connect(threshold_node, 'threshold', threshold_mask, 'thresh')

merge_slice_masks = pe.Node(interface=fsl.Merge(), name='merge_slice_masks')
merge_slice_masks = pe.Node(interface=fslMerge(), name='merge_slice_masks')
merge_slice_masks.inputs.dimension = 'z'
wf.connect(
threshold_mask, 'out_file',
Expand Down
8 changes: 7 additions & 1 deletion CPAC/pipeline/cpac_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import sys
import time
import csv
import shutil
Expand Down Expand Up @@ -227,6 +228,7 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None,
the prepared nipype workflow object containing the parameters
specified in the config
'''
exitcode = 0

# Assure that changes on config will not affect other parts
c = copy.copy(c)
Expand Down Expand Up @@ -322,6 +324,7 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None,
encrypt_data = False

information = """
Run command: {run_command}

C-PAC version: {cpac_version}

Expand Down Expand Up @@ -350,6 +353,7 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None,
"""

logger.info(information.format(
run_command=' '.join(['run', *sys.argv[1:]]),
cpac_version=CPAC.__version__,
cores=c.pipeline_setup['system_config']['max_cores_per_participant'],
participants=c.pipeline_setup['system_config'][
Expand Down Expand Up @@ -663,7 +667,8 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None,
logger.error(err_msg, log_dir, exc)

except Exception as e:
import traceback;
import traceback
exitcode = 1
traceback.print_exc()
execution_info = """

Expand Down Expand Up @@ -706,6 +711,7 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None,
except (FileNotFoundError, PermissionError):
logger.warn('Could not remove working directory %s',
working_dir)
sys.exit(exitcode)


def initialize_nipype_wf(cfg, sub_data_dct, name=""):
Expand Down
4 changes: 2 additions & 2 deletions CPAC/pipeline/cpac_randomise_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@

import nipype.interfaces.io as nio
from CPAC.pipeline import nipype_pipeline_engine as pe

from CPAC.pipeline.cpac_group_runner import load_config_yml
from CPAC.utils.interfaces.fsl import Merge as fslMerge


def load_subject_file(group_config_path):
Expand All @@ -17,7 +17,7 @@ def load_subject_file(group_config_path):

def randomise_merged_file(s_paths):

merge = pe.Node(interface=fsl.Merge(), name='fsl_merge')
merge = pe.Node(interface=fslMerge(), name='fsl_merge')
merge.inputs.dimension = 't'
merge.inputs.merged_file = "randomise_merged.nii.gz"
merge.inputs.in_files = s_paths
Expand Down
2 changes: 1 addition & 1 deletion CPAC/pipeline/cpac_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,7 +296,7 @@ def run(subject_list_file, config_file=None, p_name=None, plugin=None,
if len(c.pipeline_setup['working_directory']['path']) > 70:
warnings.warn("We recommend that the working directory full path "
"should have less then 70 characters. "
"Long paths might not work in your operational system.")
"Long paths might not work in your operating system.")
warnings.warn("Current working directory: %s" % c.pipeline_setup['working_directory']['path'])

# Get the pipeline name
Expand Down
2 changes: 2 additions & 0 deletions CPAC/pipeline/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,8 @@ def get_resource_from_prov(self, prov):
# data) has its own provenance list. the name of the resource, and
# the node that produced it, is always the last item in the provenance
# list, with the two separated by a colon :
if not len(prov):
return None
if isinstance(prov[-1], list):
return prov[-1][-1].split(':')[0]
elif isinstance(prov[-1], str):
Expand Down
19 changes: 17 additions & 2 deletions CPAC/pipeline/nipype_pipeline_engine/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,8 +244,23 @@ def _configure_exec_nodes(self, graph):
# already exists
multiplicand_path = _load_resultfile(
input_resultfile
).inputs['in_file']
node._apply_mem_x(multiplicand_path)
).inputs
if isinstance(
multiplicand_path,
dict
):
if 'in_file' in multiplicand_path:
node._apply_mem_x(
multiplicand_path['in_file']
)
elif 'in_file_a' in multiplicand_path:
node._apply_mem_x(
multiplicand_path['in_file_a']
)
else:
raise FileNotFoundError
else:
raise FileNotFoundError
except FileNotFoundError:
if hasattr(self, '_largest_func'):
node._apply_mem_x(self._largest_func)
Expand Down
3 changes: 2 additions & 1 deletion CPAC/qpp/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl
from CPAC.utils.interfaces.fsl import Merge as fslMerge

from CPAC.utils.interfaces.function import Function

Expand Down Expand Up @@ -77,7 +78,7 @@ def create_qpp(name='qpp', working_dir=None, crash_dir=None):
outputspec = pe.Node(util.IdentityInterface(fields=['qpp']),
name='outputspec')

merge = pe.Node(fsl.Merge(), name='joint_datasets')
merge = pe.Node(fslMerge(), name='joint_datasets')
merge.inputs.dimension = 't'
merge.inputs.output_type = 'NIFTI_GZ'

Expand Down
2 changes: 1 addition & 1 deletion CPAC/randomise/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def create_randomise(name='randomise',working_dir=None,crash_dir=None):
outputspec = pe.Node(util.IdentityInterface(fields=['tstat_files' ,'t_corrected_p_files','index_file','threshold_file','localmax_txt_file','localmax_vol_file','max_file','mean_file','pval_file','size_file']), name='outputspec')


#merge = pe.Node(interface=fsl.Merge(), name='fsl_merge')
#merge = pe.Node(interface=fslMerge(), name='fsl_merge')
#merge.inputs.dimension = 't'
#merge.inputs.merged_file = "randomise_merged.nii.gz"

Expand Down
27 changes: 15 additions & 12 deletions CPAC/registration/registration.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
run_c3d, \
run_c4d

from CPAC.utils.interfaces.fsl import Merge as fslMerge
from CPAC.utils.utils import check_prov_for_regtool


Expand Down Expand Up @@ -2541,7 +2542,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None
wf.connect(split_combined_warp, 'output3',
merge_xfms_to_list, 'in3')

merge_xfms = pe.Node(interface=fsl.Merge(),
merge_xfms = pe.Node(interface=fslMerge(),
name=f'merge_t1_to_template_xfms_{pipe_num}')
merge_xfms.inputs.dimension = 't'

Expand All @@ -2559,8 +2560,8 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None
wf.connect(split_combined_inv_warp, 'output3',
merge_inv_xfms_to_list, 'in3')

merge_inv_xfms = pe.Node(interface=fsl.Merge(),
name=f'merge_template_to_t1_xfms_{pipe_num}')
merge_inv_xfms = pe.Node(interface=fslMerge(),
name=f'merge_template_to_t1_xfms_{pipe_num}')
merge_inv_xfms.inputs.dimension = 't'

wf.connect(merge_inv_xfms_to_list, 'out',
Expand Down Expand Up @@ -3272,17 +3273,18 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
### Loop ends! ###

# fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol
merge_func_to_standard = pe.Node(interface=fsl.Merge(),
name=f'merge_func_to_standard_{pipe_num}')
merge_func_to_standard = pe.Node(interface=fslMerge(),
name=f'merge_func_to_standard_{pipe_num}')

merge_func_to_standard.inputs.dimension = 't'

wf.connect(applywarp_func_to_standard, 'out_file',
merge_func_to_standard, 'in_files')

# fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol
merge_func_mask_to_standard = pe.Node(interface=fsl.Merge(),
name=f'merge_func_mask_to_standard_{pipe_num}')
merge_func_mask_to_standard = pe.Node(interface=fslMerge(),
name='merge_func_mask_to_'
f'standard_{pipe_num}')

merge_func_mask_to_standard.inputs.dimension = 't'

Expand Down Expand Up @@ -3587,17 +3589,18 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No
### Loop ends! ###

# fslmerge -tr ${OutputfMRI} $FrameMergeSTRING $TR_vol
merge_func_to_standard = pe.Node(interface=fsl.Merge(),
name=f'merge_func_to_standard_{pipe_num}')
merge_func_to_standard = pe.Node(interface=fslMerge(),
name=f'merge_func_to_standard_{pipe_num}')

merge_func_to_standard.inputs.dimension = 't'

wf.connect(applywarp_func_to_standard, 'out_file',
merge_func_to_standard, 'in_files')

# fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol
merge_func_mask_to_standard = pe.Node(interface=fsl.Merge(),
name=f'merge_func_mask_to_standard_{pipe_num}')
merge_func_mask_to_standard = pe.Node(interface=fslMerge(),
name='merge_func_mask_to_'
f'standard_{pipe_num}')

merge_func_mask_to_standard.inputs.dimension = 't'

Expand Down Expand Up @@ -3765,7 +3768,7 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num,

### Loop ends! ###

merge_func_to_standard = pe.Node(interface=fsl.Merge(),
merge_func_to_standard = pe.Node(interface=fslMerge(),
name=f'merge_func_to_standard_{pipe_num}')

merge_func_to_standard.inputs.dimension = 't'
Expand Down
4 changes: 2 additions & 2 deletions CPAC/utils/datasource.py
Original file line number Diff line number Diff line change
Expand Up @@ -668,9 +668,9 @@ def check_for_s3(file_path, creds_path=None, dl_dir=None, img_type='other',
% (bucket_name, exc)
raise Exception(err_msg)

# Otherwise just return what was passed in
# Otherwise just return what was passed in, resolving if a link
else:
local_path = file_path
local_path = os.path.realpath(file_path)

# Check if it exists or it is successfully downloaded
if not os.path.exists(local_path):
Expand Down
Loading