-
Notifications
You must be signed in to change notification settings - Fork 575
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[ENH] integrate load_confounds into first_level_from_bids #4103
Changes from 4 commits
fb14470
84914ea
a3ff639
ecce8e2
659794a
815bdc0
a21a4c4
eb16889
56e580d
4d1cc24
23444e5
a79055e
103e326
c4089c8
9f77bcc
b457a11
c043f78
a62c27c
93ab4f7
a8a1eb1
d3588e1
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
from nilearn.interfaces.bids._utils import _bids_entities | ||
|
||
|
||
def create_bids_filename(fields, entities_to_include=None):
    """Create BIDS filename from dictionary of entity-label pairs.

    Parameters
    ----------
    fields : :obj:`dict` of :obj:`str`
        Dictionary of entity-label pairs, for example:

        {
            "suffix": "T1w",
            "extension": "nii.gz",
            "entities": {"acq": "ap",
                         "desc": "preproc"}
        }.

    entities_to_include : :obj:`list` of :obj:`str` or None, default=None
        Entities to include in the filename, in that order.
        When ``None``, defaults to the raw BIDS entities.

    Returns
    -------
    BIDS filename : :obj:`str`

    """
    if entities_to_include is None:
        entities_to_include = _bids_entities()["raw"]

    # A missing "entities" key is treated as "no entities" instead of
    # raising a KeyError, so suffix/extension-only filenames also work.
    entities = fields.get("entities", {})

    filename = ""

    for key in entities_to_include:
        value = entities.get(key)
        # Skip entities whose label is absent or empty.
        if value not in (None, ""):
            filename += f"{key}-{value}_"
    if "suffix" in fields:
        filename += f"{fields['suffix']}"
    if "extension" in fields:
        filename += f".{fields['extension']}"

    return filename
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -16,7 +16,9 @@ | |
|
||
from nilearn import datasets, image, maskers, masking | ||
from nilearn._utils import as_ndarray, logger | ||
from nilearn._utils.bids import create_bids_filename | ||
from nilearn.interfaces.bids._utils import _bids_entities, _check_bids_label | ||
from nilearn.interfaces.fmriprep.tests._testing import get_legal_confound | ||
|
||
|
||
def generate_mni_space_img(n_scans=1, res=30, random_state=0, mask_dilation=2): | ||
|
@@ -735,9 +737,15 @@ def _basic_confounds(length, random_state=0): | |
|
||
""" | ||
rand_gen = check_random_state(random_state) | ||
columns = ['csf', 'white_matter', 'global_signal', | ||
'rot_x', 'rot_y', 'rot_z', | ||
'trans_x', 'trans_y', 'trans_z'] | ||
columns = ['csf', | ||
'white_matter', | ||
'global_signal', | ||
'rot_x', | ||
'rot_y', | ||
'rot_z', | ||
'trans_x', | ||
'trans_y', | ||
'trans_z'] | ||
data = rand_gen.rand(length, len(columns)) | ||
confounds = pd.DataFrame(data, columns=columns) | ||
return confounds | ||
|
@@ -1198,46 +1206,6 @@ def _listify(n): | |
return [""] if n <= 0 else [f"{label:02}" for label in range(1, n + 1)] | ||
|
||
|
||
def _create_bids_filename( | ||
fields, entities_to_include=None | ||
): | ||
"""Create BIDS filename from dictionary of entity-label pairs. | ||
|
||
Parameters | ||
---------- | ||
fields : :obj:`dict` of :obj:`str` | ||
Dictionary of entity-label pairs, for example: | ||
|
||
{ | ||
"suffix": "T1w", | ||
"extension": "nii.gz", | ||
"entities": {"acq": "ap", | ||
"desc": "preproc"} | ||
}. | ||
|
||
Returns | ||
------- | ||
BIDS filename : :obj:`str` | ||
|
||
""" | ||
if entities_to_include is None: | ||
entities_to_include = _bids_entities()["raw"] | ||
|
||
filename = "" | ||
|
||
for key in entities_to_include: | ||
if key in fields["entities"]: | ||
value = fields["entities"][key] | ||
if value not in (None, ""): | ||
filename += f"{key}-{value}_" | ||
if "suffix" in fields: | ||
filename += f"{fields['suffix']}" | ||
if "extension" in fields: | ||
filename += f".{fields['extension']}" | ||
|
||
return filename | ||
|
||
|
||
def _init_fields(subject, | ||
session, | ||
task, | ||
|
@@ -1265,7 +1233,7 @@ def _init_fields(subject, | |
|
||
See Also | ||
-------- | ||
_create_bids_filename | ||
create_bids_filename | ||
|
||
""" | ||
fields = { | ||
|
@@ -1302,7 +1270,7 @@ def _write_bids_raw_anat(subses_dir, subject, session) -> None: | |
"extension": "nii.gz", | ||
"entities": {"sub": subject, "ses": session}, | ||
} | ||
(anat_path / _create_bids_filename(fields)).write_text("") | ||
(anat_path / create_bids_filename(fields)).write_text("") | ||
|
||
|
||
def _write_bids_raw_func( | ||
|
@@ -1330,7 +1298,7 @@ def _write_bids_raw_func( | |
|
||
""" | ||
n_time_points = 100 | ||
bold_path = func_path / _create_bids_filename(fields) | ||
bold_path = func_path / create_bids_filename(fields) | ||
write_fake_bold_img( | ||
bold_path, | ||
[n_voxels, n_voxels, n_voxels, n_time_points], | ||
|
@@ -1339,12 +1307,12 @@ def _write_bids_raw_func( | |
|
||
repetition_time = 1.5 | ||
fields["extension"] = "json" | ||
param_path = func_path / _create_bids_filename(fields) | ||
param_path = func_path / create_bids_filename(fields) | ||
param_path.write_text(json.dumps({"RepetitionTime": repetition_time})) | ||
|
||
fields["suffix"] = "events" | ||
fields["extension"] = "tsv" | ||
events_path = func_path / _create_bids_filename(fields) | ||
events_path = func_path / create_bids_filename(fields) | ||
basic_paradigm().to_csv(events_path, sep="\t", index=None) | ||
|
||
|
||
|
@@ -1390,12 +1358,15 @@ def _write_bids_derivative_func( | |
if confounds_tag is not None: | ||
fields["suffix"] = confounds_tag | ||
fields["extension"] = "tsv" | ||
confounds_path = func_path / _create_bids_filename( | ||
confounds_path = func_path / create_bids_filename( | ||
fields=fields, entities_to_include=_bids_entities()["raw"] | ||
) | ||
_basic_confounds(length=n_time_points, random_state=rand_gen).to_csv( | ||
confounds_path, sep="\t", index=None | ||
confounds, metadata = get_legal_confound() | ||
confounds.to_csv( | ||
confounds_path, sep="\t", index=None, encoding="utf-8" | ||
) | ||
with open(confounds_path.with_suffix(".json"), "w") as f: | ||
json.dump(metadata, f) | ||
Comment on lines
+1367
to
+1372
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. One issue with this approach is that the "legal_confounds" have a set number of time points so when creating a fake bids dataset, we end up with images that have a number of time points that does not match the number of time points in the confounds. This does not affect any tests AFIACT but this may lead to confusing errors when testing down the line. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. You mean, because of scrubbing ? Sorry if I miss something obvious. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. nope let me try to rephrase the way to generate "fake confounds" for the fake fmriprep datasets we use for testing would only create 6 confounds for the realignment parameters filled with random data and for a specified number of time points to allow testing the load_confounds we need more realistic confounds with more columns with names that match what's in an actual fmriprep dataset to do this I reuse the strategy used to test the load_confounds functions: use an actual confound file from an fmriprep dataset and copy its content every time it is needed in the fake fmriprep dataset but this "template" confound file has only a limited number of time points so we end up with fake fmriprep datasets that have nifti images with 100 volumes but with confounds with only 30 time points possible solutions:
Hope this is clearer. For now I will go for the easy solution, but we may have to implement the harder option in the future if we want to test more "exotic" stuff. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Let's go for the easy one. The number of volumes should be a parameter of the data simulation function anyhow? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. For now it is not: it is hard coded. I would keep it that way for now until we need more flexibility during testing. But I will change the place where it is hard coded so it is easier to adapt in the future. Will also add a comment to explain why this value was chosen.
Comment on lines
+1371
to
+1372
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Minor change: using legal_confounds allows adding metadata files for the confounds in the fake BIDS derivatives. Some tests had to be changed to account for this. |
||
|
||
fields["suffix"] = "bold" | ||
fields["extension"] = "nii.gz" | ||
|
@@ -1416,7 +1387,7 @@ def _write_bids_derivative_func( | |
fields["entities"]["space"] = space | ||
fields["entities"]["desc"] = desc | ||
|
||
bold_path = func_path / _create_bids_filename( | ||
bold_path = func_path / create_bids_filename( | ||
fields=fields, entities_to_include=entities_to_include | ||
) | ||
write_fake_bold_img(bold_path, shape=shape, random_state=rand_gen) | ||
|
@@ -1426,7 +1397,7 @@ def _write_bids_derivative_func( | |
fields["entities"].pop("desc") | ||
for hemi in ["L", "R"]: | ||
fields["entities"]["hemi"] = hemi | ||
gifti_path = func_path / _create_bids_filename( | ||
gifti_path = func_path / create_bids_filename( | ||
fields=fields, | ||
entities_to_include=entities_to_include | ||
) | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -1860,3 +1860,30 @@ def test_missing_trial_type_column_warning(tmp_path_factory): | |
"No column named 'trial_type' found" in r.message.args[0] | ||
for r in record | ||
) | ||
|
||
|
||
def test_first_level_from_bids_load_confounds(tmp_path):
    """Smoke test passing load_confounds options to first_level_from_bids."""
    n_sub = 2

    bids_path = create_fake_bids_dataset(
        base_dir=tmp_path, n_sub=n_sub, n_ses=2, tasks=["main"], n_runs=[2]
    )

    # The confounds_* keyword arguments mirror the load_confounds API and
    # are forwarded to it for validation.
    models, imgs, events, confounds = first_level_from_bids(
        dataset_path=bids_path,
        task_label="main",
        space_label="MNI",
        img_filters=[("desc", "preproc")],
        slice_time_ref=None,
        confounds_strategy=("motion", "wm_csf", "scrub"),
        confounds_motion="full",
        confounds_wm_csf="basic",
        confounds_scrub=1,
        confounds_fd_threshold=0.2,
        confounds_std_dvars_threshold=3,
    )

    _check_output_first_level_from_bids(
        n_sub, models, imgs, events, confounds
    )
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Had to move create_bids_filename to a different module to help avoid circular imports.