Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[FIX] find confounds even when BIDS derivatives bold files contain res or den entities #3742

Merged
merged 11 commits
Jun 14, 2023
1 change: 0 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ files: |

exclude: |
(?x)^(
doc/changes/(names|latest).rst
| doc/glm/first_level_model\\.rst
| introduction\\.rst
| maintenance\\.rst
Expand Down
1 change: 1 addition & 0 deletions doc/changes/latest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ NEW
Fixes
-----

- Fix bug in :func:`~glm.first_level.first_level_from_bids` that returned no confound files if the corresponding bold files contained derivatives BIDS entities (:gh:`3742` by `Rémi Gau`_).
- :bdg-dark:`Code` Fix bug where the `cv_params_` attribute of fitted Decoder objects sometimes had missing entries if `grid_param` is a sequence of dicts with different keys (:gh:`3733` by `Michelle Wang`_).

Enhancements
Expand Down
9 changes: 5 additions & 4 deletions nilearn/_utils/data_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -1399,6 +1399,11 @@ def _write_bids_derivative_func(

shape = [n_voxels, n_voxels, n_voxels, n_time_points]

entities_to_include = [
*_bids_entities()["raw"],
*_bids_entities()["derivatives"]
]

for space in ("MNI", "T1w"):
for desc in ("preproc", "fmriprep"):
# Only space 'T1w' includes both descriptions.
Expand All @@ -1408,10 +1413,6 @@ def _write_bids_derivative_func(
fields["entities"]["space"] = space
fields["entities"]["desc"] = desc

entities_to_include = [
*_bids_entities()["raw"], *_bids_entities()["derivatives"]
]

bold_path = func_path / _create_bids_filename(
fields=fields, entities_to_include=entities_to_include
)
Expand Down
9 changes: 1 addition & 8 deletions nilearn/glm/first_level/first_level.py
Original file line number Diff line number Diff line change
Expand Up @@ -1334,16 +1334,9 @@ def _get_confounds(
List of fullpath to the confounds.tsv files

"""
supported_filters = (
_bids_entities()["raw"]
+ _bids_entities()["derivatives"]
)
# confounds use a desc-confounds,
# so we must remove desc if it was passed as a filter
supported_filters.remove("desc")
filters = _make_bids_files_filter(
task_label=task_label,
supported_filters=supported_filters,
supported_filters=_bids_entities()["raw"],
extra_filter=img_filters,
verbose=verbose,
)
Expand Down
56 changes: 41 additions & 15 deletions nilearn/glm/tests/test_first_level.py
Original file line number Diff line number Diff line change
Expand Up @@ -1112,10 +1112,11 @@ def test_first_level_from_bids(tmp_path,
slice_time_ref=None,
)

assert len(models) == n_sub
assert len(models) == len(m_imgs)
assert len(models) == len(m_events)
assert len(models) == len(m_confounds)
_check_output_first_level_from_bids(n_sub,
models,
m_imgs,
m_events,
m_confounds)

n_imgs_expected = n_ses * n_runs[task_index]

Expand Down Expand Up @@ -1144,10 +1145,11 @@ def test_first_level_from_bids_select_one_run_per_session(bids_dataset):
slice_time_ref=None,
)

assert len(models) == n_sub
assert len(models) == len(m_imgs)
assert len(models) == len(m_events)
assert len(models) == len(m_confounds)
_check_output_first_level_from_bids(n_sub,
models,
m_imgs,
m_events,
m_confounds)

n_imgs_expected = n_ses
assert len(m_imgs[0]) == n_imgs_expected
Expand All @@ -1165,10 +1167,11 @@ def test_first_level_from_bids_select_all_runs_of_one_session(bids_dataset):
slice_time_ref=None,
)

assert len(models) == n_sub
assert len(models) == len(m_imgs)
assert len(models) == len(m_events)
assert len(models) == len(m_confounds)
_check_output_first_level_from_bids(n_sub,
models,
m_imgs,
m_events,
m_confounds)

n_imgs_expected = n_runs[0]
assert len(m_imgs[0]) == n_imgs_expected
Expand Down Expand Up @@ -1217,13 +1220,36 @@ def test_first_level_from_bids_several_labels_per_entity(tmp_path, entity):
img_filters=[("desc", "preproc"), (entity, "A")],
slice_time_ref=None,
)

_check_output_first_level_from_bids(n_sub,
models,
m_imgs,
m_events,
m_confounds)
n_imgs_expected = n_ses * n_runs[0]
assert len(m_imgs[0]) == n_imgs_expected


def _check_output_first_level_from_bids(n_sub,
models,
m_imgs,
m_events,
m_confounds):
assert len(models) == n_sub
assert all(isinstance(model, FirstLevelModel) for model in models)
assert len(models) == len(m_imgs)
for imgs in m_imgs:
assert isinstance(imgs, list)
assert all(Path(img_).exists() for img_ in imgs)
assert len(models) == len(m_events)
for events in m_events:
assert isinstance(events, list)
assert all(isinstance(event_, pd.DataFrame) for event_ in events)
assert len(models) == len(m_confounds)

n_imgs_expected = n_ses * n_runs[0]
assert len(m_imgs[0]) == n_imgs_expected
for confounds in m_confounds:
assert isinstance(confounds, list)
assert all(isinstance(confound_, pd.DataFrame)
for confound_ in confounds)


def test_first_level_from_bids_with_subject_labels(bids_dataset):
Expand Down
2 changes: 1 addition & 1 deletion nilearn/plotting/tests/test_matrix_plotting.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def test_sanitize_reorder_error(reorder):
else:
param_to_print.append(str(item))
with pytest.raises(ValueError,
match=("Parameter reorder needs to be one of:\n"
match=("Parameter reorder needs to be one of:\\n"
f"{', '.join(param_to_print)}.")):
_sanitize_reorder(reorder)

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -181,5 +181,5 @@ doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS"
junit_family = "xunit2"

[tool.codespell]
skip = "./.git,plotly-gl3d-latest.min.js,jquery.min.js,localizer_behavioural.tsv,.mypy_cache,env,venv"
skip = "./.git,plotly-gl3d-latest.min.js,jquery.min.js,localizer_behavioural.tsv,.mypy_cache,env,venv,./doc/auto_examples"
ignore-words = ".github/codespell_ignore_words.txt"