Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MAINT] nilearn.interface: make private functions used outside of their module public #4168

Merged
merged 11 commits into from
Dec 18, 2023
3 changes: 2 additions & 1 deletion doc/changes/latest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -46,4 +46,5 @@ Changes
- :bdg-dark:`Code` Private utility context manager ``write_tmp_imgs`` is refactored into function ``write_imgs_to_path`` (:gh:`4094` by `Yasmin Mzayek`_).
- :bdg-danger:`Deprecation` :func:`~plotting.plot_surf_roi` will raise a warning if ``roi_map`` contains negative or non-integer values; in version 0.13 this will be a ``ValueError`` (:gh:`4131` by `Michelle Wang`_).
- :bdg-dark:`Code` Remove leading underscore from non private functions to align with PEP8 (:gh:`4086` by `Rémi Gau`_).
- :bdg-dark:`Code` Make ``decoding/proximal_operator`` explicitly private to align with PEP8 (:gh:`4153` by `Rémi Gau`_).
- :bdg-dark:`Code` Make ``decoding/proximal_operator`` explicitly private to align with PEP8 (:gh:`4153` by `Rémi Gau`_).
- :bdg-dark:`Code` Make private functions of ``nilearn.interfaces`` public when they are used outside of their own module, to align with PEP8 (:gh:`4168` by `Rémi Gau`_).
39 changes: 0 additions & 39 deletions nilearn/_utils/bids.py

This file was deleted.

29 changes: 17 additions & 12 deletions nilearn/_utils/data_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,13 @@

from nilearn import datasets, image, maskers, masking
from nilearn._utils import as_ndarray, logger
from nilearn._utils.bids import create_bids_filename
from nilearn.interfaces.bids._utils import _bids_entities, _check_bids_label
from nilearn.interfaces.bids.utils import (
bids_entities,
check_bids_label,
create_bids_filename,
)

# TODO: move get_legal_confound out of the private testing module
from nilearn.interfaces.fmriprep.tests._testing import get_legal_confound


Expand Down Expand Up @@ -926,7 +931,7 @@ def create_fake_bids_dataset(
bids_path = Path(base_dir) / bids_dataset_dir

for task_ in tasks:
_check_bids_label(task_)
check_bids_label(task_)

if not isinstance(n_runs, list) or not all(
isinstance(x, int) for x in n_runs
Expand Down Expand Up @@ -991,15 +996,15 @@ def _check_entities_and_labels(entities):
raise ValueError("Only a single extra entity is supported for now.")

for key in entities:
if key not in [*_bids_entities()["raw"],
*_bids_entities()["derivatives"]]:
allowed_entities = [*_bids_entities()['raw'],
*_bids_entities()['derivatives']]
if key not in [*bids_entities()["raw"],
*bids_entities()["derivatives"]]:
allowed_entities = [*bids_entities()['raw'],
*bids_entities()['derivatives']]
raise ValueError(
f"Invalid entity: {key}. Allowed entities are: "
f"{allowed_entities}"
)
[_check_bids_label(label_) for label_ in entities[key]]
[check_bids_label(label_) for label_ in entities[key]]


def _mock_bids_dataset(
Expand Down Expand Up @@ -1073,7 +1078,7 @@ def _mock_bids_dataset(
task=task,
run=run,
)
if key in _bids_entities()["raw"]:
if key in bids_entities()["raw"]:
fields["entities"][key] = label
_write_bids_raw_func(
func_path=func_path,
Expand Down Expand Up @@ -1362,7 +1367,7 @@ def _write_bids_derivative_func(
fields["suffix"] = confounds_tag
fields["extension"] = "tsv"
confounds_path = func_path / create_bids_filename(
fields=fields, entities_to_include=_bids_entities()["raw"]
fields=fields, entities_to_include=bids_entities()["raw"]
)
confounds, metadata = get_legal_confound()
confounds.to_csv(
Expand All @@ -1377,8 +1382,8 @@ def _write_bids_derivative_func(
shape = [n_voxels, n_voxels, n_voxels, n_time_points]

entities_to_include = [
*_bids_entities()["raw"],
*_bids_entities()["derivatives"]
*bids_entities()["raw"],
*bids_entities()["derivatives"]
]

for space in ("MNI", "T1w"):
Expand Down
2 changes: 1 addition & 1 deletion nilearn/_utils/tests/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def test_number_public_functions():
If this is intentional, then the number should be updated in the test.
Otherwise it means that the public API of nilearn has changed by mistake.
"""
assert len({_[0] for _ in all_functions()}) == 211
assert len({_[0] for _ in all_functions()}) == 228


def test_number_public_classes():
Expand Down
44 changes: 22 additions & 22 deletions nilearn/glm/first_level/first_level.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,11 @@
)
from nilearn.image import get_data
from nilearn.interfaces.bids import get_bids_files, parse_bids_filename
from nilearn.interfaces.bids._utils import _bids_entities, _check_bids_label
from nilearn.interfaces.bids.query import (
_infer_repetition_time_from_dataset,
_infer_slice_timing_start_time_from_dataset,
infer_repetition_time_from_dataset,
infer_slice_timing_start_time_from_dataset,
)
from nilearn.interfaces.bids.utils import bids_entities, check_bids_label
from nilearn.interfaces.fmriprep.load_confounds import load_confounds


Expand Down Expand Up @@ -1165,23 +1165,23 @@ def first_level_from_bids(
task_label=task_label,
space_label=space_label,
supported_filters=[
*_bids_entities()["raw"],
*_bids_entities()["derivatives"],
*bids_entities()["raw"],
*bids_entities()["derivatives"],
],
extra_filter=img_filters,
verbose=verbose,
)
inferred_t_r = _infer_repetition_time_from_dataset(
inferred_t_r = infer_repetition_time_from_dataset(
bids_path=derivatives_path, filters=filters, verbose=verbose
)
if inferred_t_r is None:
filters = _make_bids_files_filter(
task_label=task_label,
supported_filters=[*_bids_entities()["raw"]],
supported_filters=[*bids_entities()["raw"]],
extra_filter=img_filters,
verbose=verbose,
)
inferred_t_r = _infer_repetition_time_from_dataset(
inferred_t_r = infer_repetition_time_from_dataset(
bids_path=dataset_path, filters=filters, verbose=verbose
)

Expand Down Expand Up @@ -1211,13 +1211,13 @@ def first_level_from_bids(
task_label=task_label,
space_label=space_label,
supported_filters=[
*_bids_entities()["raw"],
*_bids_entities()["derivatives"],
*bids_entities()["raw"],
*bids_entities()["derivatives"],
],
extra_filter=img_filters,
verbose=verbose,
)
StartTime = _infer_slice_timing_start_time_from_dataset(
StartTime = infer_slice_timing_start_time_from_dataset(
bids_path=derivatives_path, filters=filters, verbose=verbose
)
if StartTime is not None and t_r is not None:
Expand Down Expand Up @@ -1426,8 +1426,8 @@ def _get_processed_imgs(
filters = _make_bids_files_filter(
task_label=task_label,
space_label=space_label,
supported_filters=_bids_entities()["raw"]
+ _bids_entities()["derivatives"],
supported_filters=bids_entities()["raw"]
+ bids_entities()["derivatives"],
extra_filter=img_filters,
verbose=verbose,
)
Expand Down Expand Up @@ -1491,7 +1491,7 @@ def _get_events_files(
"""
events_filters = _make_bids_files_filter(
task_label=task_label,
supported_filters=_bids_entities()["raw"],
supported_filters=bids_entities()["raw"],
extra_filter=img_filters,
verbose=verbose,
)
Expand Down Expand Up @@ -1564,7 +1564,7 @@ def _get_confounds(
"""
filters = _make_bids_files_filter(
task_label=task_label,
supported_filters=_bids_entities()["raw"],
supported_filters=bids_entities()["raw"],
extra_filter=img_filters,
verbose=verbose,
)
Expand Down Expand Up @@ -1684,26 +1684,26 @@ def _check_args_first_level_from_bids(
f"{derivatives_folder}"
)

_check_bids_label(task_label)
check_bids_label(task_label)

if space_label is not None:
_check_bids_label(space_label)
check_bids_label(space_label)

if not isinstance(sub_labels, list):
raise TypeError(
f"sub_labels must be a list, instead {type(sub_labels)} was given"
)
for sub_label_ in sub_labels:
_check_bids_label(sub_label_)
check_bids_label(sub_label_)

if not isinstance(img_filters, list):
raise TypeError(
f"'img_filters' must be a list. "
f"Got {type(img_filters)} instead."
)
supported_filters = [
*_bids_entities()["raw"],
*_bids_entities()["derivatives"],
*bids_entities()["raw"],
*bids_entities()["derivatives"],
]
for filter_ in img_filters:
if len(filter_) != 2 or not all(isinstance(x, str) for x in filter_):
Expand All @@ -1716,7 +1716,7 @@ def _check_args_first_level_from_bids(
f"Entity {filter_[0]} for {filter_} is not a possible filter. "
f"Only {supported_filters} are allowed."
)
_check_bids_label(filter_[1])
check_bids_label(filter_[1])


def _check_kwargs_load_confounds(**kwargs):
Expand Down Expand Up @@ -1932,7 +1932,7 @@ def _check_bids_events_list(
"sub",
"ses",
"task",
*_bids_entities()["raw"],
*bids_entities()["raw"],
]
for this_img in imgs:
parsed_filename = parse_bids_filename(this_img)
Expand Down