Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MAINT] change function privacy in _utils #4061

Merged
merged 9 commits into from
Oct 17, 2023
8 changes: 4 additions & 4 deletions nilearn/_utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
from nilearn._utils.helpers import (
_compare_version,
compare_version,
remove_parameters,
rename_parameters,
stringify_path,
)

from .cache_mixin import CacheMixin
from .docs import fill_doc
from .logger import _compose_err_msg
from .logger import compose_err_msg
from .niimg import _repr_niimgs, copy_img, load_niimg
from .niimg_conversions import (
check_niimg,
Expand All @@ -27,10 +27,10 @@
"load_niimg",
"as_ndarray",
"CacheMixin",
"_compose_err_msg",
"compose_err_msg",
"rename_parameters",
"remove_parameters",
"fill_doc",
"stringify_path",
"_compare_version",
"compare_version",
]
14 changes: 7 additions & 7 deletions nilearn/_utils/data_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -515,7 +515,7 @@ def write_fake_fmri_data_and_design(shapes,
return mask_file, fmri_files, design_files


def write_fake_bold_gifti(file_path):
def _write_fake_bold_gifti(file_path):
"""Generate a gifti image and write it to disk.

Note this only generates an empty file for now.
Expand Down Expand Up @@ -572,7 +572,7 @@ def write_fake_bold_img(file_path,
return file_path


def generate_signals_from_precisions(precisions,
def _generate_signals_from_precisions(precisions,
min_n_samples=50,
max_n_samples=100,
random_state=0):
Expand Down Expand Up @@ -710,7 +710,7 @@ def generate_group_sparse_gaussian_graphs(n_subjects=5,
verbose=verbose)

# Generate temporal signals
signals = generate_signals_from_precisions(precisions,
signals = _generate_signals_from_precisions(precisions,
min_n_samples=min_n_samples,
max_n_samples=max_n_samples,
random_state=rand_gen)
Expand Down Expand Up @@ -746,7 +746,7 @@ def basic_paradigm(condition_names_have_spaces=False):
return events


def basic_confounds(length, random_state=0):
def _basic_confounds(length, random_state=0):
"""Generate random motion parameters \
(3 translation directions, 3 rotation directions).

Expand Down Expand Up @@ -779,7 +779,7 @@ def basic_confounds(length, random_state=0):
return confounds


def _add_metadata_to_bids_dataset(bids_path,
def add_metadata_to_bids_dataset(bids_path,
metadata,
json_file=None):
"""Add JSON file with specific metadata to BIDS dataset.
Expand Down Expand Up @@ -1429,7 +1429,7 @@ def _write_bids_derivative_func(
confounds_path = func_path / _create_bids_filename(
fields=fields, entities_to_include=_bids_entities()["raw"]
)
basic_confounds(length=n_time_points, random_state=rand_gen).to_csv(
_basic_confounds(length=n_time_points, random_state=rand_gen).to_csv(
confounds_path, sep="\t", index=None
)

Expand Down Expand Up @@ -1466,4 +1466,4 @@ def _write_bids_derivative_func(
fields=fields,
entities_to_include=entities_to_include
)
write_fake_bold_gifti(gifti_path)
_write_fake_bold_gifti(gifti_path)
6 changes: 3 additions & 3 deletions nilearn/_utils/fmriprep_confounds.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"""


def _flag_single_gifti(img_files):
def flag_single_gifti(img_files):
"""Test if the paired input files are giftis."""
# Possibly two gifti; if file is not correct, will be caught
if isinstance(img_files[0], list):
Expand All @@ -17,12 +17,12 @@ def _flag_single_gifti(img_files):
return all(flag_single_gifti)


def _is_camel_case(s):
def is_camel_case(s):
"""Check if the given string is in camel case."""
return s != s.lower() and s != s.upper() and "_" not in s


def _to_camel_case(snake_str):
def to_camel_case(snake_str):
    """Convert snake case to camel case."""
components = snake_str.split("_")
return components[0] + "".join(x.title() for x in components)
4 changes: 2 additions & 2 deletions nilearn/_utils/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def stringify_path(path):
}


def _compare_version(version_a, operator, version_b):
def compare_version(version_a, operator, version_b):
"""Compare two version strings via a user-specified operator.

Note: This function is inspired from MNE-Python.
Expand All @@ -200,7 +200,7 @@ def _compare_version(version_a, operator, version_b):
from packaging.version import parse

if operator not in VERSION_OPERATORS:
error_msg = "'_compare_version' received an unexpected operator "
error_msg = "'compare_version' received an unexpected operator "
raise ValueError(error_msg + operator + ".")
return VERSION_OPERATORS[operator](parse(version_a), parse(version_b))

Expand Down
4 changes: 2 additions & 2 deletions nilearn/_utils/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def log(
print(f"[{func_name}] {msg}")


def _compose_err_msg(msg, **kwargs):
def compose_err_msg(msg, **kwargs):
"""Append key-value pairs to msg, for display. # noqa: D301.

Parameters
Expand All @@ -96,7 +96,7 @@ def _compose_err_msg(msg, **kwargs):

Example
-------
>>> _compose_err_msg('Error message with arguments...', arg_num=123, \
>>> compose_err_msg('Error message with arguments...', arg_num=123, \
arg_str='filename.nii', arg_bool=True)
'Error message with arguments...\\narg_str: filename.nii'
>>>
Expand Down
2 changes: 1 addition & 1 deletion nilearn/_utils/ndimage.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def get_border_data(data, border_size):
)


def _peak_local_max(
def peak_local_max(
image,
min_distance=10,
threshold_abs=0,
Expand Down
2 changes: 1 addition & 1 deletion nilearn/_utils/niimg.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def load_niimg(niimg, dtype=None):
return niimg


def _is_binary_niimg(niimg):
def is_binary_niimg(niimg):
"""Return whether a given niimg is binary or not.

Parameters
Expand Down
16 changes: 8 additions & 8 deletions nilearn/_utils/niimg_conversions.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from .exceptions import DimensionError
from .helpers import stringify_path
from .niimg import _get_data, _safe_get_data, load_niimg
from .path_finding import _resolve_globbing
from .path_finding import resolve_globbing


def _check_fov(img, affine, shape):
Expand All @@ -25,7 +25,7 @@ def _check_fov(img, affine, shape):
return img.shape[:3] == shape and np.allclose(img.affine, affine)


def _check_same_fov(*args, **kwargs):
def check_same_fov(*args, **kwargs):
"""Return True if provided images have the same field of view (shape and \
affine) and return False or raise an error elsewhere, depending on the \
`raise_error` argument.
Expand Down Expand Up @@ -81,7 +81,7 @@ def _index_img(img, index):
)


def _iter_check_niimg(
def iter_check_niimg(
niimgs,
ensure_ndim=None,
atleast_4d=False,
Expand Down Expand Up @@ -134,7 +134,7 @@ def _iter_check_niimg(

"""
# If niimgs is a string, use glob to expand it to the matching filenames.
niimgs = _resolve_globbing(niimgs)
niimgs = resolve_globbing(niimgs)

ref_fov = None
resample_to_first_img = False
Expand Down Expand Up @@ -273,7 +273,7 @@ def check_niimg(

See Also
--------
_iter_check_niimg, check_niimg_3d, check_niimg_4d
iter_check_niimg, check_niimg_3d, check_niimg_4d

"""
from ..image import new_img_like # avoid circular imports
Expand Down Expand Up @@ -310,7 +310,7 @@ def check_niimg(
# in case of an iterable
if hasattr(niimg, "__iter__") and not isinstance(niimg, str):
if return_iterator:
return _iter_check_niimg(
return iter_check_niimg(
niimg, ensure_ndim=ensure_ndim, dtype=dtype
)
return concat_niimgs(niimg, ensure_ndim=ensure_ndim, dtype=dtype)
Expand Down Expand Up @@ -483,7 +483,7 @@ def concat_niimgs(
ndim = ensure_ndim - 1

# If niimgs is a string, use glob to expand it to the matching filenames.
niimgs = _resolve_globbing(niimgs)
niimgs = resolve_globbing(niimgs)

# First niimg is extracted to get information and for new_img_like
first_niimg = None
Expand Down Expand Up @@ -528,7 +528,7 @@ def concat_niimgs(
for index, (size, niimg) in enumerate(
zip(
lengths,
_iter_check_niimg(
iter_check_niimg(
iterator,
atleast_4d=True,
target_fov=target_fov,
Expand Down
10 changes: 5 additions & 5 deletions nilearn/_utils/param_validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def check_threshold(threshold, data, percentile_func, name="threshold"):
return threshold


def _get_mask_volume(mask_img):
def get_mask_volume(mask_img):
"""Compute the volume of a brain mask in mm^3.

Parameters
Expand All @@ -100,7 +100,7 @@ def _get_mask_volume(mask_img):
return prod_vox_dims * _get_data(mask_img).astype(bool).sum()


def _adjust_screening_percentile(screening_percentile, mask_img, verbose=0):
def adjust_screening_percentile(screening_percentile, mask_img, verbose=0):
"""Adjust the screening percentile according to the MNI152 template.

Parameters
Expand All @@ -126,7 +126,7 @@ def _adjust_screening_percentile(screening_percentile, mask_img, verbose=0):
"""
original_screening_percentile = screening_percentile
# correct screening_percentile according to the volume of the data mask
mask_volume = _get_mask_volume(mask_img)
mask_volume = get_mask_volume(mask_img)
if mask_volume > 1.1 * MNI152_BRAIN_VOLUME:
warnings.warn(
"Brain mask is bigger than the volume of a standard "
Expand Down Expand Up @@ -211,14 +211,14 @@ def check_feature_screening(
)
else:
# correct screening_percentile according to the volume of the data mask
screening_percentile_ = _adjust_screening_percentile(
screening_percentile_ = adjust_screening_percentile(
screening_percentile, mask_img, verbose=verbose
)

return SelectPercentile(f_test, percentile=int(screening_percentile_))


def _check_run_sample_masks(n_runs, sample_masks):
def check_run_sample_masks(n_runs, sample_masks):
"""Check that number of sample_mask matches number of runs."""
if not isinstance(sample_masks, (list, tuple, np.ndarray)):
raise TypeError(
Expand Down
3 changes: 2 additions & 1 deletion nilearn/_utils/path_finding.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from .helpers import stringify_path


def _resolve_globbing(path):
def resolve_globbing(path):
"""Resolve globbing patterns in a path."""
path = stringify_path(path)
if isinstance(path, str):
path_list = sorted(glob.glob(os.path.expanduser(path)))
Expand Down
4 changes: 1 addition & 3 deletions nilearn/_utils/segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,9 +156,7 @@ def _build_laplacian(data, spacing, mask=None, beta=50):
return lap


def _random_walker(
data, labels, beta=130, tol=1.0e-3, copy=True, spacing=None
):
def random_walker(data, labels, beta=130, tol=1.0e-3, copy=True, spacing=None):
"""Random walker algorithm for segmentation from markers.

Parameters
Expand Down
8 changes: 4 additions & 4 deletions nilearn/_utils/tests/test_data_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import pytest

from nilearn._utils.data_gen import (
_add_metadata_to_bids_dataset,
add_metadata_to_bids_dataset,
create_fake_bids_dataset,
generate_fake_fmri,
generate_labeled_regions,
Expand All @@ -19,10 +19,10 @@

def test_add_metadata_to_bids_derivatives_default_path(tmp_path):
"""Check the filename created is the default value \
of _add_metadata_to_bids_dataset."""
of add_metadata_to_bids_dataset."""
target_dir = tmp_path / "derivatives" / "sub-01" / "ses-01" / "func"
target_dir.mkdir(parents=True)
json_file = _add_metadata_to_bids_dataset(
json_file = add_metadata_to_bids_dataset(
bids_path=tmp_path, metadata={"foo": "bar"}
)
assert json_file.exists()
Expand All @@ -40,7 +40,7 @@ def test_add_metadata_to_bids_derivatives_with_json_path(tmp_path):
target_dir = tmp_path / "derivatives" / "sub-02"
target_dir.mkdir(parents=True)
json_file = "derivatives/sub-02/sub-02_task-main_bold.json"
json_file = _add_metadata_to_bids_dataset(
json_file = add_metadata_to_bids_dataset(
bids_path=tmp_path, metadata={"foo": "bar"}, json_file=json_file
)
assert json_file.exists()
Expand Down
6 changes: 3 additions & 3 deletions nilearn/_utils/tests/test_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,15 +141,15 @@ def test_future_warn_deprecated_params():
],
)
def test_compare_version(version_a, operator, version_b):
assert helpers._compare_version(version_a, operator, version_b)
assert helpers.compare_version(version_a, operator, version_b)


def test_compare_version_error():
with pytest.raises(
ValueError,
match="'_compare_version' received an unexpected operator <>.",
match="'compare_version' received an unexpected operator <>.",
):
helpers._compare_version("0.1.0", "<>", "1.1.0")
helpers.compare_version("0.1.0", "<>", "1.1.0")


def test_is_plotly_installed():
Expand Down