Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MAINT] Update some missed tests and examples to use new standardize strategy to silence warnings #3821

Merged
merged 9 commits into the base branch from the contributor's branch on
Jul 11, 2023
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 2 additions & 0 deletions examples/03_connectivity/plot_atlas_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
masker = MultiNiftiLabelsMasker(
labels_img=yeo["thick_17"],
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
n_jobs=2,
)
Expand Down Expand Up @@ -156,6 +157,7 @@ def lag_correlation(time_series, lag):
masker = MultiNiftiMapsMasker(
maps_img=difumo.maps,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
n_jobs=2,
)
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_group_level_connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
memory="nilearn_cache",
memory_level=1,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
).fit()

###############################################################################
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
masker = NiftiMapsMasker(
maps_img=atlas_filename,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
verbose=5,
)
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_multi_subject_connectome.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def plot_matrices(cov, prec, title, labels):
high_pass=0.01,
t_r=2,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
memory_level=1,
verbose=2,
Expand Down
2 changes: 2 additions & 0 deletions examples/03_connectivity/plot_seed_to_voxel_correlation.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
radius=8,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand All @@ -93,6 +94,7 @@
smoothing_fwhm=6,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_signal_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
masker = NiftiLabelsMasker(
labels_img=atlas_filename,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
verbose=5,
)
Expand Down
3 changes: 3 additions & 0 deletions examples/03_connectivity/plot_sphere_based_connectome.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
radius=8,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down Expand Up @@ -203,6 +204,7 @@
radius=5.0,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down Expand Up @@ -345,6 +347,7 @@
radius=4.5,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down
4 changes: 3 additions & 1 deletion examples/06_manipulating_images/plot_nifti_labels_simple.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,9 @@

# Instantiate the masker with label image and label values
masker = NiftiLabelsMasker(
atlas.maps, labels=atlas.labels, standardize="zscore_sample"
atlas.maps,
labels=atlas.labels,
standardize="zscore_sample",
)

# Visualize the atlas
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
memory="nilearn_cache",
memory_level=1,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
).fit()

masked_data = [
Expand Down
8 changes: 6 additions & 2 deletions nilearn/decoding/tests/test_decoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,7 +566,9 @@ def test_decoder_binary_classification_cross_validation(
# check cross-validation scheme and fit attribute with groups enabled
rand_local = np.random.RandomState(42)

model = Decoder(estimator="svc", mask=mask, standardize=True, cv=cv)
model = Decoder(
estimator="svc", mask=mask, standardize="zscore_sample", cv=cv
)
groups = None
if isinstance(cv, LeaveOneGroupOut):
groups = rand_local.binomial(2, 0.3, size=len(y))
Expand Down Expand Up @@ -888,7 +890,9 @@ def test_decoder_multiclass_classification_cross_validation(
# check cross-validation scheme and fit attribute with groups enabled
rand_local = np.random.RandomState(42)

model = Decoder(estimator="svc", mask=mask, standardize=True, cv=cv)
model = Decoder(
estimator="svc", mask=mask, standardize="zscore_sample", cv=cv
)
groups = None
if isinstance(cv, LeaveOneGroupOut):
groups = rand_local.binomial(2, 0.3, size=len(y))
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_maps_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -527,11 +527,11 @@ def test_standardization():
unstandarized_label_signals = masker.fit_transform(img)

# z-score
masker = NiftiMapsMasker(maps, standardize="zscore")
masker = NiftiMapsMasker(maps, standardize="zscore_sample")
trans_signals = masker.fit_transform(img)

np.testing.assert_almost_equal(trans_signals.mean(0), 0)
np.testing.assert_almost_equal(trans_signals.std(0), 1)
np.testing.assert_almost_equal(trans_signals.std(0), 1, decimal=3)

# psc
masker = NiftiMapsMasker(maps, standardize="psc")
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_spheres_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,12 +189,12 @@ def test_standardization():
img = nibabel.Nifti1Image(data, np.eye(4))

# test zscore
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="zscore")
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="zscore_sample")
# Test the fit
s = masker.fit_transform(img)

np.testing.assert_almost_equal(s.mean(), 0)
np.testing.assert_almost_equal(s.std(), 1)
np.testing.assert_almost_equal(s.std(), 1, decimal=1)

# test psc
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="psc")
Expand Down
17 changes: 7 additions & 10 deletions nilearn/tests/test_masking.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@ def _cov_conf(tseries, conf):
return cov_mat


def _confounds_regression(standardize_signal=True, standardize_confounds=True):
def _confounds_regression(
standardize_signal="zscore_sample", standardize_confounds=True
):
img, mask, conf = _simu_img()
masker = NiftiMasker(standardize=standardize_signal,
standardize_confounds=standardize_confounds,
Expand All @@ -73,11 +75,11 @@ def _confounds_regression(standardize_signal=True, standardize_confounds=True):
def test_high_variance_confounds():
img, mask, conf = _simu_img()
hv_confounds = high_variance_confounds(img)
masker1 = NiftiMasker(standardize=True, detrend=False,
masker1 = NiftiMasker(standardize="zscore_sample", detrend=False,
high_variance_confounds=False,
mask_img=mask).fit()
tseries1 = masker1.transform(img, confounds=[hv_confounds, conf])
masker2 = NiftiMasker(standardize=True, detrend=False,
masker2 = NiftiMasker(standardize="zscore_sample", detrend=False,
high_variance_confounds=True,
mask_img=mask).fit()
tseries2 = masker2.transform(img, confounds=conf)
Expand All @@ -98,12 +100,7 @@ def test_confounds_standardization():

# Signal is z-scored with string arg
# Explicit standardization of confounds
assert (_confounds_regression(standardize_signal='zscore',
standardize_confounds=True) < eps)

# Signal is z-scored with boolean arg
# Explicit standardization of confounds
assert (_confounds_regression(standardize_signal=True,
assert (_confounds_regression(standardize_signal='zscore_sample',
standardize_confounds=True) < eps)

# Signal is psc standardized
Expand All @@ -120,7 +117,7 @@ def test_confounds_standardization():
# Signal is z-scored with string arg
# Confounds are not standardized
# In this case, the regression should fail...
assert (_confounds_regression(standardize_signal='zscore',
assert (_confounds_regression(standardize_signal='zscore_sample',
standardize_confounds=False) > 100)

# Signal is psc standardized
Expand Down
17 changes: 3 additions & 14 deletions nilearn/tests/test_signal.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,12 +257,6 @@ def test_standardize():

# transpose array to fit _standardize input.
# Without trend removal
b = nisignal._standardize(a, standardize='zscore')
stds = np.std(b)
np.testing.assert_almost_equal(stds, np.ones(n_features))
np.testing.assert_almost_equal(b.sum(axis=0), np.zeros(n_features))

bthirion marked this conversation as resolved.
Show resolved Hide resolved
# Repeating test above but for new correct strategy
b = nisignal._standardize(a, standardize='zscore_sample')
stds = np.std(b)
np.testing.assert_almost_equal(stds, np.ones(n_features), decimal=1)
Expand All @@ -276,16 +270,11 @@ def test_standardize():
b = nisignal._standardize(a, detrend=True, standardize="zscore_sample")
np.testing.assert_almost_equal(b, np.zeros(b.shape))

length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
np.testing.assert_array_equal(length_1_signal,
nisignal._standardize(length_1_signal,
standardize='zscore'))

# Repeating test above but for new correct strategy
bthirion marked this conversation as resolved.
Show resolved Hide resolved
length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
np.testing.assert_array_equal(
length_1_signal,
nisignal._standardize(length_1_signal, standardize="zscore_sample")
length_1_signal, nisignal._standardize(
length_1_signal, standardize='zscore_sample'
)
)


Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,8 @@ minversion = "6.0"
addopts = "--doctest-modules -s -vv --durations=0"
doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS"
junit_family = "xunit2"
# TODO: Remove filter in release 0.13
filterwarnings = "ignore:.*Please use 'zscore_sample' instead.:FutureWarning"

[tool.codespell]
skip = "./.git,plotly-gl3d-latest.min.js,jquery.min.js,localizer_behavioural.tsv,.mypy_cache,env,venv,./doc/auto_examples,*/tmp,./doc/modules/generated/*"
Expand Down