Skip to content

Commit

Permalink
[MAINT] Update some missed tests and examples to use new standardize …
Browse files Browse the repository at this point in the history
…strategy to silence warnings (#3821)

* Update more tests

* [full doc] Update examples

* [full doc] Delete repeated tests

* Change almost equal decimal points

* Update spheres maskers standardization test

* Ignore warning configuration

* Revert unnecessary changes

* Formatting

* Change last tests
  • Loading branch information
ymzayek committed Jul 11, 2023
1 parent 54fe838 commit 39c6dcb
Show file tree
Hide file tree
Showing 18 changed files with 43 additions and 37 deletions.
2 changes: 2 additions & 0 deletions examples/03_connectivity/plot_atlas_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
masker = MultiNiftiLabelsMasker(
labels_img=yeo["thick_17"],
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
n_jobs=2,
)
Expand Down Expand Up @@ -156,6 +157,7 @@ def lag_correlation(time_series, lag):
masker = MultiNiftiMapsMasker(
maps_img=difumo.maps,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
n_jobs=2,
)
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_group_level_connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
memory="nilearn_cache",
memory_level=1,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
).fit()

###############################################################################
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
masker = NiftiMapsMasker(
maps_img=atlas_filename,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
verbose=5,
)
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_multi_subject_connectome.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def plot_matrices(cov, prec, title, labels):
high_pass=0.01,
t_r=2,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
memory_level=1,
verbose=2,
Expand Down
2 changes: 2 additions & 0 deletions examples/03_connectivity/plot_seed_to_voxel_correlation.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
radius=8,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand All @@ -93,6 +94,7 @@
smoothing_fwhm=6,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down
1 change: 1 addition & 0 deletions examples/03_connectivity/plot_signal_extraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
masker = NiftiLabelsMasker(
labels_img=atlas_filename,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
memory="nilearn_cache",
verbose=5,
)
Expand Down
3 changes: 3 additions & 0 deletions examples/03_connectivity/plot_sphere_based_connectome.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
radius=8,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down Expand Up @@ -203,6 +204,7 @@
radius=5.0,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down Expand Up @@ -345,6 +347,7 @@
radius=4.5,
detrend=True,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
low_pass=0.1,
high_pass=0.01,
t_r=2,
Expand Down
4 changes: 3 additions & 1 deletion examples/06_manipulating_images/plot_nifti_labels_simple.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,9 @@

# Instantiate the masker with label image and label values
masker = NiftiLabelsMasker(
atlas.maps, labels=atlas.labels, standardize="zscore_sample"
atlas.maps,
labels=atlas.labels,
standardize="zscore_sample",
)

# Visualize the atlas
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
memory="nilearn_cache",
memory_level=1,
standardize="zscore_sample",
standardize_confounds="zscore_sample",
).fit()

masked_data = [
Expand Down
8 changes: 6 additions & 2 deletions nilearn/decoding/tests/test_decoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -565,7 +565,9 @@ def test_decoder_binary_classification_cross_validation(
# check cross-validation scheme and fit attribute with groups enabled
rand_local = np.random.RandomState(42)

model = Decoder(estimator="svc", mask=mask, standardize=True, cv=cv)
model = Decoder(
estimator="svc", mask=mask, standardize="zscore_sample", cv=cv
)
groups = None
if isinstance(cv, LeaveOneGroupOut):
groups = rand_local.binomial(2, 0.3, size=len(y))
Expand Down Expand Up @@ -887,7 +889,9 @@ def test_decoder_multiclass_classification_cross_validation(
# check cross-validation scheme and fit attribute with groups enabled
rand_local = np.random.RandomState(42)

model = Decoder(estimator="svc", mask=mask, standardize=True, cv=cv)
model = Decoder(
estimator="svc", mask=mask, standardize="zscore_sample", cv=cv
)
groups = None
if isinstance(cv, LeaveOneGroupOut):
groups = rand_local.binomial(2, 0.3, size=len(y))
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_multi_nifti_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,12 +241,12 @@ def test_standardization():
mask = Nifti1Image(np.ones(data_shape), np.eye(4))

# z-score
masker = MultiNiftiMasker(mask, standardize="zscore")
masker = MultiNiftiMasker(mask, standardize="zscore_sample")
trans_signals = masker.fit_transform([img1, img2])

for ts in trans_signals:
np.testing.assert_almost_equal(ts.mean(0), 0)
np.testing.assert_almost_equal(ts.std(0), 1)
np.testing.assert_almost_equal(ts.std(0), 1, decimal=3)

# psc
masker = MultiNiftiMasker(mask, standardize="psc")
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_labels_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -543,11 +543,11 @@ def test_standardization():
unstandarized_label_signals = masker.fit_transform(img)

# z-score
masker = NiftiLabelsMasker(labels, standardize="zscore")
masker = NiftiLabelsMasker(labels, standardize="zscore_sample")
trans_signals = masker.fit_transform(img)

np.testing.assert_almost_equal(trans_signals.mean(0), 0)
np.testing.assert_almost_equal(trans_signals.std(0), 1)
np.testing.assert_almost_equal(trans_signals.std(0), 1, decimal=3)

# psc
masker = NiftiLabelsMasker(labels, standardize="psc")
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_maps_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -527,11 +527,11 @@ def test_standardization():
unstandarized_label_signals = masker.fit_transform(img)

# z-score
masker = NiftiMapsMasker(maps, standardize="zscore")
masker = NiftiMapsMasker(maps, standardize="zscore_sample")
trans_signals = masker.fit_transform(img)

np.testing.assert_almost_equal(trans_signals.mean(0), 0)
np.testing.assert_almost_equal(trans_signals.std(0), 1)
np.testing.assert_almost_equal(trans_signals.std(0), 1, decimal=3)

# psc
masker = NiftiMapsMasker(maps, standardize="psc")
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -505,11 +505,11 @@ def test_standardization():
mask = nibabel.Nifti1Image(np.ones(data_shape), np.eye(4))

# z-score
masker = NiftiMasker(mask, standardize="zscore")
masker = NiftiMasker(mask, standardize="zscore_sample")
trans_signals = masker.fit_transform(img)

np.testing.assert_almost_equal(trans_signals.mean(0), 0)
np.testing.assert_almost_equal(trans_signals.std(0), 1)
np.testing.assert_almost_equal(trans_signals.std(0), 1, decimal=3)

# psc
masker = NiftiMasker(mask, standardize="psc")
Expand Down
4 changes: 2 additions & 2 deletions nilearn/maskers/tests/test_nifti_spheres_masker.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,12 +189,12 @@ def test_standardization():
img = nibabel.Nifti1Image(data, np.eye(4))

# test zscore
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="zscore")
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="zscore_sample")
# Test the fit
s = masker.fit_transform(img)

np.testing.assert_almost_equal(s.mean(), 0)
np.testing.assert_almost_equal(s.std(), 1)
np.testing.assert_almost_equal(s.std(), 1, decimal=1)

# test psc
masker = NiftiSpheresMasker([(1, 1, 1)], standardize="psc")
Expand Down
17 changes: 7 additions & 10 deletions nilearn/tests/test_masking.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,9 @@ def _cov_conf(tseries, conf):
return cov_mat


def _confounds_regression(standardize_signal=True, standardize_confounds=True):
def _confounds_regression(
standardize_signal="zscore_sample", standardize_confounds=True
):
img, mask, conf = _simu_img()
masker = NiftiMasker(standardize=standardize_signal,
standardize_confounds=standardize_confounds,
Expand All @@ -72,11 +74,11 @@ def _confounds_regression(standardize_signal=True, standardize_confounds=True):
def test_high_variance_confounds():
img, mask, conf = _simu_img()
hv_confounds = high_variance_confounds(img)
masker1 = NiftiMasker(standardize=True, detrend=False,
masker1 = NiftiMasker(standardize="zscore_sample", detrend=False,
high_variance_confounds=False,
mask_img=mask).fit()
tseries1 = masker1.transform(img, confounds=[hv_confounds, conf])
masker2 = NiftiMasker(standardize=True, detrend=False,
masker2 = NiftiMasker(standardize="zscore_sample", detrend=False,
high_variance_confounds=True,
mask_img=mask).fit()
tseries2 = masker2.transform(img, confounds=conf)
Expand All @@ -97,12 +99,7 @@ def test_confounds_standardization():

# Signal is z-scored with string arg
# Explicit standardization of confounds
assert (_confounds_regression(standardize_signal='zscore',
standardize_confounds=True) < eps)

# Signal is z-scored with boolean arg
# Explicit standardization of confounds
assert (_confounds_regression(standardize_signal=True,
assert (_confounds_regression(standardize_signal='zscore_sample',
standardize_confounds=True) < eps)

# Signal is psc standardized
Expand All @@ -119,7 +116,7 @@ def test_confounds_standardization():
# Signal is z-scored with string arg
# Confounds are not standardized
# In this case, the regression should fail...
assert (_confounds_regression(standardize_signal='zscore',
assert (_confounds_regression(standardize_signal='zscore_sample',
standardize_confounds=False) > 100)

# Signal is psc standardized
Expand Down
17 changes: 3 additions & 14 deletions nilearn/tests/test_signal.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,12 +256,6 @@ def test_standardize():

# transpose array to fit _standardize input.
# Without trend removal
b = nisignal._standardize(a, standardize='zscore')
stds = np.std(b)
np.testing.assert_almost_equal(stds, np.ones(n_features))
np.testing.assert_almost_equal(b.sum(axis=0), np.zeros(n_features))

# Repeating test above but for new correct strategy
b = nisignal._standardize(a, standardize='zscore_sample')
stds = np.std(b)
np.testing.assert_almost_equal(stds, np.ones(n_features), decimal=1)
Expand All @@ -275,16 +269,11 @@ def test_standardize():
b = nisignal._standardize(a, detrend=True, standardize="zscore_sample")
np.testing.assert_almost_equal(b, np.zeros(b.shape))

length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
np.testing.assert_array_equal(length_1_signal,
nisignal._standardize(length_1_signal,
standardize='zscore'))

# Repeating test above but for new correct strategy
length_1_signal = np.atleast_2d(np.linspace(0, 2., n_features))
np.testing.assert_array_equal(
length_1_signal,
nisignal._standardize(length_1_signal, standardize="zscore_sample")
length_1_signal, nisignal._standardize(
length_1_signal, standardize='zscore_sample'
)
)


Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,8 @@ minversion = "6.0"
addopts = "--doctest-modules -s -vv --durations=0"
doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS"
junit_family = "xunit2"
# TODO: Remove filter in release 0.13
filterwarnings = "ignore:.*Please use 'zscore_sample' instead.:FutureWarning"

[tool.codespell]
skip = "./.git,plotly-gl3d-latest.min.js,jquery.min.js,localizer_behavioural.tsv,.mypy_cache,env,venv,./doc/auto_examples,*/tmp,./doc/modules/generated/*,./doc/_build"
Expand Down

0 comments on commit 39c6dcb

Please sign in to comment.