Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove make_covariances #280

Merged
merged 4 commits into from Feb 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 0 additions & 1 deletion doc/api.rst
Expand Up @@ -182,7 +182,6 @@ Datasets

make_gaussian_blobs
make_outliers
make_covariances
make_matrices
make_masks
sample_gaussian_spd
Expand Down
2 changes: 2 additions & 0 deletions doc/whatsnew.rst
Expand Up @@ -36,6 +36,8 @@ v0.6.dev

- Add an example on augmented covariance matrix. :pr:`276` by :user:`carraraig`

- Remove function `make_covariances`. :pr:`280` by :user:`qbarthelemy`

v0.5 (Jun 2023)
---------------

Expand Down
20 changes: 10 additions & 10 deletions examples/simulated/plot_classifier_comparison.py
Expand Up @@ -27,7 +27,7 @@
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split

from pyriemann.datasets import make_covariances, make_gaussian_blobs
from pyriemann.datasets import make_matrices, make_gaussian_blobs
from pyriemann.classification import (
MDM,
KNearestNeighbor,
Expand Down Expand Up @@ -202,28 +202,28 @@ def plot_classifiers(metric):
datasets = [
(
np.concatenate([
make_covariances(
n_matrices, n_channels, rs, evals_mean=10, evals_std=1
make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=10, evals_high=14
),
make_covariances(
n_matrices, n_channels, rs, evals_mean=15, evals_std=1
make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=13, evals_high=17
)
]),
y
),
(
np.concatenate([
make_covariances(
n_matrices, n_channels, rs, evals_mean=10, evals_std=2
make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=10, evals_high=14
),
make_covariances(
n_matrices, n_channels, rs, evals_mean=12, evals_std=2
make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=11, evals_high=15
)
]),
y
),
make_gaussian_blobs(
2*n_matrices, n_channels, random_state=rs, class_sep=1., class_disp=.2,
2*n_matrices, n_channels, random_state=rs, class_sep=1., class_disp=.5,
n_jobs=4
),
make_gaussian_blobs(
Expand Down
58 changes: 29 additions & 29 deletions examples/simulated/plot_nanmean.py
Expand Up @@ -5,7 +5,7 @@

Estimate the mean of SPD matrices corrupted by NaN values [1]_.
"""
# Author: Quentin Barthélemy, Sylvain Chevallier and Florian Yger
# Authors: Quentin Barthélemy, Sylvain Chevallier and Florian Yger
#
# License: BSD (3-clause)

Expand All @@ -14,27 +14,27 @@
import pandas as pd
import seaborn as sns

from pyriemann.datasets import make_covariances
from pyriemann.datasets import make_matrices
from pyriemann.utils.mean import mean_riemann, nanmean_riemann
from pyriemann.utils.distance import distance_riemann


###############################################################################


def corrupt(covmats, n_corrup_channels_max, rs):
n_matrices, n_channels, _ = covmats.shape
def corrupt(mats, n_corrup_channels_max, rs):
n_matrices, n_channels, _ = mats.shape
all_n_corrup_channels, all_corrup_channels = np.zeros(n_matrices), []
for i_matrix in range(n_matrices):
n_corrupt_channels = rs.randint(n_corrup_channels_max + 1, size=1)
corrup_channels = rs.choice(
np.arange(0, n_channels), size=n_corrupt_channels, replace=False)
for i_channel in corrup_channels:
covmats[i_matrix, i_channel] = np.nan
covmats[i_matrix, :, i_channel] = np.nan
mats[i_matrix, i_channel] = np.nan
mats[i_matrix, :, i_channel] = np.nan
all_corrup_channels.append(i_channel)
all_n_corrup_channels[i_matrix] = n_corrupt_channels
return covmats, all_n_corrup_channels, all_corrup_channels
return mats, all_n_corrup_channels, all_corrup_channels


###############################################################################
Expand All @@ -43,19 +43,19 @@ def corrupt(covmats, n_corrup_channels_max, rs):

rs = np.random.RandomState(42)
n_matrices, n_channels = 100, 10
covmats = make_covariances(
n_matrices, n_channels, rs, evals_mean=100., evals_std=20.)
mats = make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=50, evals_high=130)

# Compute the reference, the Riemannian mean on all SPD matrices
C_ref = mean_riemann(covmats)
C_ref = mean_riemann(mats)

# Corrupt data randomly
n_corrup_channels_max = n_channels // 2
print("Maximum number of corrupted channels: {} over {}".format(
n_corrup_channels_max, n_channels))

covmats, all_n_corrup_channels, all_corrup_channels = corrupt(
covmats, n_corrup_channels_max, rs)
mats, all_n_corrup_channels, all_corrup_channels = corrupt(
mats, n_corrup_channels_max, rs)

fig, ax = plt.subplots(nrows=1, ncols=1)
ax.set(title='Histogram of the number of corrupted channels',
Expand Down Expand Up @@ -86,17 +86,17 @@ def corrupt(covmats, n_corrup_channels_max, rs):
# Nan-mean uses as much information as possible to estimate the mean [1]_.

# Euclidean NaN-mean
C_naneucl = np.nanmean(covmats, axis=0)
C_naneucl = np.nanmean(mats, axis=0)

# Riemannian NaN-mean
C_nanriem = nanmean_riemann(covmats)
C_nanriem = nanmean_riemann(mats)

# Riemannian mean, after matrix deletion: average only uncorrupted matrices
isnan = np.isnan(np.sum(covmats, axis=(1, 2)))
covmats_ = np.delete(covmats, np.where(isnan), axis=0)
perc = len(covmats_) / n_matrices * 100
isnan = np.isnan(np.sum(mats, axis=(1, 2)))
mats_ = np.delete(mats, np.where(isnan), axis=0)
perc = len(mats_) / n_matrices * 100
print("Percentage of uncorrupted matrices: {:.2f} %".format(perc))
C_mdriem = mean_riemann(covmats_)
C_mdriem = mean_riemann(mats_)


###############################################################################
Expand Down Expand Up @@ -125,20 +125,20 @@ def corrupt(covmats, n_corrup_channels_max, rs):
# Repeat the previous experiment, varying the maximum number of corrupted
# channels [1]_.

covmats_orig = make_covariances(
n_matrices, n_channels, rs, evals_mean=100., evals_std=20.)
C_ref = mean_riemann(covmats_orig)
mats_orig = make_matrices(
n_matrices, n_channels, "spd", rs, evals_low=50, evals_high=130)
C_ref = mean_riemann(mats_orig)

df = []
for n_corrup_channels_max in range(0, n_channels // 2 + 1):
covmats = np.copy(covmats_orig)
covmats, _, _ = corrupt(covmats, n_corrup_channels_max, rs)

C_naneucl = np.nanmean(covmats, axis=0)
C_nanriem = nanmean_riemann(covmats)
isnan = np.isnan(np.sum(covmats, axis=(1, 2)))
covmats_ = np.delete(covmats, np.where(isnan), axis=0)
C_mdriem = mean_riemann(covmats_)
mats = np.copy(mats_orig)
mats, _, _ = corrupt(mats, n_corrup_channels_max, rs)

C_naneucl = np.nanmean(mats, axis=0)
C_nanriem = nanmean_riemann(mats)
isnan = np.isnan(np.sum(mats, axis=(1, 2)))
mats_ = np.delete(mats, np.where(isnan), axis=0)
C_mdriem = mean_riemann(mats_)

res_naneucl = {'n_corrupt': n_corrup_channels_max,
'dist': distance_riemann(C_ref, C_naneucl),
Expand Down
2 changes: 0 additions & 2 deletions pyriemann/datasets/__init__.py
@@ -1,6 +1,5 @@
from .sampling import sample_gaussian_spd, generate_random_spd_matrix
from .simulated import (
make_covariances,
make_matrices,
make_masks,
make_gaussian_blobs,
Expand All @@ -12,7 +11,6 @@
__all__ = [
"sample_gaussian_spd",
"generate_random_spd_matrix",
"make_covariances",
"make_matrices",
"make_masks",
"make_gaussian_blobs",
Expand Down
50 changes: 0 additions & 50 deletions pyriemann/datasets/simulated.py
Expand Up @@ -6,56 +6,6 @@
from ..utils.base import invsqrtm, powm, sqrtm, expm
from .sampling import generate_random_spd_matrix, sample_gaussian_spd
from ..transfer import encode_domains
from ..utils import deprecated


@deprecated(
"make_covariances is deprecated and will be removed in 0.6.0; "
"please use make_matrices."
)
def make_covariances(n_matrices, n_channels, rs=None, return_params=False,
evals_mean=2.0, evals_std=0.1):
"""Generate a set of covariances matrices, with the same eigenvectors.

Parameters
----------
n_matrices : int
Number of matrices to generate.
n_channels : int
Number of channels in covariance matrices.
rs : RandomState instance, default=None
Random state for reproducible output across multiple function calls.
return_params : bool, default=False
If True, then return parameters.
evals_mean : float, default=2.0
Mean of eigenvalues.
evals_std : float, default=0.1
Standard deviation of eigenvalues.

Returns
-------
covmats : ndarray, shape (n_matrices, n_channels, n_channels)
Covariance matrices.
evals : ndarray, shape (n_matrices, n_channels)
Eigenvalues used for each covariance matrix.
Only returned if ``return_params=True``.
evecs : ndarray, shape (n_channels, n_channels)
Eigenvectors used for all covariance matrices.
Only returned if ``return_params=True``.
"""
rs = check_random_state(rs)

evals = np.abs(evals_mean + evals_std * rs.randn(n_matrices, n_channels))
evecs, _ = np.linalg.qr(rs.randn(n_channels, n_channels))

covmats = np.empty((n_matrices, n_channels, n_channels))
for i in range(n_matrices):
covmats[i] = evecs @ np.diag(evals[i]) @ evecs.T

if return_params:
return covmats, evals, evecs
else:
return covmats


def make_matrices(n_matrices, n_dim, kind, rs=None, return_params=False,
Expand Down
13 changes: 0 additions & 13 deletions tests/test_simulated.py
Expand Up @@ -4,7 +4,6 @@

from pyriemann.datasets.sampling import generate_random_spd_matrix
from pyriemann.datasets.simulated import (
make_covariances,
make_matrices,
make_masks,
make_gaussian_blobs,
Expand All @@ -19,18 +18,6 @@
)


def test_make_covariances(rndstate):
"""Test function for make covariances."""
n_matrices, n_channels = 5, 4
X, evals, evecs = make_covariances(n_matrices=n_matrices,
n_channels=n_channels,
return_params=True,
rs=rndstate)
assert X.shape == (n_matrices, n_channels, n_channels) # X shape mismatch
assert evals.shape == (n_matrices, n_channels) # evals shape mismatch
assert evecs.shape == (n_channels, n_channels) # evecs shape mismatch


@pytest.mark.parametrize(
"kind", ["real", "comp", "spd", "spsd", "hpd", "hpsd"]
)
Expand Down