Commit

Remove some warnings in tests.

vnmabus committed Oct 13, 2023
1 parent 14b1416 commit 4f0bd4c
Showing 7 changed files with 16 additions and 55 deletions.
8 changes: 7 additions & 1 deletion skfda/inference/anova/_anova_oneway.py
@@ -182,7 +182,13 @@ def v_asymptotic_stat(
     .. footbibliography::
     """
-    return float(_v_asymptotic_stat_with_reps(*fd, weights=weights, p=p))
+    return float(
+        _v_asymptotic_stat_with_reps(
+            *fd,
+            weights=weights,
+            p=p,
+        ).reshape(())
+    )
 
 
 def _anova_bootstrap(
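Not stated in the commit, but the reshape to a 0-d array is presumably there because NumPy (1.25 and later) emits a DeprecationWarning when float() is applied to an array with more than zero dimensions. A minimal sketch of that pattern, with made-up values:

import numpy as np

stat = np.array([0.5])           # shape (1,): float(stat) triggers a DeprecationWarning on NumPy >= 1.25
value = float(stat.reshape(()))  # a 0-d array converts to float without complaint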
4 changes: 2 additions & 2 deletions skfda/misc/metrics/_utils.py
@@ -120,8 +120,8 @@ class NormInducedMetric(Metric[VectorType]):
         >>> l2_distance = NormInducedMetric(l2_norm)
         >>> d = l2_distance(fd, fd2)
-        >>> float('%.3f'% d)
-        0.289
+        >>> float(d[0])
+        0.288...
     """
 
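The doctest now indexes the returned array instead of %-formatting it; my reading is that this sidesteps the same NumPy scalar-conversion warning, with the trailing "..." relying on doctest ellipsis matching. A hypothetical standalone version of the new style, with an invented value:

import numpy as np

d = np.array([0.2884999])  # a made-up distance returned as a length-1 array
print(float(d[0]))         # indexing first gives a plain scalar; doctest matches it as 0.288...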
4 changes: 1 addition & 3 deletions skfda/ml/classification/_logistic_regression.py
@@ -132,11 +132,9 @@ def fit( # noqa: D102, WPS210
                 (self.max_features, n_features),
             )
 
-        penalty = 'none' if self.penalty is None else self.penalty
-
         # multivariate logistic regression
         mvlr = mvLogisticRegression(
-            penalty=penalty,
+            penalty=self.penalty,
            C=self.C,
            solver=self.solver,
            max_iter=self.max_iter,
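The commit does not say so explicitly, but this tracks scikit-learn's change of spelling: if I remember the timeline correctly, LogisticRegression deprecated the string 'none' around release 1.2 in favour of passing None, so forwarding self.penalty unchanged avoids the deprecation warning. A sketch (not skfda code):

from sklearn.linear_model import LogisticRegression

clf_old = LogisticRegression(penalty='none')  # deprecated spelling, warns on recent scikit-learn
clf_new = LogisticRegression(penalty=None)    # supported spelling for an unpenalized fit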
2 changes: 1 addition & 1 deletion skfda/ml/clustering/_hierarchical.py
@@ -173,7 +173,7 @@ def _init_estimator(self) -> None:
 
         self._estimator = sklearn.cluster.AgglomerativeClustering(
             n_clusters=self.n_clusters,
-            affinity='precomputed',
+            metric='precomputed',
             memory=self.memory,
             connectivity=self.connectivity,
             compute_full_tree=self.compute_full_tree,
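This follows scikit-learn's rename of the AgglomerativeClustering parameter: affinity was deprecated in favour of metric (around release 1.2, to the best of my knowledge), so passing metric='precomputed' silences the warning. A sketch of the new spelling (not skfda code):

from sklearn.cluster import AgglomerativeClustering

model = AgglomerativeClustering(
    n_clusters=3,
    metric='precomputed',  # formerly affinity='precomputed'
    linkage='average',     # 'ward' cannot be used with a precomputed distance matrix
)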
(additional changed file; filename not captured)
@@ -83,7 +83,7 @@ def _rkhs_vs(
             [indexes[j]],
         ])
 
-        new_means = np.atleast_2d(means[new_selection])
+        new_means = means[new_selection]
 
         lstsq_solution = linalg.lstsq(
             variances[new_selection[:, np.newaxis], new_selection],
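No rationale is recorded in the commit for dropping np.atleast_2d here; a plausible reading is that keeping the fancy-indexed result 1-D lets the downstream computation produce true scalars rather than 1-element arrays. A toy illustration of the shape difference, with assumed shapes that are not taken from skfda:

import numpy as np

means = np.array([0.3, 1.1, 0.7, 0.2])             # assumed 1-D vector of per-feature means
new_selection = np.array([0, 2])                   # indices of the currently selected features
print(means[new_selection].shape)                  # (2,): plain fancy indexing keeps it 1-D
print(np.atleast_2d(means[new_selection]).shape)   # (1, 2): the old promotion added a leading axis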
(additional changed file; filename not captured)
@@ -160,7 +160,7 @@ def _mrmr(
         redundancies[last_selected, j] = redundancy_dependence_measure(
             X[:, last_selected, np.newaxis],
             X[:, j, np.newaxis],
-        )
+        ).item()
         redundancies[j, last_selected] = redundancies[last_selected, j]
 
     W = np.mean(
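Calling .item() looks like another fix for the same NumPy deprecation: storing a size-1 array into a cell of a float array now warns, while .item() extracts a plain Python scalar. A small sketch with invented values:

import numpy as np

measure = np.array([[0.42]])          # e.g. a dependence measure returned as a 1x1 array
redundancies = np.zeros((3, 3))
redundancies[0, 1] = measure.item()   # .item() gives a Python float, so the assignment does not warn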
(additional changed file; filename not captured)
@@ -17,6 +17,7 @@
     overload,
 )
 
+import dcor
 import numpy as np
 import numpy.linalg as linalg
 import numpy.ma as ma
@@ -25,7 +26,6 @@
 from sklearn.base import clone
 from typing_extensions import Literal
 
-import dcor
 from skfda.exploratory.stats.covariance import (
     CovarianceEstimator,
     EmpiricalCovariance,
@@ -41,7 +41,6 @@
 
 if TYPE_CHECKING:
     from ....misc.covariances import CovarianceLike
-    import GPy
 
 
 def _transform_to_2d(t: ArrayLike) -> NDArrayFloat:
@@ -56,48 +55,6 @@ def _transform_to_2d(t: ArrayLike) -> NDArrayFloat:
     return t
 
 
-class _PicklableKernel():
-    """Class used to pickle GPy kernels."""
-
-    def __init__(self, kernel: GPy.kern.Kern) -> None:
-        super().__setattr__('_PicklableKernel__kernel', kernel)
-
-    def __getattr__(self, name: str) -> Any:
-        if name != '__deepcopy__':
-            return getattr(self.__kernel, name)
-
-    def __setattr__(self, name: str, value: Any) -> None:
-        setattr(self.__kernel, name, value)
-
-    def __getstate__(self) -> Mapping[str, Any]:
-        return {
-            'class': self.__kernel.__class__,
-            'input_dim': self.__kernel.input_dim,
-            'values': self.__kernel.param_array,
-        }
-
-    def __setstate__(self, state: Mapping[str, Any]) -> None:
-        super().__setattr__('_PicklableKernel__kernel', state['class'](
-            input_dim=state['input_dim']),
-        )
-        self.__kernel.param_array[...] = state['values']
-
-    def __call__(self, *args: Any, **kwargs: Any) -> NDArrayFloat:
-        return self.__kernel.K(*args, **kwargs)  # type: ignore[no-any-return]
-
-
-def make_kernel(k: CovarianceLike) -> CovarianceLike:
-    try:
-        import GPy
-    except ImportError:
-        return k
-
-    if isinstance(k, GPy.kern.Kern):
-        return _PicklableKernel(k)
-
-    return k
-
-
 def _absolute_argmax(
     function: FDataGrid,
     *,
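With the _PicklableKernel wrapper and make_kernel gone, optional GPy kernels are no longer special-cased and covariance functions are stored as given (see the self.cov change below). As far as I can tell, skfda's own covariance objects remain the natural input; a hedged sketch, assuming skfda.misc.covariances.Gaussian keeps its variance and length_scale parameters:

from skfda.misc.covariances import Gaussian

cov = Gaussian(variance=1.0, length_scale=0.5)  # a plain callable covariance; no GPy wrapping involved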
@@ -255,7 +212,7 @@ def __init__(
         super().__init__()
 
         self.mean = mean
-        self.cov = make_kernel(cov)
+        self.cov = cov
 
     def _evaluate_mean(self, t: NDArrayFloat) -> NDArrayFloat:
 
@@ -625,7 +582,7 @@ def __call__(
         **kwargs: Any,
     ) -> bool:
 
-        score = float(dependences.data_matrix[0, selected_index, 0])
+        score = float(dependences.data_matrix[(0,) + selected_index + (0,)])
 
         return score < self.threshold
 
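Splicing selected_index into one flat index tuple selects a single element of data_matrix whatever the domain dimension, so float() sees a scalar; passing a tuple-valued selected_index as a separate argument would be treated as a fancy index and return an array. A toy sketch with an assumed (sample, *grid_shape, codomain) layout:

import numpy as np

data_matrix = np.zeros((1, 4, 5, 1))   # assumed layout: one sample, 4x5 grid, scalar codomain
selected_index = (2, 3)                # a grid position for a two-dimensional domain
score = float(data_matrix[(0,) + selected_index + (0,)])  # one element, safe to convert to float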
