Skip to content

Fixes for sklearn 1.7 pre-release support (backport #2451) #2527

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged 1 commit into the base branch on Jun 16, 2025.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions daal4py/sklearn/ensemble/_forest.py
Original file line number Diff line number Diff line change
Expand Up @@ -679,8 +679,8 @@ def _daal_predict_proba(self, X):
dfc_predictionResult = dfc_algorithm.compute(X, self.daal_model_)

pred = dfc_predictionResult.probabilities

return pred
# TODO: fix probabilities out of [0, 1] interval on oneDAL side
return pred.clip(0.0, 1.0)

def _daal_fit_classifier(self, X, y, sample_weight=None):
y = check_array(y, ensure_2d=False, dtype=None)
Expand Down
8 changes: 7 additions & 1 deletion daal4py/sklearn/manifold/_t_sne.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,13 @@ def _daal_tsne(self, P, n_samples, X_embedded):
[n_samples],
[P.nnz],
[self.n_iter_without_progress],
[self._max_iter if sklearn_check_version("1.5") else self.n_iter],
[
(
self.max_iter
if sklearn_check_version("1.7")
else (self._max_iter if sklearn_check_version("1.5") else self.n_iter)
)
],
]

# Pass params to daal4py backend
Expand Down
2 changes: 1 addition & 1 deletion daal4py/sklearn/metrics/_pairwise.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from functools import partial

import numpy as np
from joblib import effective_n_jobs
from sklearn.exceptions import DataConversionWarning
from sklearn.metrics import pairwise_distances as pairwise_distances_original
from sklearn.metrics.pairwise import (
Expand All @@ -28,7 +29,6 @@
_parallel_pairwise,
check_pairwise_arrays,
)
from sklearn.utils._joblib import effective_n_jobs
from sklearn.utils.validation import check_non_negative

try:
Expand Down
4 changes: 3 additions & 1 deletion onedal/ensemble/forest.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,7 +424,9 @@ def _predict_proba(self, X, hparams=None):
else:
result = self.infer(params, model, X)

return from_table(result.probabilities)
# TODO: fix probabilities out of [0, 1] interval on oneDAL side
pred = from_table(result.probabilities)
return pred.clip(0.0, 1.0)


class RandomForestClassifier(ClassifierMixin, BaseForest, metaclass=ABCMeta):
Expand Down
7 changes: 6 additions & 1 deletion onedal/utils/_array_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,12 @@ def _asarray(data, xp, *args, **kwargs):

def _is_numpy_namespace(xp):
"""Return True if xp is backed by NumPy."""
return xp.__name__ in {"numpy", "array_api_compat.numpy", "numpy.array_api"}
return xp.__name__ in {
"numpy",
"array_api_compat.numpy",
"numpy.array_api",
"sklearn.externals.array_api_compat.numpy",
}


def _get_sycl_namespace(*arrays):
Expand Down
Loading