diff --git a/pyproject.toml b/pyproject.toml
index c6d98c36..a7b16e6c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,6 +62,7 @@ test = [
 ]
 
 all_extras = [
     "hyperactive[integrations]",
+    "optuna<5",
 ]
diff --git a/src/hyperactive/opt/una/__init__.py b/src/hyperactive/opt/una/__init__.py
new file mode 100644
index 00000000..58de4fd7
--- /dev/null
+++ b/src/hyperactive/opt/una/__init__.py
@@ -0,0 +1,6 @@
+"""Optuna-based optimizers."""
+# copyright: hyperactive developers, MIT License (see LICENSE file)
+
+from hyperactive.opt.una._optuna import OptunaOptimizer
+
+__all__ = ["OptunaOptimizer"]
diff --git a/src/hyperactive/opt/una/_optuna.py b/src/hyperactive/opt/una/_optuna.py
new file mode 100644
index 00000000..1c210ff5
--- /dev/null
+++ b/src/hyperactive/opt/una/_optuna.py
@@ -0,0 +1,104 @@
+"""Optuna optimizer interface."""
+# copyright: hyperactive developers, MIT License (see LICENSE file)
+
+from hyperactive.base import BaseOptimizer
+
+
+class OptunaOptimizer(BaseOptimizer):
+    """Optuna optimizer interface.
+
+    Parameters
+    ----------
+    param_space : dict[str, tuple or list or optuna distribution]
+        The search space to explore. Dictionary with parameter names
+        as keys and either tuples/lists of (low, high) or
+        optuna distribution objects as values.
+    n_trials : int, default=100
+        Number of optimization trials.
+    experiment : BaseExperiment, optional
+        The experiment to optimize parameters for.
+        Optional, can be passed later via ``set_params``.
+
+    Example
+    -------
+    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
+    >>> from hyperactive.opt.una import OptunaOptimizer
+    >>> from sklearn.datasets import load_iris
+    >>> from sklearn.svm import SVC
+    >>> X, y = load_iris(return_X_y=True)
+    >>> sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
+    >>> param_space = {
+    ...     "C": (0.01, 10),
+    ...     "gamma": (0.0001, 10),
+    ... }
+    >>> optimizer = OptunaOptimizer(
+    ...     param_space=param_space, n_trials=50, experiment=sklearn_exp
+    ... )
+    >>> best_params = optimizer.run()
+    """
+
+    _tags = {
+        "python_dependencies": ["optuna"],
+        "info:name": "Optuna-based optimizer",
+    }
+
+    def __init__(
+        self,
+        param_space=None,
+        n_trials=100,
+        experiment=None
+    ):
+        self.param_space = param_space
+        self.n_trials = n_trials
+        self.experiment = experiment
+        super().__init__()
+
+    def _objective(self, trial):
+        from optuna.distributions import BaseDistribution
+
+        params = {}
+        for key, space in self.param_space.items():
+            if isinstance(space, BaseDistribution):  # optuna distribution object
+                params[key] = trial._suggest(key, space)
+            elif isinstance(space, (tuple, list)) and len(space) == 2:
+                low, high = space
+                # decide int vs float suggestion based on the type of the bounds
+                if isinstance(low, int) and isinstance(high, int):
+                    params[key] = trial.suggest_int(key, low, high)
+                else:
+                    params[key] = trial.suggest_float(key, low, high, log=False)
+            else:
+                raise ValueError(f"Invalid parameter space for key '{key}': {space}")
+
+        # evaluate the experiment with the suggested params
+        return self.experiment(**params)
+
+    def _run(self, experiment, param_space, n_trials):
+        import optuna
+
+        study = optuna.create_study(direction="minimize")
+        study.optimize(self._objective, n_trials=n_trials)
+
+        self.best_score_ = study.best_value
+        self.best_params_ = study.best_params
+        return study.best_params
+
+    @classmethod
+    def get_test_params(cls, parameter_set="default"):
+        """Return testing parameter settings for the optimizer."""
+
+        from hyperactive.experiment.integrations import SklearnCvExperiment
+        from sklearn.datasets import load_iris
+        from sklearn.svm import SVC
+
+        X, y = load_iris(return_X_y=True)
+        sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
+
+        param_space = {
+            "C": (0.01, 10),
+            "gamma": (0.0001, 10),
+        }
+
+        return [{
+            "param_space": param_space,
+            "n_trials": 10,
+            "experiment": sklearn_exp,
+        }]
diff --git a/src/hyperactive/tests/test_all_objects.py b/src/hyperactive/tests/test_all_objects.py
index b5b4955f..9a14f372 100644
--- a/src/hyperactive/tests/test_all_objects.py
+++ b/src/hyperactive/tests/test_all_objects.py
@@ -6,6 +6,7 @@
 from skbase.testing import BaseFixtureGenerator as _BaseFixtureGenerator
 from skbase.testing import QuickTester as _QuickTester
 from skbase.testing import TestAllObjects as _TestAllObjects
+from skbase.utils.dependencies import _check_estimator_deps
 
 from hyperactive._registry import all_objects
 from hyperactive.tests._config import EXCLUDE_ESTIMATORS, EXCLUDED_TESTS
@@ -120,11 +121,13 @@ def _all_objects(self):
         if isclass(filter):
             obj_list = [obj for obj in obj_list if issubclass(obj, filter)]
 
+        def run_test_for_class(obj):
+            return _check_estimator_deps(obj, severity="none")
+
         # run_test_for_class selects the estimators to run
-        # based on whether they have changed, and whether they have all dependencies
-        # internally, uses the ONLY_CHANGED_MODULES flag,
+        # based on whether they have all dependencies
         # and checks the python env against python_dependencies tag
-        # obj_list = [obj for obj in obj_list if run_test_for_class(obj)]
+        obj_list = [obj for obj in obj_list if run_test_for_class(obj)]
 
         return obj_list
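
Note for reviewers: the snippet below is a minimal usage sketch, not part of the patch. It assumes the diff above is applied and that optuna is installed (>=3 for FloatDistribution is my assumption; the new pin only requires <5). It exercises both branches of OptunaOptimizer._objective: a plain (low, high) tuple, which is mapped to trial.suggest_float, and a pre-built optuna distribution object for a log-scaled range, which the tuple form cannot express.

# Minimal sketch; assumes the patched hyperactive.opt.una module and optuna>=3,<5.
from optuna.distributions import FloatDistribution
from sklearn.datasets import load_iris
from sklearn.svm import SVC

from hyperactive.experiment.integrations import SklearnCvExperiment
from hyperactive.opt.una import OptunaOptimizer

X, y = load_iris(return_X_y=True)
sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)

param_space = {
    "C": (0.01, 10),  # plain tuple -> mapped to trial.suggest_float
    "gamma": FloatDistribution(1e-4, 10, log=True),  # distribution object, log-scaled
}

optimizer = OptunaOptimizer(param_space=param_space, n_trials=20, experiment=sklearn_exp)
best_params = optimizer.run()
print(best_params)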