Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ test = [
]
all_extras = [
"hyperactive[integrations]",
"optuna<5",
]


Expand Down
6 changes: 6 additions & 0 deletions src/hyperactive/opt/una/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""Grid search with sklearn style grid and backends."""
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

copy-paste error

# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.opt.una._optuna import OptunaOptimizer

__all__ = ["OptunaOptimizer"]
104 changes: 104 additions & 0 deletions src/hyperactive/opt/una/_optuna.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
"""Optuna optimizer interface."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.base import BaseOptimizer


class OptunaOptimizer(BaseOptimizer):
    """Optuna optimizer interface.

    Parameters
    ----------
    param_space : dict[str, tuple or list or optuna distributions]
        The search space to explore. Dictionary with parameter names
        as keys and one of the following as values:

        * a tuple/list ``(low, high)`` of two numbers — sampled as an
          integer range if both bounds are ``int``, otherwise as a
          float range
        * any other tuple/list — sampled as a categorical choice over
          its elements
        * an optuna distribution object — sampled from directly
    n_trials : int, default=100
        Number of optimization trials.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Example
    -------
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from hyperactive.opt.una import OptunaOptimizer
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>> X, y = load_iris(return_X_y=True)
    >>> sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
    >>> param_space = {
    ...     "C": (0.01, 10),
    ...     "gamma": (0.0001, 10),
    ... }
    >>> optimizer = OptunaOptimizer(
    ...     param_space=param_space, n_trials=50, experiment=sklearn_exp
    ... )
    >>> best_params = optimizer.run()
    """

    _tags = {
        "python_dependencies": ["optuna"],
        "info:name": "Optuna-based optimizer",
    }

    def __init__(
        self,
        param_space=None,
        n_trials=100,
        experiment=None
    ):
        self.param_space = param_space
        self.n_trials = n_trials
        self.experiment = experiment
        super().__init__()

    def _objective(self, trial):
        """Optuna objective: sample params from the space and score them.

        Parameters
        ----------
        trial : optuna.trial.Trial
            The trial used to sample parameter values.

        Returns
        -------
        float
            The experiment score for the sampled parameters
            (lower is better; see ``_run``).

        Raises
        ------
        ValueError
            If a ``param_space`` entry is neither a distribution object
            nor a tuple/list.
        """
        params = {}
        for key, space in self.param_space.items():
            if hasattr(space, "suggest"):  # optuna distribution object
                # NOTE: Trial._suggest is private optuna API; signature is
                # _suggest(name, distribution), i.e., the name comes first.
                params[key] = trial._suggest(key, space)
            elif (
                isinstance(space, (tuple, list))
                and len(space) == 2
                and all(isinstance(v, (int, float)) for v in space)
            ):
                low, high = space
                # Decide type based on low/high type
                if isinstance(low, int) and isinstance(high, int):
                    params[key] = trial.suggest_int(key, low, high)
                else:
                    params[key] = trial.suggest_float(key, low, high, log=False)
            elif isinstance(space, (tuple, list)):
                # any other finite collection is treated as categorical choices
                params[key] = trial.suggest_categorical(key, list(space))
            else:
                raise ValueError(f"Invalid parameter space for key '{key}': {space}")

        # Evaluate experiment with suggested params
        return self.experiment(**params)

    def _run(self, experiment, param_space, n_trials):
        """Run the optimization via an optuna study.

        The arguments mirror the constructor parameters and are passed in
        by the base class API for convenience; sampling reads
        ``self.param_space`` and ``self.experiment`` through ``_objective``.

        Parameters
        ----------
        experiment : BaseExperiment
            The experiment to optimize parameters for.
        param_space : dict
            The search space, see the class docstring.
        n_trials : int
            Number of optimization trials.

        Returns
        -------
        dict
            The best parameters found by the study.
        """
        import optuna

        # Optimizers are uniformly minimizers; the experiment is
        # responsible for the sign of the objective.
        study = optuna.create_study(direction="minimize")
        study.optimize(self._objective, n_trials=n_trials)

        self.best_score_ = study.best_value
        self.best_params_ = study.best_params
        return study.best_params

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the optimizer.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return.

        Returns
        -------
        list of dict
            Keyword-argument dicts to construct test instances with.
        """
        from hyperactive.experiment.integrations import SklearnCvExperiment
        from sklearn.datasets import load_iris
        from sklearn.svm import SVC

        X, y = load_iris(return_X_y=True)
        sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)

        param_space = {
            "C": (0.01, 10),
            "gamma": (0.0001, 10),
        }

        return [{
            "param_space": param_space,
            "n_trials": 10,
            "experiment": sklearn_exp,
        }]
9 changes: 6 additions & 3 deletions src/hyperactive/tests/test_all_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from skbase.testing import BaseFixtureGenerator as _BaseFixtureGenerator
from skbase.testing import QuickTester as _QuickTester
from skbase.testing import TestAllObjects as _TestAllObjects
from skbase.utils.dependencies import _check_estimator_deps

from hyperactive._registry import all_objects
from hyperactive.tests._config import EXCLUDE_ESTIMATORS, EXCLUDED_TESTS
Expand Down Expand Up @@ -120,11 +121,13 @@ def _all_objects(self):
if isclass(filter):
obj_list = [obj for obj in obj_list if issubclass(obj, filter)]

def run_test_for_class(obj):
return _check_estimator_deps(obj, severity="none")

# run_test_for_class selects the estimators to run
# based on whether they have changed, and whether they have all dependencies
# internally, uses the ONLY_CHANGED_MODULES flag,
# based on whether they have all dependencies
# and checks the python env against python_dependencies tag
# obj_list = [obj for obj in obj_list if run_test_for_class(obj)]
obj_list = [obj for obj in obj_list if run_test_for_class(obj)]

return obj_list

Expand Down