-
Notifications
You must be signed in to change notification settings - Fork 65
[ENH] optuna based optimizer
#140
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -62,6 +62,7 @@ test = [ | |
| ] | ||
| all_extras = [ | ||
| "hyperactive[integrations]", | ||
| "optuna<5", | ||
| ] | ||
|
|
||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,6 @@ | ||
| """Grid search with sklearn style grid and backends.""" | ||
| # copyright: hyperactive developers, MIT License (see LICENSE file) | ||
|
|
||
| from hyperactive.opt.una._optuna import OptunaOptimizer | ||
|
|
||
| __all__ = ["OptunaOptimizer"] | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,104 @@ | ||
| """Optuna optimizer interface.""" | ||
| # copyright: hyperactive developers, MIT License (see LICENSE file) | ||
|
|
||
| from hyperactive.base import BaseOptimizer | ||
|
|
||
|
|
||
class OptunaOptimizer(BaseOptimizer):
    """Optuna optimizer interface.

    Parameters
    ----------
    param_space : dict[str, tuple or list or optuna distributions]
        The search space to explore. Dictionary with parameter names
        as keys and either tuples/lists of (low, high) or
        optuna distribution objects as values.
    n_trials : int, default=100
        Number of optimization trials.
    experiment : BaseExperiment, optional
        The experiment to optimize parameters for.
        Optional, can be passed later via ``set_params``.

    Example
    -------
    >>> from hyperactive.experiment.integrations import SklearnCvExperiment
    >>> from hyperactive.opt.una import OptunaOptimizer
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.svm import SVC
    >>> X, y = load_iris(return_X_y=True)
    >>> sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y)
    >>> param_space = {
    ...     "C": (0.01, 10),
    ...     "gamma": (0.0001, 10),
    ... }
    >>> optimizer = OptunaOptimizer(
    ...     param_space=param_space, n_trials=50, experiment=sklearn_exp
    ... )
    >>> best_params = optimizer.run()
    """

    _tags = {
        "python_dependencies": ["optuna"],
        "info:name": "Optuna-based optimizer",
    }

    def __init__(self, param_space=None, n_trials=100, experiment=None):
        # store constructor args unmodified, per the scikit-learn/skbase
        # convention that get_params/set_params round-trip exactly
        self.param_space = param_space
        self.n_trials = n_trials
        self.experiment = experiment
        super().__init__()
|
|
||
| def _objective(self, trial): | ||
| params = {} | ||
| for key, space in self.param_space.items(): | ||
| if hasattr(space, "suggest"): # optuna distribution object | ||
| params[key] = trial._suggest(space, key) | ||
| elif isinstance(space, (tuple, list)) and len(space) == 2: | ||
| low, high = space | ||
| # Decide type based on low/high type | ||
| if isinstance(low, int) and isinstance(high, int): | ||
| params[key] = trial.suggest_int(key, low, high) | ||
| else: | ||
| params[key] = trial.suggest_float(key, low, high, log=False) | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think we could already handle other things, like distributions or categorical here.
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. yes - I was still thinking what the best interface and parameterization is. |
||
| else: | ||
| raise ValueError(f"Invalid parameter space for key '{key}': {space}") | ||
|
|
||
| # Evaluate experiment with suggested params | ||
| return self.experiment(**params) | ||
|
|
||
| def _run(self, experiment, param_space, n_trials): | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Arguments not used. I am still not sure about this part in our API. But I will take a look at this at a later point.
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. yes, but they need to be there unfortunately, due to the API design passing them as a means of convenience. |
||
| import optuna | ||
|
|
||
| study = optuna.create_study(direction="minimize") | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. According to this PR: #142
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. yes, that was my feeling - the optimizers being minimizers all, and expeirments having the sign. Which of course begs the sincere question, are all optimizers currently minimizers? |
||
| study.optimize(self._objective, n_trials=n_trials) | ||
|
|
||
| self.best_score_ = study.best_value | ||
| self.best_params_ = study.best_params | ||
| return study.best_params | ||
|
|
||
| @classmethod | ||
| def get_test_params(cls, parameter_set="default"): | ||
| """Return testing parameter settings for the optimizer.""" | ||
|
|
||
| from hyperactive.experiment.integrations import SklearnCvExperiment | ||
| from sklearn.datasets import load_iris | ||
| from sklearn.svm import SVC | ||
|
|
||
| X, y = load_iris(return_X_y=True) | ||
| sklearn_exp = SklearnCvExperiment(estimator=SVC(), X=X, y=y) | ||
|
|
||
| param_space = { | ||
| "C": (0.01, 10), | ||
| "gamma": (0.0001, 10), | ||
| } | ||
|
|
||
| return [{ | ||
| "param_space": param_space, | ||
| "n_trials": 10, | ||
| "experiment": sklearn_exp, | ||
| }] | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
copy-paste error