Add step parameter to float hyperparameters
- if set to None, no discretization is performed (default behaviour)
- if specified in suggest_with_optuna(), it overrides the step attribute
- if not specified, the step attribute is used (which can be None or a float); see the usage sketch below
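
In use, the three behaviours look like this. This is a minimal sketch, not part of the commit: the import path for FloatHyperparameter is assumed (the first changed file's path is not shown in this diff), and the call signature is inferred from the docstring below.

    import optuna

    # assumed import path, by analogy with the test file path below
    from discrete_optimization.generic_tools.hyperparameters.hyperparameter import (
        FloatHyperparameter,
    )

    coeff = FloatHyperparameter("coeff", low=-1.0, high=1.0, default=1.0, step=0.25)

    def objective(trial: optuna.Trial) -> float:
        # step not passed: falls back to self.step, so values come from
        # the grid {-1.0, -0.75, ..., 1.0}
        x = coeff.suggest_with_optuna(trial=trial)
        # step explicitly None: discretization disabled, continuous sampling
        y = coeff.suggest_with_optuna(trial=trial, step=None, prefix="continuous_")
        # step overridden: a finer grid than the attribute, for this parameter only
        z = coeff.suggest_with_optuna(trial=trial, step=0.1, prefix="fine_")
        return x + y + z

    optuna.create_study().optimize(objective, n_trials=10)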
nhuet authored and g-poveda committed Jun 24, 2024
1 parent 94a3a1a commit 066780d
Showing 2 changed files with 42 additions and 2 deletions.
@@ -181,6 +181,9 @@ class FloatHyperparameter(Hyperparameter):
     """
 
+    step: Optional[float] = None
+    """step to discretize if not None."""
+
     log: bool = False
     """Whether to sample the value in a logarithmic scale."""
 
@@ -226,6 +229,9 @@ def suggest_with_optuna(
             low: can be used to restrict lower bound
             high: can be used to restrict upper bound
             log: whether to sample the value in a logarithmic scale
+            step: step of discretization if specified.
+                If explicitly set to None, no discretization is performed.
+                By default, self.step is used (and thus discretization happens only if self.step is not None).
             prefix: prefix to add to the corresponding optuna parameter name
                 (useful for disambiguating hyperparameters from subsolvers in case of meta-solvers)
             **kwargs: passed to `trial.suggest_float()`
@@ -239,8 +245,12 @@
             high = self.high
         if log is None:
             log = self.log
+        if "step" in kwargs:
+            step = kwargs.pop("step")
+        else:
+            step = self.step
         return trial.suggest_float(
-            name=prefix + self.name, low=low, high=high, log=log, **kwargs
+            name=prefix + self.name, low=low, high=high, log=log, step=step, **kwargs  # type: ignore
         )
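
The `"step" in kwargs` check, rather than a plain `step=None` keyword default, is what keeps the three cases distinguishable: with a regular default, "step not passed" and "step passed as None" would look identical inside the method. A standalone sketch of the same resolution logic (resolve_step is a hypothetical helper, not part of the library):

    from typing import Optional

    def resolve_step(attr_step: Optional[float], **kwargs) -> Optional[float]:
        # Mirrors the logic added above: an explicit step, even None, wins
        # over the attribute; otherwise the attribute is the fallback.
        if "step" in kwargs:
            return kwargs.pop("step")
        return attr_step

    assert resolve_step(0.25) == 0.25             # not specified: attribute used
    assert resolve_step(0.25, step=None) is None  # explicit None: discretization off
    assert resolve_step(0.25, step=0.5) == 0.5    # explicit value: overrides attribute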


32 changes: 31 additions & 1 deletion tests/generic_tools/hyperparameters/test_hyperparameter.py
@@ -33,7 +33,7 @@ class BigMethod(Enum):
 class DummySolver(SolverDO):
     hyperparameters = [
         IntegerHyperparameter("nb", low=0, high=2, default=1),
-        FloatHyperparameter("coeff", low=-1.0, high=1.0, default=1.0),
+        FloatHyperparameter("coeff", low=-1.0, high=1.0, default=1.0, step=0.25),
         CategoricalHyperparameter("use_it", choices=[True, False], default=True),
         EnumHyperparameter("method", enum=Method, default=Method.GREEDY),
     ]
@@ -255,6 +255,36 @@ def objective(trial: optuna.Trial) -> float:
     assert len(study.trials) == 2 * 2 * 5 * 1
 
 
+def test_suggest_with_optuna_default_float_step():
+    def objective(trial: optuna.Trial) -> float:
+        # hyperparameters for the chosen solver
+        suggested_hyperparameters_kwargs = (
+            DummySolver.suggest_hyperparameters_with_optuna(
+                trial=trial,
+                kwargs_by_name={
+                    "nb": dict(high=1),
+                    "use_it": dict(choices=[True]),
+                },
+            )
+        )
+        assert len(suggested_hyperparameters_kwargs) == 4
+        assert isinstance(suggested_hyperparameters_kwargs["method"], Method)
+        assert 0 <= suggested_hyperparameters_kwargs["nb"]
+        assert 1 >= suggested_hyperparameters_kwargs["nb"]
+        assert -1.0 <= suggested_hyperparameters_kwargs["coeff"]
+        assert 1.0 >= suggested_hyperparameters_kwargs["coeff"]
+        assert suggested_hyperparameters_kwargs["use_it"] is True
+
+        return 0.0
+
+    study = optuna.create_study(
+        sampler=optuna.samplers.BruteForceSampler(),
+    )
+    study.optimize(objective)
+
+    assert len(study.trials) == 2 * 2 * 9 * 1
+
+
 def test_suggest_with_optuna_with_choices_dict():
     def objective(trial: optuna.Trial) -> float:
         # hyperparameters for the chosen solver
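
The expected trial count in the new test above follows from the brute-forced grid (assuming Method has two members, which the matching factor of 2 suggests): nb contributes 2 values (0 and 1 after the high=1 restriction), method 2, use_it 1 (restricted to [True]), and coeff 9, since step=0.25 over [-1.0, 1.0] yields 9 grid points:

    n_coeff = int((1.0 - (-1.0)) / 0.25) + 1  # 9 points on the step-0.25 grid
    assert 2 * 2 * n_coeff * 1 == 36          # matches len(study.trials) above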
