Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add log and step attributes to integer and float hyperparameter #239

Merged
merged 2 commits into from
Jun 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -94,9 +94,13 @@ class IntegerHyperparameter(Hyperparameter):
If None, the hyperparameter value has no upper bound.

"""

step: int = 1
"""step to discretize."""

log: bool = False
"""Whether to sample the value in a logarithmic scale."""

default: Optional[int] = None
"""Default value for the hyperparameter.

Expand Down Expand Up @@ -129,6 +133,7 @@ def suggest_with_optuna(
low: Optional[int] = None,
high: Optional[int] = None,
step: Optional[int] = None,
log: Optional[bool] = None,
prefix: str = "",
**kwargs: Any,
) -> Any:
Expand All @@ -139,6 +144,7 @@ def suggest_with_optuna(
low: can be used to restrict lower bound
high: can be used to restrict upper bound
step: can be used to discretize by a given step
log: whether to sample the value in a logarithmic scale
prefix: prefix to add to optuna corresponding parameter name
(useful for disambiguating hyperparameters from subsolvers in case of meta-solvers)
**kwargs: passed to `trial.suggest_int()`
Expand All @@ -152,7 +158,9 @@ def suggest_with_optuna(
high = self.high
if step is None:
step = self.step
return trial.suggest_int(name=prefix + self.name, low=low, high=high, step=step, **kwargs) # type: ignore
if log is None:
log = self.log
return trial.suggest_int(name=prefix + self.name, low=low, high=high, step=step, log=log, **kwargs) # type: ignore


@dataclass
Expand All @@ -173,6 +181,12 @@ class FloatHyperparameter(Hyperparameter):

"""

step: Optional[float] = None
"""step to discretize if not None."""

log: bool = False
"""Whether to sample the value in a logarithmic scale."""

default: Optional[float] = None
"""Default value for the hyperparameter.

Expand Down Expand Up @@ -204,6 +218,7 @@ def suggest_with_optuna(
trial: optuna.trial.Trial,
low: Optional[float] = None,
high: Optional[float] = None,
log: Optional[bool] = None,
prefix: str = "",
**kwargs: Any,
) -> Any:
Expand All @@ -213,6 +228,10 @@ def suggest_with_optuna(
trial: optuna Trial used for choosing the hyperparameter value
low: can be used to restrict lower bound
high: can be used to restrict upper bound
log: whether to sample the value in a logarithmic scale
step: step of discretization if specified.
If explicitly set to None, no discretization is performed.
By default, use self.step (and thus default discretization only if self.step not None)
prefix: prefix to add to optuna corresponding parameter name
(useful for disambiguating hyperparameters from subsolvers in case of meta-solvers)
**kwargs: passed to `trial.suggest_float()`
Expand All @@ -224,8 +243,14 @@ def suggest_with_optuna(
low = self.low
if high is None:
high = self.high
if log is None:
log = self.log
if "step" in kwargs:
step = kwargs.pop("step")
else:
step = self.step
return trial.suggest_float(
name=prefix + self.name, low=low, high=high, **kwargs
name=prefix + self.name, low=low, high=high, log=log, step=step, **kwargs # type: ignore
)


Expand Down
32 changes: 31 additions & 1 deletion tests/generic_tools/hyperparameters/test_hyperparameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class BigMethod(Enum):
class DummySolver(SolverDO):
hyperparameters = [
IntegerHyperparameter("nb", low=0, high=2, default=1),
FloatHyperparameter("coeff", low=-1.0, high=1.0, default=1.0),
FloatHyperparameter("coeff", low=-1.0, high=1.0, default=1.0, step=0.25),
CategoricalHyperparameter("use_it", choices=[True, False], default=True),
EnumHyperparameter("method", enum=Method, default=Method.GREEDY),
]
Expand Down Expand Up @@ -255,6 +255,36 @@ def objective(trial: optuna.Trial) -> float:
assert len(study.trials) == 2 * 2 * 5 * 1


def test_suggest_with_optuna_default_float_step():
    """Check that the float hyperparameter's default step discretizes the optuna search space.

    ``coeff`` is declared with ``step=0.25`` over ``[-1.0, 1.0]``, so brute-force
    sampling should enumerate exactly 9 values for it.
    """

    def objective(trial: optuna.Trial) -> float:
        # Suggest hyperparameters for the dummy solver, restricting some ranges
        # so the brute-force enumeration stays small.
        kwargs = DummySolver.suggest_hyperparameters_with_optuna(
            trial=trial,
            kwargs_by_name={
                "nb": dict(high=1),
                "use_it": dict(choices=[True]),
            },
        )
        assert len(kwargs) == 4
        assert isinstance(kwargs["method"], Method)
        assert 0 <= kwargs["nb"] <= 1
        assert -1.0 <= kwargs["coeff"] <= 1.0
        assert kwargs["use_it"] is True

        return 0.0

    study = optuna.create_study(
        sampler=optuna.samplers.BruteForceSampler(),
    )
    study.optimize(objective)

    # 2 methods * 2 nb values * 9 coeff values (step 0.25 over [-1, 1]) * 1 use_it choice
    assert len(study.trials) == 2 * 2 * 9 * 1


def test_suggest_with_optuna_with_choices_dict():
def objective(trial: optuna.Trial) -> float:
# hyperparameters for the chosen solver
Expand Down