
Merge pull request #5365 from nabenabe0928/hotfix/fix-prob-zero-error-in-gp-sampler

Add a unit test for convergence of acquisition function in `GPSampler`
not522 committed May 1, 2024
2 parents 44611a4 + 87d3fa4 commit e73a620
Showing 2 changed files with 50 additions and 2 deletions.
7 changes: 5 additions & 2 deletions optuna/_gp/optim_mixed.py
@@ -262,7 +262,7 @@ def local_search_mixed(
             # Parameters not changed from the beginning.
             return (best_normalized_params, best_fval)

-    _logger.warn("local_search_mixed: Local search did not converge.")
+    _logger.warning("local_search_mixed: Local search did not converge.")
     return (best_normalized_params, best_fval)
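
For context on this one-line change: `Logger.warn` is a long-deprecated alias of `Logger.warning` in the standard `logging` module, and CPython emits a `DeprecationWarning` whenever it is called. A standalone snippet (not part of this patch) that makes the deprecation visible:

import logging
import warnings

logger = logging.getLogger("demo")  # arbitrary logger name for the demo
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    logger.warn("old spelling")       # deprecated alias; still logs the message
print(caught[0].category)             # <class 'DeprecationWarning'>
logger.warning("preferred spelling")  # the supported API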


@@ -298,10 +298,13 @@ def optimize_acqf_mixed(
     probs = np.exp(f_vals - f_vals[max_i])
     probs[max_i] = 0.0  # We already picked the best param, so remove it from roulette.
     probs /= probs.sum()
+    n_non_zero_probs_improvement = np.count_nonzero(probs > 0.0)
     # n_additional_warmstart becomes smaller when study starts to converge.
     n_additional_warmstart = min(
-        n_local_search - len(warmstart_normalized_params_array) - 1, np.count_nonzero(probs > 0.0)
+        n_local_search - len(warmstart_normalized_params_array) - 1, n_non_zero_probs_improvement
     )
+    if n_additional_warmstart == n_non_zero_probs_improvement:
+        _logger.warning("Study already converged, so the number of local search is reduced.")
     chosen_idxs = np.array([max_i])
     if n_additional_warmstart > 0:
         additional_idxs = rng.choice(
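
Why the cap matters: `probs` is built from differences of log-EI values, and once a study converges most candidates sit so far below the best one that `np.exp` underflows to exactly 0.0. Assuming the truncated `rng.choice(...)` call above samples without replacement with `p=probs` (the configuration in which NumPy enforces this check), requesting more indices than there are non-zero probabilities raises a `ValueError`. A minimal standalone sketch of the failure mode, with made-up acquisition values:

import numpy as np

# Toy log-EI values after convergence: one clear winner, one mediocre
# candidate, and three candidates so far below the best that
# np.exp(f - f_max) underflows to exactly 0.0 (np.exp(x) == 0.0 for x < ~-745).
f_vals = np.array([0.0, -1.0, -800.0, -900.0, -1000.0])
max_i = int(np.argmax(f_vals))

probs = np.exp(f_vals - f_vals[max_i])  # [1.0, 0.368, 0.0, 0.0, 0.0]
probs[max_i] = 0.0  # the best index is chosen directly, not via the roulette
probs /= probs.sum()
print(np.count_nonzero(probs > 0.0))  # 1 -> only one candidate left in the roulette

rng = np.random.RandomState(42)
try:
    # Two without-replacement draws, but only one non-zero probability:
    rng.choice(len(f_vals), size=2, replace=False, p=probs)
except ValueError as e:
    print(e)  # Fewer non-zero entries in p than size

Capping `n_additional_warmstart` at `n_non_zero_probs_improvement`, as the patch does, keeps the requested sample size feasible, and the new warning records that the local-search budget was reduced because the study has effectively converged.
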
45 changes: 45 additions & 0 deletions tests/samplers_tests/test_gp.py
@@ -0,0 +1,45 @@
from __future__ import annotations

from _pytest.logging import LogCaptureFixture
import numpy as np

import optuna
import optuna._gp.acqf as acqf
import optuna._gp.gp  # imported explicitly so the optuna._gp.gp access below does not rely on transitive imports
import optuna._gp.optim_mixed as optim_mixed
import optuna._gp.prior as prior
import optuna._gp.search_space as gp_search_space


def test_after_convergence(caplog: LogCaptureFixture) -> None:
    # A large number of optimal trials causes instability in the kernel inversion,
    # leading to instability in the variance calculation.
X_uniform = [(i + 1) / 10 for i in range(10)]
X_uniform_near_optimal = [(i + 1) / 1e5 for i in range(20)]
X_optimal = [0.0] * 10
X = np.array(X_uniform + X_uniform_near_optimal + X_optimal)
score_vals = -(X - np.mean(X)) / np.std(X)
search_space = gp_search_space.SearchSpace(
scale_types=np.array([gp_search_space.ScaleType.LINEAR]),
bounds=np.array([[0.0, 1.0]]),
steps=np.zeros(1, dtype=float),
)
kernel_params = optuna._gp.gp.fit_kernel_params(
X=X[:, np.newaxis],
Y=score_vals,
is_categorical=np.array([False]),
log_prior=prior.default_log_prior,
minimum_noise=prior.DEFAULT_MINIMUM_NOISE_VAR,
deterministic_objective=False,
)
acqf_params = acqf.create_acqf_params(
acqf_type=acqf.AcquisitionFunctionType.LOG_EI,
kernel_params=kernel_params,
search_space=search_space,
X=X[:, np.newaxis],
Y=score_vals,
)
caplog.clear()
optuna.logging.enable_propagation()
optim_mixed.optimize_acqf_mixed(acqf_params, rng=np.random.RandomState(42))
# len(caplog.text) > 0 means the optimization has already converged.
assert len(caplog.text) > 0, "Did you change the kernel implementation?"
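
A note on the capture mechanism used above: Optuna's library logger does not propagate records to the root logger by default, so `optuna.logging.enable_propagation()` is needed before pytest's `caplog` fixture (which listens on the root logger) can see the convergence warning emitted inside `optimize_acqf_mixed`. A minimal sketch of the same pattern outside pytest, using only documented `optuna.logging` calls:

import logging
import optuna

logging.basicConfig(level=logging.WARNING)  # give the root logger a handler
optuna.logging.enable_propagation()         # forward Optuna records to the root logger
optuna.logging.disable_default_handler()    # optional: avoid duplicate stderr output
# From here on, warnings emitted by Optuna internals (including the
# "Study already converged..." message added in this commit) reach the
# root logger's handlers.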
