This repository has been archived by the owner on Feb 28, 2024. It is now read-only.

Commit 477c386
Make number of random samples configurable
betatim committed Apr 20, 2016
1 parent f44f8b7 commit 477c386
Showing 4 changed files with 59 additions and 27 deletions.
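The new `n_start` parameter controls how many random evaluations seed the surrogate model before any model-guided iterations run. A minimal sketch of the updated call, assuming the signature introduced in skopt/gbrt_opt.py below and the `branin` benchmark shipped with the repository:

from skopt.benchmarks import branin
from skopt.gbrt_opt import gbrt_minimize

# maxiter counts *all* evaluations of the objective: the first
# n_start are random draws, the remaining 40 are model-guided.
res = gbrt_minimize(branin, [[-5, 10], [0, 15]],
                    maxiter=50, n_start=10, n_points=20,
                    random_state=1)
print(res.x, res.fun)  # best point found and its function value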
9 changes: 4 additions & 5 deletions examples/plot_gbrt_minimize_1d.py
@@ -1,7 +1,7 @@
"""
======================================================================
Plot lower expected improvement as a function of number of iterations.
======================================================================
================================================================
Plot expected improvement as a function of number of iterations.
================================================================
"""
print(__doc__)
import numpy as np
@@ -21,8 +21,7 @@
 row_no = 6

 res = gbrt_minimize(
-    bench3, bounds, maxiter=6,
-    random_state=1)
+    bench3, bounds, maxiter=6, n_start=1, random_state=1)
 best_xs = res.x_iters.ravel()
 best_ys = res.func_vals.ravel()
 models = res.models
3 changes: 1 addition & 2 deletions examples/plot_gbrt_minimize_2d.py
@@ -18,8 +18,7 @@


 res = gbrt_minimize(
-    branin, bounds, maxiter=200,
-    random_state=1)
+    branin, bounds, maxiter=200, random_state=1)

 model = res.models[-1]
 opt_points = res.x_iters
41 changes: 30 additions & 11 deletions skopt/gbrt_opt.py
@@ -9,7 +9,7 @@
 from sklearn.base import clone
 from sklearn.utils import check_random_state

-from .gbt import GradientBoostingQuantileRegressor
+from .gbrt import GradientBoostingQuantileRegressor
 from .utils import extract_bounds

@@ -59,7 +59,7 @@ def _random_point(lower, upper, n_points=1, random_state=None):


 def gbrt_minimize(func, bounds, base_estimator=None, maxiter=100,
-                  random_state=None):
+                  n_points=20, n_start=10, random_state=None):
     """Sequential optimisation using gradient boosted trees.

     Gradient boosted regression trees are used to model the (very)
@@ -82,7 +82,17 @@ def gbrt_minimize(func, bounds, base_estimator=None, maxiter=100,
         The regressor to use as surrogate model

     maxiter: int, default 100
-        Number of iterations used to find the minimum.
+        Number of iterations used to find the minimum. This corresponds
+        to the total number of evaluations of `func`. If `n_start` > 0,
+        only `maxiter - n_start` iterations are guided by the model.
+
+    n_start: int, default 10
+        Number of random points to draw before fitting `base_estimator`
+        for the first time. If `n_start >= maxiter` this degenerates
+        to a random search for the minimum.
+
+    n_points: int, default 20
+        Number of points to sample when minimizing the acquisition function.

     random_state: int, RandomState instance, or None (default)
         Set random state to something other than None for reproducible
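Because the implementation below caps the number of random draws with `n_start = min(n_start, maxiter)`, an over-large `n_start` silently turns the call into a pure random search. A sketch of the two regimes, assuming the semantics documented above:

# Mixed regime: 10 random draws, then 90 model-guided iterations.
res = gbrt_minimize(branin, [[-5, 10], [0, 15]], maxiter=100, n_start=10)

# Degenerate regime: n_start is capped to maxiter=5, so every
# evaluation is a random draw and the surrogate is never fitted.
res = gbrt_minimize(branin, [[-5, 10], [0, 15]], maxiter=5, n_start=50)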
@@ -111,18 +121,27 @@ def gbrt_minimize(func, bounds, base_estimator=None, maxiter=100,

     # Record the points and function values evaluated as part of
     # the minimization
-    Xi = np.zeros((maxiter + 1, num_params))
-    yi = np.zeros(maxiter + 1)
+    Xi = np.zeros((maxiter, num_params))
+    yi = np.zeros(maxiter)

-    # Initialize with a random point
-    Xi[0] = _random_point(lower_bounds, upper_bounds, random_state=rng)
-    best_x = Xi[0].ravel()
-    yi[0] = best_y = func(Xi[0])
+    if n_start == 0:
+        raise ValueError("Need at least one starting point.")
+
+    if maxiter == 0:
+        raise ValueError("Need to perform at least one iteration.")
+
+    n_start = min(n_start, maxiter)
+
+    # Initialize with random points
+    Xi[:n_start] = _random_point(
+        lower_bounds, upper_bounds, n_points=n_start, random_state=rng)
+    best_x = Xi[:n_start].ravel()
+    yi[:n_start] = [func(xi) for xi in (Xi[:n_start])]
+    best_y = np.min(yi[:n_start])

     models = []

     # XXX should there be an early stopping criterion?
-    for i in range(1, maxiter + 1):
+    for i in range(n_start, maxiter):
         rgr = clone(base_estimator)
         # only the first i points are meaningful
         rgr.fit(Xi[:i, :], yi[:i])
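The initialisation now draws all starting points in one vectorised call instead of a single `Xi[0]` assignment. A rough NumPy equivalent, assuming `_random_point` samples uniformly within the bounds, as its name and signature suggest:

import numpy as np

rng = np.random.RandomState(1)
lower = np.array([-5.0, 0.0])
upper = np.array([10.0, 15.0])
n_start = 10

# One uniform sample per row, matching the (n_start, num_params)
# block written into Xi[:n_start] above.
X0 = lower + (upper - lower) * rng.rand(n_start, len(lower))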
33 changes: 24 additions & 9 deletions skopt/tests/test_gbrt_opt.py
@@ -5,13 +5,14 @@
 from sklearn.utils.testing import assert_array_equal
 from sklearn.utils.testing import assert_array_almost_equal
 from sklearn.utils.testing import assert_less
+from sklearn.utils.testing import assert_raise_message

 from skopt.benchmarks import bench1
 from skopt.benchmarks import bench2
 from skopt.benchmarks import bench3
 from skopt.benchmarks import branin
 from skopt.benchmarks import hart6
-from skopt.gbt import GradientBoostingQuantileRegressor
+from skopt.gbrt import GradientBoostingQuantileRegressor
 from skopt.gbrt_opt import gbrt_minimize
 from skopt.gbrt_opt import _expected_improvement
 from skopt.utils import extract_bounds
@@ -45,20 +46,34 @@ def test_ei_api():


 def test_no_iterations():
+    assert_raise_message(ValueError, "at least one iteration",
+                         gbrt_minimize,
+                         branin, [[-5, 10], [0, 15]], maxiter=0,
+                         random_state=1)
+
+    assert_raise_message(ValueError, "at least one starting point",
+                         gbrt_minimize,
+                         branin, [[-5, 10], [0, 15]], n_start=0, maxiter=2,
+                         random_state=1)
+
+
+def test_one_iteration():
     result = gbrt_minimize(branin, [[-5, 10], [0, 15]],
-                           maxiter=0, random_state=1)
+                           maxiter=1, random_state=1)

-    assert_almost_equal(result.fun, branin(result.x))
     assert_equal(len(result.models), 0)
+    assert_array_equal(result.x_iters.shape, (1, 2))
+    assert_array_equal(result.func_vals.shape, (1,))
+    assert_array_equal(result.x, result.x_iters[np.argmin(result.func_vals)])
+    assert_almost_equal(result.fun, branin(result.x))


-def test_one_iteration():
+def test_seven_iterations():
     result = gbrt_minimize(branin, [[-5, 10], [0, 15]],
-                           maxiter=1, random_state=1)
+                           n_start=3, maxiter=7, random_state=1)

-    assert_equal(len(result.models), 1)
-    assert_array_equal(result.x_iters.shape, (2, 2))
+    assert_equal(len(result.models), 4)
+    assert_array_equal(result.x_iters.shape, (7, 2))
+    assert_array_equal(result.func_vals.shape, (7,))
+    assert_array_equal(result.x, result.x_iters[np.argmin(result.func_vals)])
     assert_almost_equal(result.fun, branin(result.x))
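The updated counts follow from the loop bounds in skopt/gbrt_opt.py: `for i in range(n_start, maxiter)` fits one surrogate per iteration, so `test_seven_iterations` should see `7 - 3 = 4` models and one row in `x_iters` per objective evaluation. A quick arithmetic check under those assumptions:

# Four model-guided iterations remain after the three random draws.
n_start, maxiter = 3, 7
assert len(range(n_start, maxiter)) == maxiter - n_start == 4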

@@ -73,6 +88,6 @@ def test_gbrt_minimize():
     yield (check_minimize, bench2, -5, [[-6, 6]], 0.05, 75)
     yield (check_minimize, bench3, -0.9, [[-2, 2]], 0.05, 75)
     yield (check_minimize, branin, 0.39, [[-5, 10], [0, 15]],
-           0.1, 150)
+           0.1, 100)
     yield (check_minimize, hart6, -3.32, np.tile((0, 1), (6, 1)),
-           1.0, 150)
+           1.0, 200)
