Skip to content

Commit

Permalink
UPDATE: bo
Browse files Browse the repository at this point in the history
  • Loading branch information
jungtaekkim committed Jun 23, 2018
1 parent 1029394 commit 30cbebe
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 18 deletions.
53 changes: 41 additions & 12 deletions bayeso/bo.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# bo
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: June 20, 2018
# last updated: June 23, 2018

import numpy as np
from scipy.optimize import minimize
Expand All @@ -13,18 +13,24 @@
from bayeso import constants


# TODO: I am not sure, but flatten() should be replaced.
class BO():
def __init__(self, arr_range,
str_cov=constants.STR_GP_COV,
str_acq=constants.STR_BO_ACQ,
is_ard=True,
prior_mu=None,
verbose=False,
debug=False,
):
# TODO: use is_ard
# TODO: use is_ard.
# TODO: add debug cases.
assert isinstance(arr_range, np.ndarray)
assert isinstance(str_cov, str)
assert isinstance(str_acq, str)
assert isinstance(is_ard, bool)
assert isinstance(verbose, bool)
assert isinstance(debug, bool)
assert callable(prior_mu) or prior_mu is None
assert len(arr_range.shape) == 2
assert arr_range.shape[1] == 2
Expand All @@ -34,13 +40,19 @@ def __init__(self, arr_range,
self.str_cov = str_cov
self.str_acq = str_acq
self.is_ard = is_ard
self.verbose = verbose
self.debug = debug
self.prior_mu = prior_mu

def _get_initial_grid(self, int_grid=constants.NUM_BO_GRID):
    """Build the grid of initial candidate points spanning ``self.arr_range``.

    :param int_grid: number of grid points per dimension
        (defaults to ``constants.NUM_BO_GRID``).
    :returns: array of grid points produced by ``utils_bo.get_grid``.
    """
    assert isinstance(int_grid, int)

    # Delegate grid construction to the shared utility so all
    # initialization strategies draw from the same search range.
    return utils_bo.get_grid(self.arr_range, int_grid)

def _get_initial_uniform(self, int_samples, int_seed=None):
assert isinstance(int_seed, int) or int_seed is None

if int_seed is not None:
np.random.seed(int_seed)
list_initials = []
Expand All @@ -53,6 +65,8 @@ def _get_initial_uniform(self, int_samples, int_seed=None):
return arr_initials

def _get_initial_sobol(self, int_samples, int_seed=None):
assert isinstance(int_seed, int) or int_seed is None

if int_seed is None:
int_seed = np.random.randint(0, 10000)
arr_samples = sobol_seq.i4_sobol_generate(self.num_dim, int_samples, int_seed)
Expand All @@ -64,10 +78,15 @@ def _get_initial_latin(self, int_samples):

def _get_initial(self, str_initial_method,
fun_objective=None,
int_samples=10,
int_seed=None
int_samples=constants.NUM_ACQ_SAMPLES,
int_seed=None,
):
assert callable(fun_objective) or fun_objective is None
assert isinstance(int_samples, int)
assert isinstance(int_seed, int) or int_seed is None

if str_initial_method == 'grid':
assert fun_objective is not None
arr_initials = self._get_initial_grid()
arr_initials = utils_bo.get_best_acquisition(arr_initials, fun_objective)
elif str_initial_method == 'uniform':
Expand All @@ -78,6 +97,9 @@ def _get_initial(self, str_initial_method,
raise NotImplementedError('_get_initial: latin')
else:
raise ValueError('_get_initial: missing condition for str_initial_method')
if self.debug:
print('DEBUG: _get_initial: arr_initials')
print(arr_initials)
return arr_initials

def _optimize_objective(self, fun_acquisition, X_train, Y_train, X_test, cov_X_X, inv_cov_X_X, hyps):
Expand All @@ -86,7 +108,7 @@ def _optimize_objective(self, fun_acquisition, X_train, Y_train, X_test, cov_X_X
acquisitions = fun_acquisition(pred_mean.flatten(), pred_std.flatten(), Y_train=Y_train)
return acquisitions

def _optimize(self, fun_objective, str_initial_method='sobol', verbose=False):
def _optimize(self, fun_objective, str_initial_method):
list_bounds = []
for elem in self.arr_range:
list_bounds.append(tuple(elem))
Expand All @@ -98,18 +120,25 @@ def _optimize(self, fun_objective, str_initial_method='sobol', verbose=False):
x0=arr_initial,
bounds=list_bounds,
method=constants.STR_OPTIMIZER_METHOD_BO,
options={'disp': verbose}
options={'disp': self.verbose}
)
list_next_point.append(next_point.x)
if verbose:
print('INFORM: optimized result for acq. ', next_point.x)
if self.verbose:
print('INFORM: _optimize: optimized result for acq. ', next_point.x)
next_point = utils_bo.get_best_acquisition(np.array(list_next_point), fun_objective)
return next_point.flatten()

def optimize(self, X_train, Y_train, is_grid_optimized=False, verbose=False):
cov_X_X, inv_cov_X_X, hyps = gp.get_optimized_kernel(X_train, Y_train, self.prior_mu, self.str_cov, verbose=verbose)
def optimize(self, X_train, Y_train, str_initial_method='sobol'):
assert isinstance(X_train, np.ndarray)
assert isinstance(Y_train, np.ndarray)
assert isinstance(str_initial_method, str)
assert len(X_train.shape) == 2
assert len(Y_train.shape) == 2
assert Y_train.shape[1] == 1
assert X_train.shape[0] == Y_train.shape[0]

cov_X_X, inv_cov_X_X, hyps = gp.get_optimized_kernel(X_train, Y_train, self.prior_mu, self.str_cov, verbose=self.verbose)

# NEED: to add acquisition function
if self.str_acq == 'pi':
fun_acquisition = acquisition.pi
elif self.str_acq == 'ei':
Expand All @@ -120,5 +149,5 @@ def optimize(self, X_train, Y_train, is_grid_optimized=False, verbose=False):
raise ValueError('optimize: missing condition for self.str_acq.')

fun_objective = lambda X_test: -1.0 * constants.MULTIPLIER_ACQ * self._optimize_objective(fun_acquisition, X_train, Y_train, X_test, cov_X_X, inv_cov_X_X, hyps)
next_point = self._optimize(fun_objective, verbose=verbose)
next_point = self._optimize(fun_objective, str_initial_method=str_initial_method)
return next_point, cov_X_X, inv_cov_X_X, hyps
3 changes: 2 additions & 1 deletion bayeso/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@

NUM_BO_GRID = 50
NUM_BO_RANDOM = 1000
NUM_ACQ_SAMPLES = 50

MULTIPLIER_ACQ = 1000.0
MULTIPLIER_ACQ = 10.0

TIME_PAUSE = 2.0
RANGE_SHADE = 1.96
Expand Down
3 changes: 1 addition & 2 deletions bayeso/gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@ def get_optimized_kernel(X_train, Y_train, prior_mu, str_cov, str_optimizer_meth
assert len(X_train.shape) == 2
assert len(Y_train.shape) == 2
assert X_train.shape[0] == Y_train.shape[0]
assert str_optimizer_method == 'L-BFGS-B'

prior_mu_train = get_prior_mu(prior_mu, X_train)
num_dim = X_train.shape[1]
Expand All @@ -79,7 +78,7 @@ def get_optimized_kernel(X_train, Y_train, prior_mu, str_cov, str_optimizer_meth
result_optimized = result_optimized.x
hyps = utils_covariance.restore_hyps(str_cov, result_optimized)
if verbose:
print('INFORM: optimized result for gpr ', hyps)
print('INFORM: get_optimized_kernel: optimized result for gpr ', hyps)
cov_X_X, inv_cov_X_X = get_kernels(X_train, hyps, str_cov)
return cov_X_X, inv_cov_X_X, hyps

Expand Down
2 changes: 1 addition & 1 deletion examples/example_bo_ei.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def main():
X_test = np.reshape(X_test, (400, 1))
for ind_ in range(1, 20+1):
Y_train = fun_target(X_train)
next_x, cov_X_X, inv_cov_X_X, hyps = model_bo.optimize(X_train, fun_target(X_train))
next_x, cov_X_X, inv_cov_X_X, hyps = model_bo.optimize(X_train, fun_target(X_train), str_initial_method='grid')
mu_test, sigma_test = gp.predict_test_(X_train, Y_train, X_test, cov_X_X, inv_cov_X_X, hyps)
acq_test = acquisition.ei(mu_test.flatten(), sigma_test.flatten(), Y_train)
acq_test = np.reshape(acq_test, (acq_test.shape[0], 1))
Expand Down
2 changes: 0 additions & 2 deletions tests/test_gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,6 @@ def test_get_optimized_kernel():
gp.get_optimized_kernel(X, Y, prior_mu, 'abc')
with pytest.raises(AssertionError) as error:
gp.get_optimized_kernel(X, Y, prior_mu, 'se', str_optimizer_method=1)
with pytest.raises(AssertionError) as error:
gp.get_optimized_kernel(X, Y, prior_mu, 'se', str_optimizer_method='abc')

def test_predict_test_():
np.random.seed(42)
Expand Down

0 comments on commit 30cbebe

Please sign in to comment.