WIP: implement hopsy for init sampling
also: print statements for user info;
flo-schu committed Jun 21, 2023
1 parent e9f1ac4 commit 0ca7b54
Showing 1 changed file with 41 additions and 13 deletions.
54 changes: 41 additions & 13 deletions CADETProcess/optimization/axAdapater.py
@@ -134,7 +134,7 @@ class AxInterface(OptimizerBase):
ftol = UnsignedFloat(default=0.0025)
acquisition_fn = Typed(default=qNoisyExpectedImprovement)
surrogate_model = Typed(default=FixedNoiseGP)

_specific_options = [
'n_init_evals', 'n_max_evals', 'seed', 'xtol', 'cvtol', 'ftol',
'acquisition_fn', 'surrogate_model'
@@ -174,7 +174,7 @@ def _setup_linear_constraints(optimizationProblem: OptimizationProblem):
)
parameter_constraints.append(constr)

- return
+ return parameter_constraints

@classmethod
def _setup_searchspace(cls, optimizationProblem):
@@ -235,8 +235,12 @@ def run(self, optimization_problem, x0):
search_space = self._setup_searchspace(self.optimization_problem)
objectives = self._setup_objectives()


if len(objectives) > 1:
is_moo = True
else:
is_moo = False

if is_moo:
optimization_config = ax.MultiObjectiveOptimizationConfig(
objective=ax.MultiObjective(objectives)
)
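The single-objective branch is collapsed in this view; as a rough sketch (an assumption, not code from this commit) it would typically build a plain Ax config:

    else:
        # hypothetical sketch of the collapsed single-objective case
        optimization_config = ax.OptimizationConfig(
            objective=objectives[0],
        )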
@@ -252,15 +256,15 @@
botorch_acqf_class=self.acquisition_fn, # Optional, will use default if unspecified
)

# Alternative: Use a model bridge directly with a botorch model.
# This allows for more control and does not introduce another "magic"
# middle layer in between. But currently I can't get this to work.
# The above is as suggested in the PR
# Model = BoTorchModel(
# acquisition_class=UpperConfidenceBound,
# surrogate=Surrogate(FixedNoiseGP)
# )

# model = TorchModelBridge(
# experiment=self.ax_experiment,
# search_space=search_space,
@@ -269,7 +273,7 @@
# transforms=Cont_X_trans + Y_trans
# )
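For reference, a self-contained sketch of that commented-out alternative (assumptions: the imports, the collapsed data/model arguments, and passing the BoTorch acquisition class via botorch_acqf_class as in Ax's modular interface, which may be why acquisition_class did not work):

    # Hedged sketch only, not part of this commit: direct BoTorchModel + TorchModelBridge.
    from ax.modelbridge.registry import Cont_X_trans, Y_trans
    from ax.modelbridge.torch import TorchModelBridge
    from ax.models.torch.botorch_modular.model import BoTorchModel
    from ax.models.torch.botorch_modular.surrogate import Surrogate
    from botorch.acquisition import UpperConfidenceBound
    from botorch.models import FixedNoiseGP

    botorch_model = BoTorchModel(
        botorch_acqf_class=UpperConfidenceBound,  # assumed keyword; see note above
        surrogate=Surrogate(FixedNoiseGP),
    )
    model_bridge = TorchModelBridge(
        experiment=self.ax_experiment,
        search_space=search_space,
        data=self.ax_experiment.fetch_data(),  # assumed; collapsed in the diff
        model=botorch_model,
        transforms=Cont_X_trans + Y_trans,
    )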


runner = CADETProcessRunner(
optimization_problem=self.optimization_problem)

@@ -280,10 +284,22 @@
runner=runner,
)

# init_samples = self.optimization_problem.create_initial_values(
# n_samples=self.n_init_evals,
# method="chebyshev",
# seed=self.seed + 5641,
# )

# self.optimization_problem.evaluate_objectives_population(
# init_samples, n_cores=1
# )
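A rough sketch of how such pre-drawn initial samples could be registered with the experiment as manually specified trials instead of Sobol draws; the loop body and the variable_names attribute are assumptions, not code from this commit:

    # Hedged sketch, not part of this commit: seed the experiment with precomputed points.
    from ax import Arm

    for x in init_samples:
        parameters = dict(zip(self.optimization_problem.variable_names, map(float, x)))
        trial = self.ax_experiment.new_trial()
        trial.add_arm(Arm(parameters=parameters))
        trial.run()             # evaluated by the CADETProcessRunner attached above
        trial.mark_completed()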




# TODO: termination criteria
print(f"Running Sobol initialization trials...")
with manual_seed(seed=self.seed):
print(f"Running Sobol initialization trials...")
sobol = Models.SOBOL(search_space=self.ax_experiment.search_space)
for i in range(self.n_init_evals):
# Produce a GeneratorRun from the model, which contains proposed arm(s) and other metadata
@@ -307,17 +323,29 @@ def run(self, optimization_problem, x0):

trial_data = trial.fetch_data()


i += 1
while i < self.n_max_evals:
print(f"Running optimization trial {i+1}/{self.n_max_evals}...")
# Reinitialize GP+EI model at each step with updated data.

# this
model = Model(
experiment=self.ax_experiment,
data=self.ax_experiment.fetch_data(),
)
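Model itself is assigned further up (collapsed in this view); judging from the introspection below, it is presumably one of Ax's Models registry factories, roughly:

    # Assumption inferred from the attribute access below, not shown in this diff:
    Model = Models.MOO if is_moo else Models.BOTORCH_MODULAR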

if i == self.n_init_evals:
# Introspect the freshly fitted model once to report which surrogate
# and acquisition function are actually in use.
if is_moo:
srgm = model.model.model._get_name()
acqf = model.model.acqf_constructor.__name__.split("_")[1]
else:
srgm = model.model.surrogate.model._get_name()
acqf = model.model.botorch_acqf_class.__name__
print("Starting Bayesian optimization loop...")
print(f"Surrogate model: {srgm}")
print(f"Acquisition function: {acqf}")

print(f"Running optimization trial {i+1}/{self.n_max_evals}...")

# Generate the next candidate; the proposed arm(s) and metadata
# can be inspected via sample_generator.arms:
sample_generator = model.gen(n=1)
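The rest of the loop body is collapsed here; the usual continuation in Ax's Developer API (a sketch, assumed rather than taken from this commit) attaches the generator run to a trial, evaluates it, and advances the counter:

    # Hedged sketch of the collapsed loop tail, following the standard Ax trial loop.
    trial = self.ax_experiment.new_trial(generator_run=sample_generator)
    trial.run()             # evaluated via the CADETProcessRunner
    trial.mark_completed()
    i += 1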
@@ -353,5 +381,5 @@ def run(self, optimization_problem, x0):
test.test_single_objective()
test.test_single_objective_linear_constraints()
test.test_multi_objective()
#x: [0.23783216 0.50267604 0.43198473], f: [4.81493504]
