Skip to content

Commit

Permalink
finalize tests
Browse files Browse the repository at this point in the history
- for single objective deterministic behavior
- single objective constrained
- multi objective constrained
  • Loading branch information
flo-schu authored and schmoelder committed Jun 21, 2023
1 parent 47c571f commit 0d0a857
Show file tree
Hide file tree
Showing 2 changed files with 74 additions and 87 deletions.
2 changes: 1 addition & 1 deletion CADETProcess/optimization/axAdapater.py
Expand Up @@ -373,9 +373,9 @@ def run(self, optimization_problem, x0):

from tests.test_optimizer_behavior import TestAxBehavior
test = TestAxBehavior()
test.test_constrained_moo()
test.test_constrained_soo()
test.test_single_objective()
test.test_multi_objective()

from tests.test_optimizer_ax import TestAxInterface
test = TestAxInterface()
Expand Down
159 changes: 73 additions & 86 deletions tests/test_optimizer_behavior.py
Expand Up @@ -46,6 +46,9 @@ def solution(self) -> tuple:
else:
raise ValueError(f"{self.n_obj} must be >= 1")

def test_if_solved(self, optimizer):
    """Check whether *optimizer* found the known solution of this problem.

    Base-class stub: concrete test problems must override this with
    problem-specific assertions against ``optimizer.results``.

    Raises
    ------
    NotImplementedError
        Always, on the base class.
    """
    raise NotImplementedError

class UnconstrainedRosenTestProblem(TestProblem):
n_var = 2
n_obj = 1
Expand Down Expand Up @@ -88,6 +91,32 @@ def optimal_solution(self):
def bounds(self):
return [(-10, 10), (-10, 10)]

def test_deterministic_ax_behavior(self, optimizer):
    """Verify that the Ax optimizer behaves reproducibly for a fixed seed.

    Two checks are performed:
    1. The first 50 Sobol initialization samples of ``var_0`` match the
       reference values recorded below.
    2. The final optimum matches the recorded reference point.

    The reference arrays were generated once with seed 12345 — if the
    seed, the sampler, or the surrogate/acquisition setup changes, these
    values must be regenerated.
    """
    # test the initialization for sobol sampling under 50 init evals
    # generated with a seed of 12345
    init_var_0_check = np.array([
        8.17163229e+00, -8.18102719e+00, -2.95728210e+00, 2.96545641e+00,
        1.82164460e-02, -6.38032332e-03, -6.05257418e+00, 6.04195843e+00,
        7.41507821e+00, -7.40444580e+00, -1.53658908e+00, 1.52473623e+00,
        4.24156815e+00, -4.24975919e+00, -9.61193968e+00, 9.62135108e+00,
        9.26775865e+00, -9.26263610e+00, -4.67840832e+00, 4.67206532e+00,
        2.34494239e+00, -2.35738920e+00, -6.50430012e+00, 6.51796730e+00,
        5.06405249e+00, -5.07773640e+00, -1.06056336e+00, 1.07302682e+00,
        3.15997608e+00, -3.15361643e+00, -7.90534761e+00, 7.90020833e+00,
        7.56466469e+00, -7.57713027e+00, -3.55995959e+00, 3.57364614e+00,
        6.59365188e-01, -6.54223859e-01, -5.40595261e+00, 5.39959025e+00,
        6.76836131e+00, -6.76202044e+00, -2.17779500e+00, 2.17267510e+00,
        4.84434104e+00, -4.85800616e+00, -9.00491467e+00, 9.01735876e+00,
        9.91569279e+00, -9.90383729e+00
    ])
    # NOTE(review): assumes exp_to_df returns one row per trial in
    # evaluation order — confirm against the installed ax version.
    init_var_0 = exp_to_df(optimizer.ax_experiment).loc[0:49,"var_0"].values
    np.testing.assert_almost_equal(init_var_0_check, init_var_0)

    # test reproducibility of ax with GP+UC(analytic)
    x_check = np.array([[-0.27245758, -0.70684964]])
    x = optimizer.results.x
    np.testing.assert_almost_equal(x, x_check)

class ConstrainedSooTestProblem(TestProblem):
n_var = 2
n_obj = 1
Expand All @@ -107,6 +136,14 @@ def bounds(self):
def optimal_solution(self):
return -3, np.array((-1, 2)).reshape((1,2))

def test_if_solved(self, optimizer):
    """Assert the optimizer converged to this problem's known optimum.

    Compares both the best objective value and the best point against
    ``self.solution()`` up to numpy's default almost-equal precision.
    """
    expected_f, expected_x = self.solution()
    found_x = optimizer.results.x
    found_f = optimizer.results.f

    np.testing.assert_almost_equal(found_f - expected_f, 0)
    np.testing.assert_almost_equal(found_x - expected_x, 0)

class MooTestProblem(TestProblem):
n_var = 2
n_obj = 2
Expand All @@ -127,7 +164,7 @@ def bounds(self):
return [(1,5), (0,3)]

@property
def constraints(self):
def linear_constraints(self):
return [
(['var_0', 'var_1'], [-1, -1], -3),
(['var_0', 'var_1'], [ 1, -1], 5)
Expand All @@ -142,6 +179,28 @@ def pareto_front(self):

return F, X

def test_if_solved(self, optimizer):
    """Assert every reported solution lies close to the true Pareto front.

    For each point returned by the optimizer, the coordinate-wise
    distance to the nearest point of the reference solution must fall
    below ``eps`` — checked in objective space (``results.f``) and in
    parameter space (``results.x_untransformed``).
    """
    # tolerance for the distance to the nearest reference point
    eps = 0.05

    f_true, x_true = self.solution()
    F = optimizer.results.f
    X = optimizer.results.x_untransformed

    # each found objective vector must be near some point of the front
    for f_i in F:
        min_distance = np.min(np.abs(f_true - f_i), axis=0)
        np.testing.assert_array_less(min_distance, eps)

    # each found parameter vector must be near some point of the set
    for x_i in X:
        min_distance = np.min(np.abs(x_true - x_i), axis=0)
        np.testing.assert_array_less(min_distance, eps)


def setup_so_optimizer_ax(n_init_evals, n_max_evals):
from botorch.acquisition.analytic import UpperConfidenceBound
from botorch.models.gp_regression import FixedNoiseGP
Expand All @@ -167,100 +226,28 @@ def generate_optimization_problem(problem):
@unittest.skipUnless(ax_imported, "ax package is not installed")
class TestAxBehavior(unittest.TestCase):

def test_deterministic_behavior(self):
# not sure if this problem should be here
problem = UnconstrainedRosenTestProblem()
op = generate_optimization_problem(problem=problem)
optimizer = setup_so_optimizer_ax(50, 53)
optimizer.optimize(optimization_problem=op)
problem.test_deterministic_ax_behavior(optimizer=optimizer)

def test_constrained_soo(self):
problem = ConstrainedSooTestProblem()
op = generate_optimization_problem(problem=problem)
optimizer = setup_so_optimizer_ax(50, 55)
optimizer.optimize(optimization_problem=op)
problem.test_if_solved(optimizer=optimizer)

f_true, x_true = problem.solution()
x = optimizer.results.x
f = optimizer.results.f


np.testing.assert_almost_equal(f-f_true, 0)
np.testing.assert_almost_equal(x-x_true, 0)


def test_single_objective(self):

problem = UnconstrainedRosenTestProblem()
def test_constrained_moo(self):
problem = MooTestProblem()
op = generate_optimization_problem(problem=problem)
optimizer = setup_so_optimizer_ax(50, 53)
optimizer = setup_so_optimizer_ax(50, 55)
optimizer.optimize(optimization_problem=op)
problem.test_if_solved(optimizer=optimizer)

# test the initialization for sobol sampling under 50 init evals
# generated with a seed of 12345
init_var_0_check = np.array([
8.17163229e+00, -8.18102719e+00, -2.95728210e+00, 2.96545641e+00,
1.82164460e-02, -6.38032332e-03, -6.05257418e+00, 6.04195843e+00,
7.41507821e+00, -7.40444580e+00, -1.53658908e+00, 1.52473623e+00,
4.24156815e+00, -4.24975919e+00, -9.61193968e+00, 9.62135108e+00,
9.26775865e+00, -9.26263610e+00, -4.67840832e+00, 4.67206532e+00,
2.34494239e+00, -2.35738920e+00, -6.50430012e+00, 6.51796730e+00,
5.06405249e+00, -5.07773640e+00, -1.06056336e+00, 1.07302682e+00,
3.15997608e+00, -3.15361643e+00, -7.90534761e+00, 7.90020833e+00,
7.56466469e+00, -7.57713027e+00, -3.55995959e+00, 3.57364614e+00,
6.59365188e-01, -6.54223859e-01, -5.40595261e+00, 5.39959025e+00,
6.76836131e+00, -6.76202044e+00, -2.17779500e+00, 2.17267510e+00,
4.84434104e+00, -4.85800616e+00, -9.00491467e+00, 9.01735876e+00,
9.91569279e+00, -9.90383729e+00
])
init_var_0 = exp_to_df(optimizer.ax_experiment).loc[0:49,"var_0"].values
np.testing.assert_almost_equal(init_var_0_check, init_var_0)

# test reproducibility of ax with GP+UC(analytic)
x_check = np.array([[-0.27245758, -0.70684964]])
x = optimizer.results.x
np.testing.assert_almost_equal(x, x_check)


def test_multi_objective(self):
from botorch.acquisition.analytic import UpperConfidenceBound
from botorch.models.gp_regression import FixedNoiseGP
from pymoo.problems.multi import WeldedBeam, Truss2D


problem = MooTestProblem()
# test_problem = WeldedBeam()
# # has 4 constraints, some of which are linear

op = setup_optimization_problem(
n_vars=problem.n_var,
n_obj=problem.n_obj,
n_lincon=problem.n_lincon,
lincons=problem.constraints,
obj_fun=problem.objective_function,
bounds=problem.bounds,
)

optimizer = cop.AxInterface(
n_init_evals=50,
n_max_evals=100,
seed=12345,
acquisition_fn=UpperConfidenceBound,
surrogate_model=FixedNoiseGP
)

optimizer.optimize(
optimization_problem=op,
save_results=False
)

f1_true, f2_true = problem.pareto_front().T
f1, f2 = optimizer.results.f.T
x1, x2 = optimizer.results.x_untransformed.T

if True:
from matplotlib import pyplot as plt
plt.plot(f1_true, f2_true, color="black")
plt.scatter(f1, f2, color="tab:blue")
plt.show()

# test reproducibility of ax with GP+UC(analytic)
x_check = np.array([[-0.27245758, -0.70684964]])
x = optimizer.results.x
np.testing.assert_almost_equal(x, x_check)


if __name__ == '__main__':
Expand Down

0 comments on commit 0d0a857

Please sign in to comment.