Remove some more warnings (#520)
* Suppress warnings in utils tests

* Suppress warnings in surrogate_models tests

* Adjust test tolerance

* Suppress some warnings in test_ego

* Revert rtol change in linalg_solver (wait until Python 3.8 support is dropped)

* Rename RUN_SLOW to RUN_SLOW_TESTS

* Ignore matplotlib non-interactive warnings with pytest

* Switch to Cobyla only if needed

* In tests, just print instead of warning

* Use Cobyla where asked

* Warn and set Cobyla only if TNC is selected

* Set Cobyla for mixed variable problems

* Test mixed variables MFK, MFKPLS

* Remove useless import

* Improve out of bounds warning message

* Check MFK* examples with pytest
relf committed Feb 27, 2024
1 parent ff1e268 commit d757321
Showing 20 changed files with 185 additions and 114 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests_coverage.yml
@@ -39,7 +39,7 @@ jobs:
- name: Test with pytest and coverage
run: |
pip install coverage
RUN_SLOW=1 coverage run --source=smt -m pytest
RUN_SLOW_TESTS=1 coverage run --source=smt -m pytest
- name: Coveralls
uses: AndreMiras/coveralls-python-action@develop
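The renamed variable is read by the unittest skip guards throughout the test suite, as the test_ego.py hunks below show. A minimal standalone sketch of the gating pattern, with a made-up class and test name:

    import os
    import unittest

    class TestExpensive(unittest.TestCase):
        # Opt in from the environment, e.g.  RUN_SLOW_TESTS=1 python -m pytest
        @unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
        def test_long_running(self):
            self.assertTrue(True)  # placeholder for an expensive check

    if __name__ == "__main__":
        unittest.main()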
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -3,3 +3,6 @@ requires = ["setuptools", "wheel", "numpy", "Cython"]

[tool.ruff.lint]
ignore = []

[tool.pytest.ini_options]
filterwarnings = ["ignore:FigureCanvasAgg is non-interactive:UserWarning"]
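This filter silences the warning matplotlib emits when plt.show() is called on the non-interactive Agg backend, which is what the documentation run scripts hit on a headless CI runner. A rough sketch of the triggering scenario (the exact warning wording can vary across matplotlib versions):

    import matplotlib
    matplotlib.use("Agg")  # non-interactive backend, as on a headless CI runner
    import matplotlib.pyplot as plt

    plt.plot([0, 1], [0, 1])
    plt.show()  # UserWarning: FigureCanvasAgg is non-interactive, and thus cannot be shown

With the filterwarnings entry above, the whole pytest session ignores that UserWarning instead of each plotting test suppressing it individually.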
15 changes: 7 additions & 8 deletions smt/applications/mfk.py
@@ -759,9 +759,7 @@ def predict_variances_all_levels(self, X, is_acting=None):
sigma2 = self.optimal_par[0]["sigma2"] / self.y_std**2
MSE[:, 0] = sigma2 * (
# 1 + self.optimal_noise_all[0] - (r_t ** 2).sum(axis=0) + (u_ ** 2).sum(axis=0)
1
- (r_t**2).sum(axis=0)
+ (u_**2).sum(axis=0)
1 - (r_t**2).sum(axis=0) + (u_**2).sum(axis=0)
)

# Calculate recursively kriging variance at level i
@@ -845,17 +843,16 @@ def predict_variances_all_levels(self, X, is_acting=None):
Q_ = (np.dot((yt - np.dot(Ft, beta)).T, yt - np.dot(Ft, beta)))[0, 0]
MSE[:, i] = (
# sigma2_rho * MSE[:, i - 1]
+Q_ / (2 * (self.nt_all[i] - p - q))
+Q_
/ (2 * (self.nt_all[i] - p - q))
# * (1 + self.optimal_noise_all[i] - (r_t ** 2).sum(axis=0))
* (1 - (r_t**2).sum(axis=0))
+ sigma2 * (u_**2).sum(axis=0)
)
else:
MSE[:, i] = sigma2 * (
# 1 + self.optimal_noise_all[i] - (r_t ** 2).sum(axis=0) + (u_ ** 2).sum(axis=0)
1
- (r_t**2).sum(axis=0)
+ (u_**2).sum(axis=0)
1 - (r_t**2).sum(axis=0) + (u_**2).sum(axis=0)
) # + sigma2_rho * MSE[:, i - 1]
if self.options["propagate_uncertainty"]:
MSE[:, i] = MSE[:, i] + sigma2_rho * MSE[:, i - 1]
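These variance hunks only reflow the expressions into single lines; the computed quantity is unchanged. Read off the code, the recursion is, as a sketch with \sigma^2 the scaled process variance:

    MSE_0(x) = \sigma^2 \bigl( 1 - \sum_k r_{t,k}^2 + \sum_k u_k^2 \bigr)
    MSE_i(x) = \frac{Q}{2 (n_i - p - q)} \bigl( 1 - \sum_k r_{t,k}^2 \bigr) + \sigma^2 \sum_k u_k^2

with the else branch reusing the level-0 form, and, when propagate_uncertainty is set,

    MSE_i(x) \leftarrow MSE_i(x) + \sigma_\rho^2 \, MSE_{i-1}(x)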
@@ -983,7 +980,9 @@ def _check_param(self):
raise ValueError(
"MFKPLSK only works with a squared exponential kernel (until we prove the contrary)"
)
if self.options["eval_noise"] or np.max(self.options["noise0"]) > 1e-12:
if (
self.options["eval_noise"] or np.max(self.options["noise0"]) > 1e-12
) and self.options["hyper_opt"] == "TNC":
self.options["hyper_opt"] = "Cobyla"
warnings.warn(
"TNC not available yet for noise handling. Switching to Cobyla"
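A standalone sketch of the guard added above — the helper function and the option dictionary are illustrative only, not part of smt's API, but the option names and the warning text come from the diff:

    import warnings
    import numpy as np

    def ensure_supported_optimizer(options):
        # Noise handling is requested explicitly or through a non-zero noise0.
        noise_requested = options["eval_noise"] or np.max(options["noise0"]) > 1e-12
        # Fall back to Cobyla only when the user actually selected TNC,
        # so an explicit "Cobyla" choice no longer triggers the warning.
        if noise_requested and options["hyper_opt"] == "TNC":
            options["hyper_opt"] = "Cobyla"
            warnings.warn("TNC not available yet for noise handling. Switching to Cobyla")
        return options

    ensure_supported_optimizer({"eval_noise": True, "noise0": [0.0], "hyper_opt": "TNC"})     # warns, switches
    ensure_supported_optimizer({"eval_noise": True, "noise0": [0.0], "hyper_opt": "Cobyla"})  # left untouched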
84 changes: 44 additions & 40 deletions smt/applications/tests/test_ego.py
@@ -86,8 +86,8 @@ def test_function_test_1d(self):

x_opt, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_1d)

self.assertAlmostEqual(18.9, float(x_opt), delta=1)
self.assertAlmostEqual(-15.1, float(y_opt), delta=1)
self.assertAlmostEqual(18.9, x_opt.item(), delta=1)
self.assertAlmostEqual(-15.1, y_opt.item(), delta=1)
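The float(...) to .item() changes in this file avoid the NumPy deprecation warning for converting ndim > 0 arrays to Python scalars. A standalone illustration — the array below is a stand-in, not taken from the test:

    import numpy as np

    y_opt = np.array([[-15.1]])  # optimizers here return optima as arrays
    float(y_opt)   # DeprecationWarning on NumPy >= 1.25:
                   # "Conversion of an array with ndim > 0 to a scalar is deprecated"
    y_opt.item()   # explicit scalar extraction, no warning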

def test_function_test_1d_parallel(self):
n_iter = 3
@@ -108,10 +108,10 @@ def test_function_test_1d_parallel(self):
)
x_opt, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_1d)

self.assertAlmostEqual(18.9, float(x_opt), delta=1)
self.assertAlmostEqual(-15.1, float(y_opt), delta=1)
self.assertAlmostEqual(18.9, x_opt.item(), delta=1)
self.assertAlmostEqual(-15.1, y_opt.item(), delta=1)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_rosenbrock_2D(self):
n_iter = 50
fun = Rosenbrock(ndim=2)
@@ -131,7 +131,7 @@ def test_rosenbrock_2D(self):

x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
self.assertTrue(np.allclose([[1, 1]], x_opt, rtol=0.55))
self.assertAlmostEqual(0.0, float(y_opt), delta=1)
self.assertAlmostEqual(0.0, y_opt.item(), delta=1)

def test_rosenbrock_2D_SBO(self):
n_iter = 10
@@ -151,9 +151,9 @@ def test_rosenbrock_2D_SBO(self):

x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
self.assertTrue(np.allclose([[1, 1]], x_opt, atol=1))
self.assertAlmostEqual(0.0, float(y_opt), delta=1)
self.assertAlmostEqual(0.0, y_opt.item(), delta=1)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_rosenbrock_2D_parallel(self):
n_iter = 20
n_parallel = 5
@@ -179,7 +179,7 @@ def test_rosenbrock_2D_parallel(self):
x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
print("Rosenbrock: ", x_opt)
self.assertTrue(np.allclose([[1, 1]], x_opt, rtol=0.5))
self.assertAlmostEqual(0.0, float(y_opt), delta=1)
self.assertAlmostEqual(0.0, y_opt.item(), delta=1)

def test_branin_2D(self):
n_iter = 20
@@ -201,9 +201,9 @@ def test_branin_2D(self):
or np.allclose([[3.14, 2.275]], x_opt, rtol=0.25)
or np.allclose([[9.42, 2.475]], x_opt, rtol=0.25)
)
self.assertAlmostEqual(0.39, float(y_opt), delta=0.8)
self.assertAlmostEqual(0.39, y_opt.item(), delta=0.8)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_branin_2D_parallel(self):
n_iter = 10
fun = Branin(ndim=2)
@@ -231,9 +231,9 @@ def test_branin_2D_parallel(self):
or np.allclose([[9.42, 2.475]], x_opt, rtol=0.5)
)
print("Branin=", x_opt)
self.assertAlmostEqual(0.39, float(y_opt), delta=1)
self.assertAlmostEqual(0.39, y_opt.item(), delta=1)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_branin_2D_mixed_parallel(self):
n_parallel = 5
n_iter = 20
@@ -273,9 +273,9 @@ def test_branin_2D_mixed_parallel(self):
or np.allclose([[3, 2.275]], x_opt, rtol=0.2)
or np.allclose([[9, 2.475]], x_opt, rtol=0.2)
)
self.assertAlmostEqual(0.494, float(y_opt), delta=1)
self.assertAlmostEqual(0.494, y_opt.item(), delta=1)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_branin_2D_mixed(self):
n_iter = 20
fun = Branin(ndim=2)
@@ -310,9 +310,9 @@ def test_branin_2D_mixed(self):
or np.allclose([[3, 2.275]], x_opt, rtol=0.2)
or np.allclose([[9, 2.475]], x_opt, rtol=0.2)
)
self.assertAlmostEqual(0.494, float(y_opt), delta=1)
self.assertAlmostEqual(0.494, y_opt.item(), delta=1)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_branin_2D_mixed_tunnel(self):
n_iter = 20
fun = Branin(ndim=2)
@@ -347,7 +347,7 @@ def test_branin_2D_mixed_tunnel(self):
or np.allclose([[3, 2.275]], x_opt, rtol=2)
or np.allclose([[9, 2.475]], x_opt, rtol=2)
)
self.assertAlmostEqual(0.494, float(y_opt), delta=2)
self.assertAlmostEqual(0.494, y_opt.item(), delta=2)

@staticmethod
def function_test_mixed_integer(X):
@@ -372,7 +372,7 @@ def function_test_mixed_integer(X):
)
return y

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_ego_mixed_integer(self):
n_iter = 15
n_doe = 5
@@ -401,9 +401,9 @@ def test_ego_mixed_integer(self):
)
_, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_mixed_integer)

self.assertAlmostEqual(-15, float(y_opt), delta=5)
self.assertAlmostEqual(-15, y_opt.item(), delta=5)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_ego_mixed_integer_gower_distance(self):
n_iter = 15
n_doe = 5
@@ -439,9 +439,9 @@ def test_ego_mixed_integer_gower_distance(self):
)
_, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_mixed_integer)

self.assertAlmostEqual(-15, float(y_opt), delta=5)
self.assertAlmostEqual(-15, y_opt.item(), delta=5)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_ego_mixed_integer_hierarchical_NN(self):
random_state = 42

@@ -556,11 +556,11 @@ def f_hv(X):
x_opt, y_opt, dnk, x_data, y_data = ego.optimize(fun=f_hv)
self.assertAlmostEqual(
f_hv(np.atleast_2d([2, -5, -5, 5, 0, 0, 0, 5])),
float(y_opt),
y_opt.item(),
delta=18,
)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_ego_mixed_integer_hierarchical_Goldstein(self):
def H(x1, x2, x3, x4, z3, z4, x5, cos_term):
h = (
@@ -723,7 +723,7 @@ def f_hv(X):
x_opt, y_opt, dnk, x_data, y_data = ego.optimize(fun=f_hv)
self.assertAlmostEqual(
9.022,
float(y_opt),
y_opt.item(),
delta=25,
)

@@ -758,15 +758,16 @@ def test_ego_mixed_integer_homo_gaussian(self):
design_space=design_space,
categorical_kernel=MixIntKernelType.EXP_HOMO_HSPHERE,
print_global=False,
hyper_opt="Cobyla",
),
enable_tunneling=False,
random_state=random_state,
)
_, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_mixed_integer)

self.assertAlmostEqual(-15, float(y_opt), delta=5)
self.assertAlmostEqual(-15, y_opt.item(), delta=5)

@unittest.skipIf(int(os.getenv("RUN_SLOW", 0)) < 1, "too slow")
@unittest.skipIf(int(os.getenv("RUN_SLOW_TESTS", 0)) < 1, "too slow")
def test_ego_mixed_integer_homo_gaussian_pls(self):
n_iter = 15
random_state = 42
@@ -806,7 +807,7 @@ def test_ego_mixed_integer_homo_gaussian_pls(self):
)
_, y_opt, _, _, _ = ego.optimize(fun=TestEGO.function_test_mixed_integer)

self.assertAlmostEqual(-15, float(y_opt), delta=5)
self.assertAlmostEqual(-15, y_opt.item(), delta=5)

def test_ydoe_option(self):
n_iter = 15
@@ -822,12 +823,14 @@ def test_ydoe_option(self):
ydoe=ydoe,
n_iter=n_iter,
criterion=criterion,
surrogate=KRG(design_space=design_space, print_global=False),
surrogate=KRG(
design_space=design_space, hyper_opt="Cobyla", print_global=False
),
random_state=random_state,
)
_, y_opt, _, _, _ = ego.optimize(fun=fun)

self.assertAlmostEqual(0.39, float(y_opt), delta=1)
self.assertAlmostEqual(0.39, y_opt.item(), delta=1)

def test_find_best_point(self):
fun = TestEGO.function_test_1d
@@ -848,7 +851,7 @@ def test_find_best_point(self):
)
_, _, _, _, _ = ego.optimize(fun=fun)
x, _ = ego._find_best_point(xdoe, ydoe, enable_tunneling=False)
self.assertAlmostEqual(6.5, float(x), delta=1)
self.assertAlmostEqual(6.5, x.item(), delta=1)

@staticmethod
def initialize_ego_gek(func="exp", criterion="LCB"):
@@ -996,6 +999,7 @@ def f_obj(X):
theta0=[1e-2],
n_start=15,
corr="squar_exp",
hyper_opt="Cobyla",
print_global=False,
),
verbose=False,
@@ -1008,7 +1012,7 @@ def f_obj(X):
self.assertAlmostEqual(np.sum(y_data), 6.846225752638086, delta=1e-9)
self.assertAlmostEqual(np.sum(x_data), 33.81192549170815, delta=1e-9)
else:
self.assertAlmostEqual(np.sum(y_data), 1.8911720670620835, delta=1e-9)
self.assertAlmostEqual(np.sum(y_data), 1.8911720670620835, delta=2e-8)
self.assertAlmostEqual(np.sum(x_data), 47.56885202767958, delta=1e-9)

def test_ego_gek(self):
@@ -1050,15 +1054,15 @@ def test_qei_criterion_default(self):
ego.gpr.train()
xtest = np.array([[10.0]])
# test that default virtual point should be equal to 3sigma lower bound kriging interval
expected = float(
expected = (
ego.gpr.predict_values(xtest)
- 3 * np.sqrt(ego.gpr.predict_variances(xtest))
)
actual = float(ego._get_virtual_point(xtest, fun(xtest))[0])
).item()
actual = ego._get_virtual_point(xtest, fun(xtest))[0].item()
self.assertAlmostEqual(expected, actual)
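In formula form, the default virtual point checked here is the 3-sigma lower bound of the kriging predictor, restating the code above:

    \tilde{y}(x) = \hat{y}(x) - 3 \sqrt{\hat{s}^2(x)}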

@unittest.skipIf(
int(os.getenv("RUN_SLOW", 0)) < 2 or NO_MATPLOTLIB,
int(os.getenv("RUN_SLOW_TESTS", 0)) < 2 or NO_MATPLOTLIB,
"too slow or matplotlib not installed",
)
def test_examples(self):
@@ -1102,7 +1106,7 @@ def function_test_1d(x):
)

x_opt, y_opt, _, x_data, y_data = ego.optimize(fun=function_test_1d)
print("Minimum in x={:.1f} with f(x)={:.1f}".format(float(x_opt), float(y_opt)))
print("Minimum in x={:.1f} with f(x)={:.1f}".format(x_opt.item(), y_opt.item()))

x_plot = np.atleast_2d(np.linspace(0, 25, 100)).T
y_plot = function_test_1d(x_plot)
@@ -1233,7 +1237,7 @@ def function_test_mixed_integer(X):
)

x_opt, y_opt, _, _, y_data = ego.optimize(fun=function_test_mixed_integer)
print("Minimum in x={} with f(x)={:.1f}".format(x_opt, float(y_opt)))
print("Minimum in x={} with f(x)={:.1f}".format(x_opt, y_opt.item()))
# print("Minimum in typed x={}".format(ego.mixint.cast_to_mixed_integer(x_opt)))

min_ref = -15
@@ -1320,7 +1324,7 @@ def run(self, fun, x):
)

x_opt, y_opt, _, x_data, y_data = ego.optimize(fun=function_test_1d)
print("Minimum in x={:.1f} with f(x)={:.1f}".format(float(x_opt), float(y_opt)))
print("Minimum in x={:.1f} with f(x)={:.1f}".format(x_opt.item(), y_opt.item()))

x_plot = np.atleast_2d(np.linspace(0, 25, 100)).T
y_plot = function_test_1d(x_plot)
6 changes: 6 additions & 0 deletions smt/applications/tests/test_mfk.py
@@ -199,6 +199,12 @@ def hf_function(x):

plt.show()

# run scripts are used in the documentation, and the documentation is not always rebuilt,
# so run them through pytest as well to keep them tested
@unittest.skipIf(NO_MATPLOTLIB, "Matplotlib not installed")
def test_run_mfk_example(self):
self.run_mfk_example()


if __name__ == "__main__":
unittest.main()
6 changes: 6 additions & 0 deletions smt/applications/tests/test_mfk_1fidelity.py
@@ -109,6 +109,12 @@ def hf_function(x):

plt.show()

# run scripts are used in the documentation, and the documentation is not always rebuilt,
# so run them through pytest as well to keep them tested
@unittest.skipIf(NO_MATPLOTLIB, "Matplotlib not installed")
def test_run_mfk_example_1fidelity(self):
self.run_mfk_example_1fidelity()


if __name__ == "__main__":
unittest.main()
7 changes: 7 additions & 0 deletions smt/applications/tests/test_mfk_mfkpls_mixed.py
@@ -843,6 +843,13 @@ def run_mfkpls_mixed_example(self):
plt.show()
# ------------------------------------------------------------------------------

# run scripts are used in the documentation, and the documentation is not always rebuilt,
# so run them through pytest as well to keep them tested
@unittest.skipIf(NO_MATPLOTLIB, "Matplotlib not installed")
def test_mfkpls_mixed(self):
self.run_mfk_mixed_example()
self.run_mfkpls_mixed_example()


if __name__ == "__main__":
TestMFKmixed().run_mfk_mixed_example()
6 changes: 6 additions & 0 deletions smt/applications/tests/test_mfkpls.py
@@ -200,6 +200,12 @@ def hf_function(x):

plt.show()

# run scripts are used in the documentation, and the documentation is not always rebuilt,
# so run them through pytest as well to keep them tested
@unittest.skipIf(NO_MATPLOTLIB, "Matplotlib not installed")
def test_run_mfkpls_example(self):
self.run_mfkpls_example()


if __name__ == "__main__":
unittest.main()
