From b4667a01e2f33e418983431ee2b6b66c5f564e36 Mon Sep 17 00:00:00 2001 From: Jeffrey Larson Date: Thu, 16 Apr 2020 15:17:21 -0500 Subject: [PATCH 1/4] An initial attempt to use the various localopt exit codes in APOSMM --- libensemble/gen_funcs/persistent_aposmm.py | 103 +++++++++++------- .../call_matlab_octave_script.m | 3 +- 2 files changed, 68 insertions(+), 38 deletions(-) diff --git a/libensemble/gen_funcs/persistent_aposmm.py b/libensemble/gen_funcs/persistent_aposmm.py index bc9169593..998a4272d 100644 --- a/libensemble/gen_funcs/persistent_aposmm.py +++ b/libensemble/gen_funcs/persistent_aposmm.py @@ -37,8 +37,9 @@ class ConvergedMsg(object): """ Message communicated when a local optimization is converged. """ - def __init__(self, x): + def __init__(self, x, opt_flag): self.x = x + self.opt_flag = opt_flag class ErrorMsg(object): @@ -229,7 +230,8 @@ def aposmm(H, persis_info, gen_specs, libE_info): x_new = local_opters[child_idx].iterate(row[fields_to_pass]) if isinstance(x_new, ConvergedMsg): x_opt = x_new.x - opt_ind = update_history_optimal(x_opt, local_H, run_order[child_idx]) + opt_flag = x_new.opt_flag + opt_ind = update_history_optimal(x_opt, opt_flag, local_H, run_order[child_idx]) new_opt_inds_to_send_mgr.append(opt_ind) local_opters.pop(child_idx) else: @@ -413,7 +415,8 @@ def run_local_nlopt(user_specs, comm_queue, x0, f0, child_can_read, parent_can_r else: opt.set_initial_step(dist_to_bound) - opt.set_maxeval(user_specs.get('run_max_eval', 1000*n)) + run_max_eval = user_specs.get('run_max_eval', 1000*n) + opt.set_maxeval(run_max_eval) opt.set_min_objective(lambda x, grad: nlopt_callback_fun(x, grad, comm_queue, child_can_read, parent_can_read, @@ -431,11 +434,26 @@ def run_local_nlopt(user_specs, comm_queue, x0, f0, child_can_read, parent_can_r # FIXME: Do we need to do something of the final 'x_opt'? 
# print('[Child]: Started my optimization', flush=True) x_opt = opt.optimize(x0) + return_val = opt.last_optimize_result() + + if return_val >= 1 and return_val <= 4: + # These return values correspond to an optimum being identified + # https://nlopt.readthedocs.io/en/latest/NLopt_Reference/#return-values + opt_flag = 1 + elif return_val >= 5: + print("The run started from " + str(x0) + " reached its maximum number " + "of function evaluations: " + str(run_max_eval) + ". No point from " + "this run will be ruled as a minimum! APOSMM may start a new run " + "from some point in this run.") + opt_flag = 0 + else: + "NLopt returned with a negative return value, which indicates an error" + opt_flag = 0 if user_specs.get('periodic'): x_opt = x_opt % 1 # Shift x_opt to be in the correct location in the unit cube (not the domain user_specs['lb'] - user_specs['ub']) - finish_queue(x_opt, comm_queue, parent_can_read, user_specs) + finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs) def run_local_scipy_opt(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read): @@ -457,22 +475,21 @@ def run_local_scipy_opt(user_specs, comm_queue, x0, f0, child_can_r # constraints=cons, method=method, **user_specs.get('scipy_kwargs', {})) - # if res['status'] == 2: # SciPy code for exhausting budget of evaluations, so not at a minimum - # exit_code = 0 - # else: - # if method == 'Nelder-Mead': - # assert res['status'] == 0, "Unknown status for Nelder-Mead" - # exit_code = 1 + if res['status'] == 0: + opt_flag = 1 + else: + print("The SciPy localopt run started from " + str(x0) + " stopped " + " without finding a local min. The status of the run is " + str(res['status']) + + ". No point from this run will be ruled as a minimum! APOSMM may " + "start a new run from some point in this run.") + opt_flag = 0 if user_specs.get('periodic'): x_opt = res['x'] % 1 else: x_opt = res['x'] - # FIXME: Need to do something with the exit codes. 
- # print(exit_code) - - finish_queue(x_opt, comm_queue, parent_can_read, user_specs) + finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs) def run_external_localopt(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read): @@ -512,11 +529,12 @@ def run_external_localopt(user_specs, comm_queue, x0, f0, child_can_read, parent open(y_done_file, 'w').close() x_opt = np.loadtxt(opt_file) + opt_flag = np.loadtxt(opt_file + "_flag") for f in [x_file, y_file, opt_file]: os.remove(f) - finish_queue(x_opt, comm_queue, parent_can_read, user_specs) + finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs) def run_local_dfols(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read): @@ -541,10 +559,16 @@ def run_local_dfols(user_specs, comm_queue, x0, f0, child_can_read, parent_can_r x_opt = soln.x - # FIXME: Need to do something with the exit codes. - # print(exit_code) + if soln.flag == soln.EXIT_SUCCESS: + opt_flag = 1 + else: + print("The DFO-LS run started from " + str(x0) + " stopped with an exit " + "flag of " + str(soln.flag) + ". No point from this run will be " + "ruled as a minimum! APOSMM may start a new run from some point " + "in this run.") + opt_flag = 0 - finish_queue(x_opt, comm_queue, parent_can_read, user_specs) + finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs) def run_local_tao(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read): @@ -605,12 +629,15 @@ def run_local_tao(user_specs, comm_queue, x0, f0, child_can_read, parent_can_rea tao.solve(x) x_opt = tao.getSolution().getArray() - # exit_code = tao.getConvergedReason() + exit_code = tao.getConvergedReason() - # FIXME: Need to do something with the exit codes. 
- # print(exit_code) - # print(tao.view()) - # print(x_opt) + if exit_code > 0: + opt_flag = 1 + else: + # https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Tao/TaoGetConvergedReason.html + print("The run started from " + str(x0) + " exited with a nonpositive reason. No point from " + "this run will be ruled as a minimum! APOSMM may start a new run from some point in this run.") + opt_flag = 0 if user_specs['localopt_method'] == 'pounders': f.destroy() @@ -622,7 +649,7 @@ def run_local_tao(user_specs, comm_queue, x0, f0, child_can_read, parent_can_rea x.destroy() tao.destroy() - finish_queue(x_opt, comm_queue, parent_can_read, user_specs) + finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs) def opt_runner(run_local_opt, user_specs, comm_queue, x0, f0, child_can_read, parent_can_read): @@ -677,11 +704,11 @@ def tao_callback_fun_grad(tao, x, g, comm_queue, child_can_read, parent_can_read return f_recv -def finish_queue(x_opt, comm_queue, parent_can_read, user_specs): +def finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs): if user_specs.get('print'): print('Local optimum on the [0,1]^n domain', x_opt, flush=True) - comm_queue.put(ConvergedMsg(x_opt)) + comm_queue.put(ConvergedMsg(x_opt, opt_flag)) parent_can_read.set() @@ -856,7 +883,7 @@ def update_history_dist(H, n): # H['ind_of_better_l'][best_local] = -1 -def update_history_optimal(x_opt, H, run_inds): +def update_history_optimal(x_opt, opt_flag, H, run_inds): """ Updated the history after any point has been declared a local minimum """ @@ -877,16 +904,18 @@ def update_history_optimal(x_opt, H, run_inds): tol_x2 = 1e-8 failsafe = np.logical_and(H['f'][run_inds] < H['f'][opt_ind], dists < tol_x2) - if np.any(failsafe): - print("This run has {} point(s) with smaller 'f' value within {} of " - "the point ruled to be the run minimum. 
\nMarking all as being " - "a 'local_min' to prevent APOSMM from starting another run " - "immediately from these points.".format(sum(failsafe), tol_x2)) - print("Sim_ids to be marked optimal: ", opt_ind, run_inds[failsafe]) - print("Check that the local optimizer is working correctly", flush=True) - H['local_min'][run_inds[failsafe]] = 1 - - H['local_min'][opt_ind] = 1 + if opt_flag: + if np.any(failsafe): + print("This run has {} point(s) with smaller 'f' value within {} of " + "the point ruled to be the run minimum. \nMarking all as being " + "a 'local_min' to prevent APOSMM from starting another run " + "immediately from these points.".format(sum(failsafe), tol_x2)) + print("Sim_ids to be marked optimal: ", opt_ind, run_inds[failsafe]) + print("Check that the local optimizer is working correctly", flush=True) + H['local_min'][run_inds[failsafe]] = 1 + + H['local_min'][opt_ind] = 1 + H['num_active_runs'][run_inds] -= 1 return opt_ind diff --git a/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m b/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m index 18c5f3880..357679354 100644 --- a/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m +++ b/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m @@ -8,6 +8,7 @@ % % x0, and the 5 filenames involved must be given when invoking the script. 
-xopt = fminsearch(@(x)wrapper_obj_fun(x,x_file,y_file,x_done_file,y_done_file),x0) +xopt, ~, opt_flag = fminsearch(@(x)wrapper_obj_fun(x,x_file,y_file,x_done_file,y_done_file),x0) dlmwrite(opt_file, xopt, 'delimiter', ' ', 'precision', 16) +dlmwrite(opt_file + '_flag', opt_flag) exit From 22e49531dc8cce20217718ddf47b86d601502284 Mon Sep 17 00:00:00 2001 From: Jeffrey Larson Date: Thu, 16 Apr 2020 15:24:47 -0500 Subject: [PATCH 2/4] Fixing octave/matlab script --- .../scripts_used_by_reg_tests/call_matlab_octave_script.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m b/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m index 357679354..6928ed2b9 100644 --- a/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m +++ b/libensemble/tests/regression_tests/scripts_used_by_reg_tests/call_matlab_octave_script.m @@ -8,7 +8,7 @@ % % x0, and the 5 filenames involved must be given when invoking the script. 
-xopt, ~, opt_flag = fminsearch(@(x)wrapper_obj_fun(x,x_file,y_file,x_done_file,y_done_file),x0) +xopt = fminsearch(@(x)wrapper_obj_fun(x,x_file,y_file,x_done_file,y_done_file),x0) dlmwrite(opt_file, xopt, 'delimiter', ' ', 'precision', 16) -dlmwrite(opt_file + '_flag', opt_flag) +dlmwrite([opt_file '_flag'], 1) % This assumes xopt is a local min, not just the last point in the run exit From b58942a82a0309d3bfc6a91336e7942da318d18c Mon Sep 17 00:00:00 2001 From: Jeffrey Larson Date: Thu, 16 Apr 2020 20:32:56 -0500 Subject: [PATCH 3/4] Fixing prints --- libensemble/gen_funcs/persistent_aposmm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libensemble/gen_funcs/persistent_aposmm.py b/libensemble/gen_funcs/persistent_aposmm.py index 998a4272d..35fe1b09c 100644 --- a/libensemble/gen_funcs/persistent_aposmm.py +++ b/libensemble/gen_funcs/persistent_aposmm.py @@ -447,7 +447,7 @@ def run_local_nlopt(user_specs, comm_queue, x0, f0, child_can_read, parent_can_r "from some point in this run.") opt_flag = 0 else: - "NLopt returned with a negative return value, which indicates an error" + print("NLopt returned with a negative return value, which indicates an error") opt_flag = 0 if user_specs.get('periodic'): @@ -706,7 +706,7 @@ def tao_callback_fun_grad(tao, x, g, comm_queue, child_can_read, parent_can_read def finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs): - if user_specs.get('print'): + if user_specs.get('print') and opt_flag: print('Local optimum on the [0,1]^n domain', x_opt, flush=True) comm_queue.put(ConvergedMsg(x_opt, opt_flag)) parent_can_read.set() From d24c245415899b6ba253de0eb536989c6a035763 Mon Sep 17 00:00:00 2001 From: Jeffrey Larson Date: Thu, 16 Apr 2020 20:56:17 -0500 Subject: [PATCH 4/4] Adding persistent_aposmm unit test --- libensemble/gen_funcs/persistent_aposmm.py | 6 ++++-- .../unit_tests/test_persistent_aposmm.py | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git 
a/libensemble/gen_funcs/persistent_aposmm.py b/libensemble/gen_funcs/persistent_aposmm.py index 35fe1b09c..18fc06623 100644 --- a/libensemble/gen_funcs/persistent_aposmm.py +++ b/libensemble/gen_funcs/persistent_aposmm.py @@ -896,12 +896,14 @@ def update_history_optimal(x_opt, opt_flag, H, run_inds): opt_ind = run_inds[ind] tol_x1 = 1e-15 + + # Instead of failing, we accept x_opt that is slightly different from its value in H + # assert dists[ind] <= tol_x1, "Closest point to x_opt not within {}?".format(tol_x1) + if dists[ind] > tol_x1: print("Dist from reported x_opt to closest evaluated point is: " + str(dists[ind]) + "\n" + "Check that the local optimizer is working correctly\n", x_opt, run_inds, flush=True) - assert dists[ind] <= tol_x1, "Closest point to x_opt not within {}?".format(tol_x1) - tol_x2 = 1e-8 failsafe = np.logical_and(H['f'][run_inds] < H['f'][opt_ind], dists < tol_x2) if opt_flag: diff --git a/libensemble/tests/unit_tests/test_persistent_aposmm.py b/libensemble/tests/unit_tests/test_persistent_aposmm.py index 5ab8b5d2a..e1d47b06e 100644 --- a/libensemble/tests/unit_tests/test_persistent_aposmm.py +++ b/libensemble/tests/unit_tests/test_persistent_aposmm.py @@ -23,5 +23,24 @@ def test_persis_apossm_localopt_test(): assert 0 +def test_update_history_optimal(): + hist, _, _, _, _ = setup.hist_setup1(n=2) + + H = hist.H + + H['returned'] = True + H['sim_id'] = range(len(H)) + H['f'][0] = -1e-8 + H['x_on_cube'][-1] = 1e-10 + + # Perturb x_opt point to test the case where the reported minimum isn't + # exactly in H. Also, a point in the neighborhood of x_opt has a better + # function value. + opt_ind = al.update_history_optimal(H['x_on_cube'][-1]+1e-12, 1, H, np.arange(len(H))) + + assert opt_ind == 9, "Wrong point declared minimum" + + if __name__ == "__main__": test_persis_apossm_localopt_test() + test_update_history_optimal()