Added support for constraints when using the SciPy differential evolution optimizer #3187

Merged · 3 commits · Apr 16, 2024
2 changes: 1 addition & 1 deletion .github/workflows/openmdao_audit.yml
@@ -122,6 +122,6 @@ jobs:
- name: Notify slack
uses: act10ns/slack@v2.0.0
with:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
status: ${{ job.status }}
if: failure()
14 changes: 12 additions & 2 deletions .github/workflows/openmdao_test_workflow.yml
@@ -495,7 +495,7 @@ jobs:
${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}

- name: Slack unit test failure
if: steps.run_tests.outcome == 'failure'
if: failure() && steps.run_tests.outcome == 'failure'
uses: act10ns/slack@v2.0.0
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -505,7 +505,7 @@
${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}

- name: Slack doc build failure
if: steps.build_docs.outcome == 'failure'
if: failure() && steps.build_docs.outcome == 'failure'
uses: act10ns/slack@v2.0.0
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -656,6 +656,16 @@ jobs:
Set-DisplayResolution -Width 1920 -Height 1080 -Force
testflo -n 2 openmdao --timeout=240 --show_skipped --coverage --coverpkg openmdao --durations=20

- name: Slack unit test failure
if: failure() && steps.run_tests.outcome == 'failure'
uses: act10ns/slack@v2.0.0
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
status: ${{ steps.run_tests.outcome }}
message:
Unit testing failed on `${{ matrix.NAME }}` build.
${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}

- name: Submit coverage
id: coveralls
continue-on-error: true
94 changes: 64 additions & 30 deletions openmdao/drivers/scipy_optimizer.py
@@ -14,7 +14,7 @@
from openmdao.core.group import Group
from openmdao.utils.class_util import WeakMethodWrapper
from openmdao.utils.mpi import MPI
from openmdao.core.analysis_error import AnalysisError


# Optimizers in scipy.minimize
_optimizers = {'Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Newton-CG', 'L-BFGS-B',
@@ -33,8 +33,11 @@
_bounds_optimizers |= {'COBYLA'}

_constraint_optimizers = {'COBYLA', 'SLSQP', 'trust-constr', 'shgo'}

_constraint_grad_optimizers = _gradient_optimizers & _constraint_optimizers
if Version(scipy_version) >= Version("1.4"):
_constraint_optimizers.add('differential_evolution')
_constraint_grad_optimizers.add('differential_evolution')

_eq_constraint_optimizers = {'SLSQP', 'trust-constr'}
_global_optimizers = {'differential_evolution', 'basinhopping'}
if Version(scipy_version) >= Version("1.2"): # Only available in newer versions
@@ -53,6 +56,8 @@
# In principle now everything can work with "old-style"
# These settings have no effect to the optimizers implemented before SciPy 1.1
_supports_new_style = {'trust-constr'}
if Version(scipy_version) >= Version("1.4"):
_supports_new_style.add('differential_evolution')
_use_new_style = True # Recommended to set to True

CITATIONS = """
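Since SciPy 1.4, scipy.optimize.differential_evolution accepts the same new-style constraint objects as trust-constr, which is what the version gates above rely on. A minimal standalone sketch of that upstream API, using the constrained Rosenbrock setup from the SciPy docs example that the new tests below mirror:

import numpy as np
from scipy.optimize import differential_evolution, LinearConstraint

def rosen(x):
    # 2-D Rosenbrock function
    return 100.0 * (x[1] - x[0] ** 2) ** 2 + (1.0 - x[0]) ** 2

# new-style linear constraint: x[0] + x[1] <= 1.9
lc = LinearConstraint(np.array([[1.0, 1.0]]), -np.inf, 1.9)

result = differential_evolution(rosen, bounds=[(0, 2), (0, 2)],
                                constraints=(lc,), seed=1)
print(result.x, result.fun)  # roughly [0.966, 0.934] and 1.14e-3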
@@ -360,6 +365,19 @@ def run(self):
self._obj_and_nlcons = list(self._objs)

if opt in _constraint_optimizers:
# get list of linear constraints and precalculate gradients for them (if any)
if opt in _constraint_grad_optimizers:
lincons = [name for name, meta in self._cons.items() if meta.get('linear')]
else:
lincons = []

if lincons:
lincongrad = self._lincongrad_cache = \
self._compute_totals(of=lincons, wrt=self._dvlist, return_format='array')
else:
self._lincongrad_cache = None

# map constraints to index and instantiate constraints for scipy
for name, meta in self._cons.items():
if meta['indices'] is not None:
meta['size'] = size = meta['indices'].indexed_src_size
@@ -368,8 +386,9 @@
upper = meta['upper']
lower = meta['lower']
equals = meta['equals']
if opt in _gradient_optimizers and 'linear' in meta and meta['linear']:
lincons.append(name)
linear = name in lincons

if linear:
self._con_idx[name] = lin_i
lin_i += size
else:
@@ -379,10 +398,10 @@

# In scipy constraint optimizers take constraints in two separate formats

# Type of constraints is list of NonlinearConstraint
if opt in _supports_new_style and _use_new_style:
# Type of constraints is list of NonlinearConstraint and/or LinearConstraint
try:
from scipy.optimize import NonlinearConstraint
from scipy.optimize import NonlinearConstraint, LinearConstraint
except ImportError:
msg = ('The "trust-constr" optimizer is supported for SciPy 1.1.0 and '
'above. The installed version is {}')
@@ -393,20 +412,31 @@
else:
lb = lower
ub = upper
# Loop over every index separately,
# because scipy calls each constraint by index.
for j in range(size):
# Double-sided constraints are accepted by the algorithm
args = [name, False, j]
# TODO linear constraint if meta['linear']
# TODO add option for Hessian
con = NonlinearConstraint(
fun=signature_extender(WeakMethodWrapper(self, '_con_val_func'),
args),
lb=lb, ub=ub,
jac=signature_extender(WeakMethodWrapper(self, '_congradfunc'), args))
constraints.append(con)
else: # Type of constraints is list of dict

if linear:
# LinearConstraint
con = LinearConstraint(A=lincongrad[self._con_idx[name]],
lb=lower, ub=upper, keep_feasible=True)
else:
# NonlinearConstraint
# Loop over every index separately,
# because scipy calls each constraint by index.
for j in range(size):
# TODO add option for Hessian
# Double-sided constraints are accepted by the algorithm
args = [name, False, j]
con = NonlinearConstraint(
fun=signature_extender(
WeakMethodWrapper(self, '_con_val_func'), args),
lb=lb, ub=ub,
jac=signature_extender(
WeakMethodWrapper(self, '_congradfunc'), args)
)

constraints.append(con)
else:
# Type of constraints is list of dict

# Loop over every index separately,
# because scipy calls each constraint by index.
for j in range(size):
@@ -439,13 +469,6 @@
dcon_dict['args'] = [name, True, j]
constraints.append(dcon_dict)

# precalculate gradients of linear constraints
if lincons:
self._lincongrad_cache = self._compute_totals(of=lincons, wrt=self._dvlist,
return_format=self._total_jac_format)
else:
self._lincongrad_cache = None

# Provide gradients for optimizers that support it
if opt in _gradient_optimizers:
jac = self._gradfunc
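Nonlinear constraints are handed to SciPy one scalar NonlinearConstraint per index in the loops above, since the driver evaluates and differentiates constraints component-wise; only the new LinearConstraint branch passes a whole constraint at once. A sketch of the same splitting pattern outside the driver, with functools.partial standing in for OpenMDAO's internal signature_extender helper and con_vec/con_component as hypothetical names:

import numpy as np
from functools import partial
from scipy.optimize import NonlinearConstraint

def con_vec(x):
    # hypothetical two-component constraint vector
    return np.array([x[0] + x[1], x[0] - x[1]])

def con_component(x, j):
    # scalar view of component j, mirroring how each index is wrapped
    return con_vec(x)[j]

constraints = [NonlinearConstraint(partial(con_component, j=j), -np.inf, 1.9)
               for j in range(2)]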
@@ -526,7 +549,9 @@ def accept_test(f_new, x_new, f_old, x_old):
from scipy.optimize import differential_evolution
# There is no "options" param, so "opt_settings" can be used to set the (many)
# keyword arguments
result = differential_evolution(self._objfunc, bounds=bounds, **self.opt_settings)
result = differential_evolution(self._objfunc, bounds=bounds,
constraints=constraints,
**self.opt_settings)
elif opt == 'shgo':
from scipy.optimize import shgo
kwargs = dict()
@@ -555,7 +580,6 @@ def accept_test(f_new, x_new, f_old, x_old):
if self._exc_info is None:
raise
finally:
total_jac = self._total_jac # used later if this is the final iter
self._total_jac = None

if self._exc_info is not None:
Expand Down Expand Up @@ -677,6 +701,10 @@ def _con_val_func(self, x_new, name, dbl, idx):
float
Value of the constraint function.
"""
if self.options['optimizer'] == 'differential_evolution':
# the DE opt will not have called this, so we do it here to update DV/resp values
self._objfunc(x_new)

return self._con_cache[name][idx]

def _confunc(self, x_new, name, dbl, idx):
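The extra _objfunc call above is needed because differential_evolution checks constraint feasibility at trial points without necessarily evaluating the objective there first, so the cached constraint values can be stale. A small sketch of that hazard, where model and con_val are hypothetical names (the driver simply re-evaluates unconditionally; the staleness check here is only illustrative):

import numpy as np

_cache = {'x': None, 'con': None}

def model(x):
    # hypothetical model run: updates the constraint cache as a side effect
    _cache['x'] = np.asarray(x, dtype=float).copy()
    _cache['con'] = np.array([x[0] + x[1]])
    return 100.0 * (x[1] - x[0] ** 2) ** 2 + (1.0 - x[0]) ** 2

def con_val(x, idx):
    # re-run the model when the optimizer asks for a constraint value at a
    # point where the objective has not yet been evaluated
    if _cache['x'] is None or not np.array_equal(_cache['x'], x):
        model(x)
    return _cache['con'][idx]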
@@ -807,7 +835,13 @@ def _congradfunc(self, x_new, name, dbl, idx):
if meta['linear']:
grad = self._lincongrad_cache
else:
grad = self._grad_cache
if self._grad_cache is not None:
grad = self._grad_cache
else:
# _gradfunc has not been called, meaning gradients are not
# used for the objective but are needed for the constraints
grad = self._compute_totals(of=self._obj_and_nlcons, wrt=self._dvlist,
return_format=self._total_jac_format)
grad_idx = self._con_idx[name] + idx

# print("Constraint Gradient returned")
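A related wrinkle closes out the driver changes: differential_evolution never requests objective gradients, so _grad_cache is empty when SciPy asks for a constraint Jacobian (e.g. during the final polishing step of a constrained run), and the fallback above computes the totals on demand. A minimal sketch of that lazy pattern, with compute_jacobian and con_grad as hypothetical stand-ins for the driver's _compute_totals call and _congradfunc:

import numpy as np

_grad_cache = None  # normally filled by the objective-gradient callback

def compute_jacobian(x):
    # hypothetical total-derivative computation, e.g. d(x[0] + x[1])/dx
    return np.array([[1.0, 1.0]])

def con_grad(x, row):
    global _grad_cache
    if _grad_cache is None:
        # gradient-free optimizer: build the Jacobian only when asked for it
        _grad_cache = compute_jacobian(x)
    return _grad_cache[row]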
114 changes: 110 additions & 4 deletions openmdao/drivers/tests/test_scipy_optimizer.py
@@ -29,8 +29,6 @@
vector_class = om.DefaultVector
PETScVector = None

rosenbrock_size = 6 # size of the design variable

def rosenbrock(x):
x_0 = x[:-1]
x_1 = x[1:]
@@ -39,8 +37,11 @@ def rosenbrock(x):

class Rosenbrock(om.ExplicitComponent):

def initialize(self):
self.options.declare('vec_size', default=6, desc='Size of input vector.')

def setup(self):
self.add_input('x', np.ones(rosenbrock_size))
self.add_input('x', np.ones(self.options['vec_size']))
self.add_output('f', 0.0)

def compute(self, inputs, outputs, discrete_inputs=None, discrete_outputs=None):
@@ -1900,6 +1901,109 @@ def compute(self, inputs, outputs, discrete_inputs=None, discrete_outputs=None):
assert_near_equal(prob['x'], -np.ones(size), 1e-2)
assert_near_equal(prob['f'], 3.0, 1e-2)

@unittest.skipUnless(Version(scipy_version) >= Version("1.4"),
"scipy >= 1.4 is required.")
def test_differential_evolution_constrained_linear(self):
# Source of example:
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.differential_evolution.html
# In this example the minimum is not the unbounded global minimum.

size = 2

model = om.Group()

model.add_subsystem('rosenbrock', Rosenbrock(vec_size=size), promotes=['*'])

model.add_design_var('x', lower=0, upper=2)
model.add_objective('f')

# We add the constraint that the sum of x[0] and x[1] must be less than or equal to 1.9.
model.add_subsystem('constraint_comp',
om.ExecComp('con = sum(x)', con={'shape': (1,)}, x={'shape': (size)}),
promotes=['*'])
model.add_constraint('con', upper=1.9, linear=True)

driver = om.ScipyOptimizeDriver(optimizer='differential_evolution', disp=False)
driver.opt_settings['seed'] = 1

prob = om.Problem(model, driver)
prob.setup()
failed = prob.run_driver()

self.assertFalse(failed, f"Optimization failed, result = \n{prob.driver.result}")

assert_near_equal(prob['x'], [0.96632622, 0.93367155], 1e-2)
assert_near_equal(prob['f'], 0.0011352416852625719, 1e-2)

@unittest.skipUnless(Version(scipy_version) >= Version("1.4"),
"scipy >= 1.4 is required.")
def test_differential_evolution_constrained_nonlinear(self):
# Source of example:
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.differential_evolution.html
# In this example the minimum is not the unbounded global minimum.

size = 2

model = om.Group()

model.add_subsystem('rosenbrock', Rosenbrock(vec_size=size), promotes=['*'])

# We add the constraint that the sum of x[0] and x[1] must be less than or equal to 1.9.
model.add_subsystem('constraint_comp',
om.ExecComp('con = sum(x)', con={'shape': (1,)}, x={'shape': (size)}),
promotes=['*'])

model.add_design_var('x', lower=0, upper=1)
model.add_constraint('con', upper=1.9, linear=False)
model.add_objective('f')

driver = om.ScipyOptimizeDriver(optimizer='differential_evolution', disp=False)
driver.opt_settings['seed'] = 1

prob = om.Problem(model, driver)
prob.setup()
failed = prob.run_driver()

self.assertFalse(failed, f"Optimization failed, result = \n{prob.driver.result}")

assert_near_equal(prob['x'], [0.96632622, 0.93367155], 1e-2)
assert_near_equal(prob['f'], 0.0011352416852625719, 1e-2)

@unittest.skipUnless(Version(scipy_version) >= Version("1.4"),
"scipy >= 1.4 is required.")
def test_differential_evolution_constrained_linear_nonlinear(self):
# test of the differential evolution optimizer with both
# a linear and a nonlinear constraint

size = 2

model = om.Group()

model.add_subsystem('rosenbrock', Rosenbrock(vec_size=size), promotes=['*'])

# We add the constraint that the sum of x[0] and x[1] must be less than or equal to 1.9.
model.add_subsystem('constraint_comp',
om.ExecComp('con = sum(x)',
con={'shape': (1,)}, x={'shape': (size)}),
promotes=['*'])

model.add_design_var('x', lower=0, upper=1)
model.add_constraint('con', upper=1.9, linear=True)
model.add_constraint('x', indices=[0], upper=.95, linear=False)
model.add_objective('f')

driver = om.ScipyOptimizeDriver(optimizer='differential_evolution', disp=False)
driver.opt_settings['seed'] = 1

prob = om.Problem(model, driver)
prob.setup()
failed = prob.run_driver()

self.assertFalse(failed, f"Optimization failed, result = \n{prob.driver.result}")

assert_near_equal(prob['x'], [0.94999253, 0.90250721], 1e-2)
assert_near_equal(prob['f'], 0.00250079, 1e-2)

@unittest.skipUnless(Version(scipy_version) >= Version("1.2"),
"scipy >= 1.2 is required.")
def test_shgo_rosenbrock(self):
@@ -1909,8 +2013,10 @@ def test_shgo_rosenbrock(self):
prob = om.Problem()
model = prob.model

rosenbrock_size = 6 # size of the design variable

model.add_subsystem('indeps', om.IndepVarComp('x', np.ones(rosenbrock_size)), promotes=['*'])
model.add_subsystem('rosen', Rosenbrock(), promotes=['*'])
model.add_subsystem('rosen', Rosenbrock(vec_size=rosenbrock_size), promotes=['*'])

prob.driver = driver = om.ScipyOptimizeDriver()
driver.options['optimizer'] = 'shgo'
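Taken together, the tests above exercise the new constrained differential_evolution path end to end. A minimal standalone script in the same spirit (component and variable names are illustrative; the expected optimum is the constrained value from the SciPy docs example used by the tests):

import numpy as np
import openmdao.api as om

class Rosenbrock(om.ExplicitComponent):
    def setup(self):
        self.add_input('x', np.ones(2))
        self.add_output('f', 0.0)

    def compute(self, inputs, outputs):
        x = inputs['x']
        outputs['f'] = np.sum(100.0 * (x[1:] - x[:-1] ** 2) ** 2
                              + (1.0 - x[:-1]) ** 2)

prob = om.Problem()
model = prob.model
model.add_subsystem('rosen', Rosenbrock(), promotes=['*'])
# constraint component: con = x[0] + x[1]
model.add_subsystem('con_comp', om.ExecComp('con = sum(x)', x=np.ones(2)),
                    promotes=['*'])

model.add_design_var('x', lower=0, upper=2)
model.add_objective('f')
model.add_constraint('con', upper=1.9, linear=True)

prob.driver = om.ScipyOptimizeDriver(optimizer='differential_evolution', disp=False)
prob.driver.opt_settings['seed'] = 1

prob.setup()
prob.run_driver()
print(prob['x'], prob['f'])  # roughly [0.966, 0.934] and 1.14e-3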