Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.
Download ZIP
Browse files

Merge pull request #939 from Kenneth-T-Moore/derivatives2.0

Derivative of unit conversion factors
  • Loading branch information...
commit 9ecdc33dfade192132e5d37992a6a915227ed1b0 2 parents 5fd67fa + 53bc7ad
@naylor-b naylor-b authored
View
4 docs/optimization/scaler_adder.rst
@@ -197,7 +197,7 @@ Running this produces:
(0.006667, -733.333313)
>>> print "Elapsed time: ", time.time()-tt, "seconds"
Elapsed time: 0.0160000324249 seconds
- >>>> print "Execution count: ", opt_problem.paraboloid.exec_count
+ >>> print "Execution count: ", opt_problem.paraboloid.exec_count
Execution count: 52
@@ -219,7 +219,7 @@ Running the assembly now gives:
(0.006667, -733.333313)
>>> print "Elapsed time: ", time.time()-tt, "seconds"
Elapsed time: 0.0 seconds
- >>>> print "Execution count: ", opt_problem.paraboloid.exec_count
+ >>> print "Execution count: ", opt_problem.paraboloid.exec_count
Execution count: 23
Just as before, the optimization converges more quickly and with fewer iterations.
View
29 openmdao.lib/src/openmdao/lib/differentiators/chain_rule.py
@@ -11,7 +11,7 @@
from openmdao.main.api import Driver, Assembly
from openmdao.main.assembly import Run_Once
from openmdao.main.numpy_fallback import array
-
+from openmdao.units import convert_units
class ChainRule(HasTraits):
""" Differentiates a driver's workflow using the Chain Rule with Numerical
@@ -244,6 +244,18 @@ def _chain_workflow(self, derivs, scope, param):
expr = node.parent._depgraph.get_expr(expr_txt)
expr_deriv = expr.evaluate_gradient(scope=node.parent,
wrt=source)
+
+ # We also need the derivative of the unit
+ # conversion factor if there is one
+ metadata = expr.get_metadata('units')
+ source_unit = [x[1] for x in metadata if x[0]==source]
+ if source_unit and source_unit[0]:
+ dest_expr = node.parent._depgraph.get_expr(source_tuple[1])
+ metadata = dest_expr.get_metadata('units')
+ target_unit = [x[1] for x in metadata if x[0]==source_tuple[1]]
+
+ expr_deriv[source] = expr_deriv[source] * \
+ convert_units(1.0, source_unit[0], target_unit[0])
incoming_deriv_names[input_name] = full_name
if full_name in incoming_derivs:
@@ -291,10 +303,23 @@ def _recurse_assy(self, scope, upscope_derivs, upscope_param):
dest = dest.split('.')[1]
# Differentiate all expressions
- expr_txt = scope._depgraph.get_source(dest.replace('@bin.',''))
+ dest_txt = dest.replace('@bin.','')
+ expr_txt = scope._depgraph.get_source(dest_txt)
expr = scope._depgraph.get_expr(expr_txt)
expr_deriv = expr.evaluate_gradient(scope=scope,
wrt=src)
+ # We also need the derivative of the unit
+ # conversion factor if there is one
+ metadata = expr.get_metadata('units')
+ source_unit = [x[1] for x in metadata if x[0]==src]
+ if source_unit and source_unit[0]:
+ dest_expr = scope._depgraph.get_expr(dest_txt)
+ metadata = dest_expr.get_metadata('units')
+ target_unit = [x[1] for x in metadata if x[0]==dest_txt]
+
+ expr_deriv[src] = expr_deriv[src] * \
+ convert_units(1.0, source_unit[0], target_unit[0])
+
if dest in local_derivs:
local_derivs[dest] += upscope_derivs[upscope_src]*expr_deriv[src]
else:
View
73 openmdao.lib/src/openmdao/lib/differentiators/test/test_chainrule.py
@@ -63,8 +63,8 @@ class CompFoot(ComponentWithDerivatives):
# set up interface to the framework
# pylint: disable-msg=E1101
- x = Float(0.0, iotype='in', units='ft')
- y = Float(0.0, iotype='out', units='ft')
+ x = Float(1.0, iotype='in', units='ft')
+ y = Float(1.0, iotype='out', units='ft')
def __init__(self):
""" declare what derivatives that we can provide"""
@@ -89,8 +89,8 @@ class CompInch(ComponentWithDerivatives):
# set up interface to the framework
# pylint: disable-msg=E1101
- x = Float(0.0, iotype='in', units='inch')
- y = Float(0.0, iotype='out', units='inch')
+ x = Float(1.0, iotype='in', units='inch')
+ y = Float(1.0, iotype='out', units='inch')
def __init__(self):
""" declare what derivatives that we can provide"""
@@ -368,30 +368,63 @@ def test_expr_connect(self):
assert_rel_error(self, grad[0], 17.0, .001)
- #def test_simple_units(self):
+ def test_simple_units(self):
- #self.top = set_as_top(Assembly())
+ self.top = set_as_top(Assembly())
+
+ self.top.add('comp1', CompFoot())
+ self.top.add('comp2', CompInch())
+
+ self.top.connect('comp1.y', 'comp2.x')
+
+ self.top.add('driver', Driv())
+ self.top.driver.workflow.add(['comp1', 'comp2'])
+
+ self.top.driver.differentiator = ChainRule()
+
+ obj = 'comp2.y'
+ self.top.driver.add_parameter('comp1.x', low=-50., high=50., fd_step=.0001)
+ self.top.driver.add_objective(obj)
+
+ self.top.comp1.x = 2.0
+ self.top.run()
+ self.top.driver.differentiator.calc_gradient()
- #self.top.add('comp1', CompFoot())
- #self.top.add('comp2', CompInch())
+ grad = self.top.driver.differentiator.get_gradient(obj)
+ assert_rel_error(self, grad[0], 48.0, .001)
- #self.top.connect('comp1.y', 'comp2.x')
+ def test_subassy_units(self):
- #self.top.add('driver', Driv())
- #self.top.driver.workflow.add(['comp1', 'comp2'])
+ self.top = set_as_top(Assembly())
- #self.top.driver.differentiator = ChainRule()
+ self.top.add('nest1', Assembly())
+ self.top.add('comp1', CompFoot())
+ self.top.nest1.add('comp2', CompInch())
+ self.top.add('comp3', CompFoot())
- #obj = 'comp2.y'
- #self.top.driver.add_parameter('comp1.x', low=-50., high=50., fd_step=.0001)
- #self.top.driver.add_objective(obj)
+ self.top.connect('comp1.y', 'nest1.comp2.x')
+ self.top.connect('nest1.comp2.y', 'comp3.x')
- #self.top.comp1.x = 2.0
- #self.top.run()
- #self.top.driver.differentiator.calc_gradient()
+ self.top.add('driver', Driv())
+ self.top.driver.workflow.add(['comp1', 'nest1', 'comp3'])
+ self.top.nest1.driver.workflow.add(['comp2'])
- #grad = self.top.driver.differentiator.get_gradient(obj)
- #assert_rel_error(self, grad[0], 48.0, .001)
+ self.top.driver.differentiator = ChainRule()
+
+ obj = 'comp3.y'
+ con = 'nest1.comp2.y>0'
+ self.top.driver.add_parameter('comp1.x', low=-50., high=50., fd_step=.0001)
+ self.top.driver.add_objective(obj)
+ self.top.driver.add_constraint(con)
+
+ self.top.comp1.x = 1.0
+ self.top.run()
+ self.top.driver.differentiator.calc_gradient()
+
+ grad = self.top.driver.differentiator.get_gradient(obj)
+ assert_rel_error(self, grad[0], 8.0, .001)
+ grad = self.top.driver.differentiator.get_gradient(con)
+ assert_rel_error(self, grad[0], -48.0, .001)
#def test_reset_state(self):
View
32 openmdao.main/src/openmdao/main/expreval.py
@@ -387,6 +387,7 @@ def __init__(self, text, scope=None, getters=None, default_getter='get'):
self.getters = getters
self.default_getter = default_getter
+ self._examiner = None
self.cached_grad_eq = {}
@property
@@ -560,6 +561,11 @@ def evaluate(self, scope=None, wrapped=False):
raise type(err)("can't evaluate expression "+
"'%s': %s" %(self.text,str(err)))
+ def refs(self):
+ """ Used by evaluate_gradient. Returns a list of all connections
+ including their array indices."""
+ return self._examiner.refs.copy()
+
def evaluate_gradient(self, stepsize=1.0e-6, wrt=None, scope=None):
"""Return a dict containing the gradient of the expression with respect to
each of the referenced varpaths. The gradient is calculated by 1st order central
@@ -574,8 +580,11 @@ def evaluate_gradient(self, stepsize=1.0e-6, wrt=None, scope=None):
global _expr_dict
scope = self._get_updated_scope(scope)
- inputs = list(self.get_referenced_varpaths())
-
+ if self._parse_needed or self._examiner is None:
+ self._examiner = ExprExaminer(ast.parse(self.text,
+ mode='eval'), self)
+ inputs = list(self.refs())
+
if wrt==None:
wrt = inputs
elif isinstance(wrt, str):
@@ -598,7 +607,7 @@ def evaluate_gradient(self, stepsize=1.0e-6, wrt=None, scope=None):
continue
# First time, try to differentiate symbolically
- if var not in self.cached_grad_eq:
+ if (var not in self.cached_grad_eq) or self._parse_needed:
#Take symbolic gradient of all inputs using sympy
try:
@@ -615,7 +624,12 @@ def evaluate_gradient(self, stepsize=1.0e-6, wrt=None, scope=None):
# to mess with everything that's is in self._parse
grad_text = self.cached_grad_eq[var]
for name in inputs:
- grad_text = grad_text.replace(name, str(scope.get(name)))
+ if '[' in name:
+ new_expr = ExprEvaluator(name, scope)
+ replace_val = new_expr.evaluate()
+ else:
+ replace_val = scope.get(name)
+ grad_text = grad_text.replace(name, str(replace_val))
grad_root = ast.parse(grad_text, mode='eval')
grad_code = compile(grad_root, '<string>', 'eval')
@@ -628,13 +642,19 @@ def evaluate_gradient(self, stepsize=1.0e-6, wrt=None, scope=None):
var_dict = {}
grad_text = self.text
for name in inputs:
+ if '[' in name:
+ new_expr = ExprEvaluator(name, scope)
+ replace_val = new_expr.evaluate()
+ else:
+ replace_val = scope.get(name)
+
if name==var:
- var_dict[name] = scope.get(name)
+ var_dict[name] = replace_val
new_name = "var_dict['%s']" % name
grad_text = grad_text.replace(name, new_name)
else:
# If we don't need derivative of a var, replace with its value
- grad_text = grad_text.replace(name, str(scope.get(name)))
+ grad_text = grad_text.replace(name, str(replace_val))
grad_root = ast.parse(grad_text, mode='eval')
grad_code = compile(grad_root, '<string>', 'eval')
View
23 openmdao.main/src/openmdao/main/test/test_evalexpr.py
@@ -464,12 +464,6 @@ def test_eval_gradient(self):
self.assertEqual(top.comp1.c, 7.0)
assert_rel_error(self, grad['comp1.c'], 3.0, 0.00001)
- # Uncomment these when arrays work
- #exp = ExprEvaluator('4.0*comp1.x_array[1]', top.driver)
- #grad = exp.evaluate_gradient(scope=top)
- #print grad
- #assert_rel_error(self, grad['comp1.x_array[1]'], 4.0, 0.00001)
-
# Commented out this test, until we find a case that can't be
# handled analytically
# interface test: step size
@@ -513,10 +507,21 @@ def test_eval_gradient(self):
assert_rel_error(self, grad['comp2.a'], g2, 0.00001)
assert_rel_error(self, grad['comp1.c'], g3, 0.00001)
- exp = ExprEvaluator('gamma(comp2.a)', top.driver) #sympy fails; requires finite difference
+ #exp = ExprEvaluator('gamma(comp2.a)', top.driver) #sympy fails; requires finite difference
+ #grad = exp.evaluate_gradient(scope=top)
+ #g1=gamma(top.comp2.a)*polygamma(0,top.comp2.a) #true partial derivative
+ #assert_rel_error(self, grad['comp2.a'], g1, 0.001)
+
+ def test_eval_gradient_array(self):
+ top = set_as_top(Assembly())
+ top.add('comp1', A())
+ top.run()
+
+ # Array element references now work in gradients; exercise them here
+ exp = ExprEvaluator('4.0*comp1.b2d[0][1]*comp1.b2d[1][1]', top.driver)
grad = exp.evaluate_gradient(scope=top)
- g1=gamma(top.comp2.a)*polygamma(0,top.comp2.a) #true partial derivative
- assert_rel_error(self, grad['comp2.a'], g1, 0.001)
+ assert_rel_error(self, grad['comp1.b2d[0][1]'], 12.0, 0.00001)
+ assert_rel_error(self, grad['comp1.b2d[1][1]'], 4.0, 0.00001)
def test_scope_transform(self):
exp = ExprEvaluator('myvar+abs(comp.x)*a.a1d[2]', self.top)
Please sign in to comment.
Something went wrong with that request. Please try again.