Merge b203177 into 63dec65
Kenneth-T-Moore committed May 19, 2020
2 parents 63dec65 + b203177 commit 668dd2d
Showing 7 changed files with 353 additions and 65 deletions.
69 changes: 47 additions & 22 deletions openmdao/core/driver.py
@@ -64,9 +64,9 @@ class Driver(object):
Contains all design variable info.
_designvars_discrete : list
List of design variables that are discrete.
_distributed_cons : dict
Dict of constraints that are distributed outputs. Values are
(owning rank, size).
_distributed_resp : dict
Dict of responses that are distributed outputs. Keys are variable names; values are
(local indices, local sizes).
_cons : dict
Contains all constraint info.
_objs : dict
@@ -277,10 +277,10 @@ def _setup_driver(self, problem):
obj_set = set()
dv_set = set()

self._remote_dvs = dv_dict = {}
self._remote_cons = con_dict = {}
self._distributed_cons = dist_con_dict = {}
self._remote_objs = obj_dict = {}
self._remote_dvs = remote_dv_dict = {}
self._remote_cons = remote_con_dict = {}
self._distributed_resp = dist_resp_dict = {}
self._remote_objs = remote_obj_dict = {}

# Now determine if later we'll need to allgather cons, objs, or desvars.
if model.comm.size > 1 and model._subsystems_allprocs:
@@ -310,16 +310,43 @@ def _setup_driver(self, problem):
sz = sizes[owner, i]

if vname in dv_set:
dv_dict[vname] = (owner, sz)
elif distributed:
remote_dv_dict[vname] = (owner, sz)

# Note that design vars are not distributed.
elif distributed and vname in self._responses:
idx = model._var_allprocs_abs2idx['nonlinear'][vname]
dist_sizes = model._var_sizes['nonlinear']['output'][:, idx]
dist_con_dict[vname] = (idx, dist_sizes)

# Determine which indices are on our proc.
rank = model.comm.rank
size = dist_sizes.size
offsets = np.cumsum(dist_sizes)

resp_dict = self._responses[vname]
indices = resp_dict['indices']

if indices is not None:
local_indices = []
true_sizes = np.zeros(size, dtype=INT_DTYPE)
for index in indices:
if index < 0:
# Support for negative indices. Convert to positive index.
index = index + np.sum(dist_sizes)
irank = np.argwhere(offsets > index)[0][0]
true_sizes[irank] += 1
if rank == irank:
new_index = index - offsets[irank] + dist_sizes[irank]
local_indices.append(new_index)

indices = local_indices
dist_sizes = true_sizes

dist_resp_dict[vname] = (indices, dist_sizes)

if vname in con_set:
con_dict[vname] = (owner, sz)
remote_con_dict[vname] = (owner, sz)
if vname in obj_set:
obj_dict[vname] = (owner, sz)
remote_obj_dict[vname] = (owner, sz)

self._remote_responses = self._remote_cons.copy()
self._remote_responses.update(self._remote_objs)
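
The index-splitting loop added above is compact, so here is a standalone sketch that restates the same offset arithmetic. The function name split_indices_by_rank and the sample sizes/indices are hypothetical, not part of the commit.

import numpy as np

def split_indices_by_rank(indices, dist_sizes, rank):
    """Map global indices of a distributed response to local indices on `rank`.

    dist_sizes[i] is the local length of the output on process i; indices are
    global (possibly negative). Returns (local_indices, true_sizes), where
    true_sizes[i] counts how many of the requested indices live on process i.
    """
    offsets = np.cumsum(dist_sizes)            # end offset of each rank's slice
    true_sizes = np.zeros(dist_sizes.size, dtype=int)
    local_indices = []
    for index in indices:
        if index < 0:                          # negative index: wrap around
            index += np.sum(dist_sizes)
        irank = np.argwhere(offsets > index)[0][0]   # rank that owns this entry
        true_sizes[irank] += 1
        if rank == irank:
            # shift from global numbering into this rank's local numbering
            local_indices.append(int(index - offsets[irank] + dist_sizes[irank]))
    return local_indices, true_sizes

# A distributed output split as [3, 2, 4] across three procs: global index 4
# lives on rank 1 at local position 1, and -1 (global 8) lives on rank 2.
print(split_indices_by_rank([4, -1], np.array([3, 2, 4]), rank=1))
# -> ([1], array([0, 1, 1]))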
@@ -429,8 +456,7 @@ def _setup_recording(self):
for sub in self._problem().model.system_iter(recurse=True, include_self=True):
self._rec_mgr.record_metadata(sub)

def _get_voi_val(self, name, meta, remote_vois, distributed_vars, driver_scaling=True,
rank=None):
def _get_voi_val(self, name, meta, remote_vois, driver_scaling=True, rank=None):
"""
Get the value of a variable of interest (objective, constraint, or design var).
@@ -445,8 +471,6 @@ def _get_voi_val(self, name, meta, remote_vois, distributed_vars, driver_scaling
remote_vois : dict
Dict containing (owning_rank, size) for all remote vois of a particular
type (design var, constraint, or objective).
distributed_vars : dict
Dict containing (indices, sizes) for all distributed responses.
driver_scaling : bool
When True, return values that are scaled according to either the adder and scaler or
the ref and ref0 values that were specified when add_design_var, add_objective, and
@@ -462,6 +486,7 @@ def _get_voi_val(self, name, meta, remote_vois, distributed_vars, driver_scaling
model = self._problem().model
comm = model.comm
vec = model._outputs._views_flat
distributed_vars = self._distributed_resp
indices = meta['indices']

if MPI:
@@ -492,9 +517,9 @@

elif distributed:
local_val = model._get_val(name, flat=True)
if indices is not None:
local_val = local_val[indices]
idx, sizes = distributed_vars[name]
local_indices, sizes = distributed_vars[name]
if local_indices is not None:
local_val = local_val[local_indices]
offsets = np.zeros(sizes.size, dtype=INT_DTYPE)
offsets[1:] = np.cumsum(sizes[:-1])
val = np.zeros(np.sum(sizes))
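
The hunk is cut off here before the actual gather. The snippet below is a minimal, MPI-free sketch of what the offsets computed above are for: each rank's (possibly index-filtered) local slice lands at offsets[rank] in the full array, with the real placement presumably done by an Allgatherv-style collective just past the truncation. All values are hypothetical.

import numpy as np

# Per-rank local values for one distributed response (hypothetical).
local_vals = [np.array([1., 2.]), np.array([]), np.array([3.])]
sizes = np.array([v.size for v in local_vals])        # [2, 0, 1]

offsets = np.zeros(sizes.size, dtype=int)
offsets[1:] = np.cumsum(sizes[:-1])                   # start of each rank's slice
val = np.zeros(np.sum(sizes))

for rank, local_val in enumerate(local_vals):         # stand-in for the MPI gather
    val[offsets[rank]:offsets[rank] + sizes[rank]] = local_val

print(val)                                            # [1. 2. 3.]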
@@ -541,7 +566,7 @@ def get_design_var_values(self):
dict
Dictionary containing values of each design variable.
"""
return {n: self._get_voi_val(n, dv, self._remote_dvs, {})
return {n: self._get_voi_val(n, dv, self._remote_dvs)
for n, dv in self._designvars.items()}

def set_design_var(self, name, value):
@@ -613,7 +638,8 @@ def get_objective_values(self, driver_scaling=True):
dict
Dictionary containing values of each objective.
"""
return {n: self._get_voi_val(n, obj, self._remote_objs, {}, driver_scaling=driver_scaling)
return {n: self._get_voi_val(n, obj, self._remote_objs,
driver_scaling=driver_scaling)
for n, obj in self._objs.items()}

def get_constraint_values(self, ctype='all', lintype='all', driver_scaling=True):
@@ -653,7 +679,6 @@ def get_constraint_values(self, ctype='all', lintype='all', driver_scaling=True)
continue

con_dict[name] = self._get_voi_val(name, meta, self._remote_cons,
self._distributed_cons,
driver_scaling=driver_scaling)

return con_dict
16 changes: 16 additions & 0 deletions openmdao/core/problem.py
@@ -1345,6 +1345,8 @@ def check_totals(self, of=None, wrt=None, out_stream=_DEFAULT_OUT_STREAM, compac
'step_calc': step_calc,
}
approx = model._owns_approx_jac
approx_of = model._owns_approx_of
approx_wrt = model._owns_approx_wrt
old_jac = model._jacobian
old_subjacs = model._subjacs_info.copy()

@@ -1358,16 +1360,25 @@
if not approx:
model._jacobian = old_jac
model._owns_approx_jac = False
model._owns_approx_of = approx_of
model._owns_approx_wrt = approx_wrt
model._subjacs_info = old_subjacs

# Assemble and Return all metrics.
data = {}
data[''] = {}
resp = self.driver._responses
# TODO key should not be fwd when exact computed in rev mode or auto
for key, val in Jcalc.items():
data[''][key] = {}
data[''][key]['J_fwd'] = val
data[''][key]['J_fd'] = Jfd[key]

# Record how many indices were declared when the response was added.
of = key[0]
if of in resp and resp[of]['indices'] is not None:
data[''][key]['indices'] = len(resp[of]['indices'])

fd_args['method'] = method

if out_stream == _DEFAULT_OUT_STREAM:
@@ -1878,6 +1889,11 @@ def _assemble_derivative_data(derivative_data, rel_error_tol, abs_error_tol, out
del derivative_data[sys_name][rel_key]
continue

# Informative output for responses that were declared with an index.
indices = derivative_info.get('indices')
if indices is not None:
of = '{} (index size: {})'.format(of, indices)

if not suppress_output:

if compact_print:
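
Taken together, the two problem.py changes record the number of declared indices for each response and then fold that count into the printed "of" label. A minimal sketch with hypothetical values (the name 'comp.y' and the count are illustrative only):

# check_totals() stores the count of declared indices per response ...
indices_count = 1                 # e.g. len(resp['comp.y']['indices'])
of = 'comp.y'

# ... and _assemble_derivative_data() annotates the printed label with it.
if indices_count is not None:
    of = '{} (index size: {})'.format(of, indices_count)

print(of)                         # comp.y (index size: 1)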
26 changes: 17 additions & 9 deletions openmdao/core/system.py
@@ -3100,16 +3100,24 @@ def get_responses(self, recurse=True, get_sizes=True):
sizes = self._var_sizes['nonlinear']['output']
abs2idx = self._var_allprocs_abs2idx['nonlinear']
for name in out:
if 'size' not in out[name]:
if name in abs2idx:
out[name]['size'] = sizes[self._owning_rank[name], abs2idx[name]]
else:
out[name]['size'] = 0 # discrete var, we don't know the size
response = out[name]

if name in abs2idx:
meta = self._var_allprocs_abs2meta[name]
out[name]['distributed'] = meta['distributed']
out[name]['global_size'] = meta['global_size']
# Discrete vars
if name not in abs2idx:
response['size'] = 0 # discrete var, we don't know the size
continue

meta = self._var_allprocs_abs2meta[name]
response['distributed'] = meta['distributed']

if response['indices'] is not None:
# Index defined in this response.
response['global_size'] = len(response['indices']) if meta['distributed'] \
else meta['global_size']

else:
response['size'] = sizes[self._owning_rank[name], abs2idx[name]]
response['global_size'] = meta['global_size']

if recurse:
for subsys in self._subsystems_myproc:
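
The sizing rule added above can be summarized as: discrete responses get size 0, a distributed response declared with indices reports only the requested entries as its global size, and everything else falls back to the stored metadata. A small sketch of that rule with hypothetical metadata:

# Hypothetical metadata for a distributed constraint of global size 10.
meta = {'distributed': True, 'global_size': 10}
indices = [1, 4]                  # indices passed to add_constraint

if indices is not None:
    # Distributed responses report only the requested entries.
    global_size = len(indices) if meta['distributed'] else meta['global_size']
else:
    global_size = meta['global_size']

print(global_size)                # 2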
19 changes: 19 additions & 0 deletions openmdao/core/tests/test_check_derivs.py
@@ -2825,6 +2825,25 @@ def compute_partials(self, inputs, partials):
assert_near_equal(J['comp.y', 'p.x']['J_fwd'], [[14.0]], 1e-6)
assert_near_equal(J['comp.y', 'p.x']['J_fd'], [[0.0]], 1e-6)

def test_response_index(self):
prob = om.Problem()
model = prob.model

model.add_subsystem('p', om.IndepVarComp('x', np.ones(2)), promotes=['*'])
model.add_subsystem('comp', om.ExecComp('y=2*x', x=np.ones(2), y=np.ones(2)),
promotes=['*'])

model.add_design_var('x')
model.add_constraint('y', indices=[1], lower=0.0)

prob.setup()
prob.run_model()

stream = StringIO()
prob.check_totals(out_stream=stream)
lines = stream.getvalue().splitlines()
self.assertTrue('index size: 1' in lines[3])


@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestProblemCheckTotalsMPI(unittest.TestCase):
