
Commit

Merge dec8492 into 192830f
hschilling committed Aug 10, 2017
2 parents 192830f + dec8492 · commit d05997f
Showing 13 changed files with 741 additions and 86 deletions.
8 changes: 4 additions & 4 deletions .travis.yml
@@ -1,6 +1,6 @@
branches:
only:
- master
- master

sudo: false

@@ -28,10 +28,10 @@ addons:
- openmpi-bin

notifications:
email: false
slack:
on_success: never
secure: lwAjQyviKvouLSTm2xO+AqjY3B8/5jkr/1dreeUz60T4E00llatrgZTXX6m04D2PusKvWudYPl8KLqhqawTVpM0CSOJODmbRyrn9MUxV/UlOeFTVwtl6c1EgWxoHTLh47XMnGhUh2z9Otz/2GLfMQteXXmxguX0KgNy+s++hHP8ycPDWjZ7BUVkNT+kJn6l5JYLsIW5dR/06W3Wg32cEzo7kP5tAWY4yH3YAF1tyBe1BSmMLP38IwtQKTBef6QUAaXwBgVBqFBF/h0sWgjKdwzqSMUCA1J6yrpeYHEC6Wvmwl7Yj3Lqgs6y3BNZ6v1dV1NUqPa+PC35QL043Btvv/hOAyCx24Srt1YnNT7bJg+rverIBoKKd6U2cFeBWCDSaKtpMXQyA61D/MReEGND3mtnH1mq7DcvF7cDchMAU5UH9qAHP02wbX7FLdfqs2P+X8+pA+m0ip5xRKFu3AdBUkPG6Eaoyln3a0O9SDep3gF+pgBoemGwawSazHafJjqh/tmFsQARnI8UsAIBNyWT3iK0518r4sGFmRH57csNzICBCn9fPDtQ+MRJk9mu/zLBTNI8ms/2l7mMe74hewtTWaSPvCTZd4Woq1A5Bexd4JlvFOfkyiQZgBNI97qhly9OjqW7M3UiIDlugAR4UZw396DhQ4WVq3xPC5lFD9jAwjfk=
secure: "TnJSd6Mu99tcACmPVSpsLI6hfYxecLCare+ileiuJa8foIzbgna/cL6/tyPUGpLGgSZkxGXoBSRXZJHonZNa7DYXmfhxBblHa0fwgdOhk4GAEDiiOUa2agaoh4F3EoTOuQIfN0eCprItfewIJtT3CHEh+PuWLifclRU9G/6byldDHKy8dPPinrLtMy91RFcXzcBFBev7EEJtJDL6odhkVsJQjmtf9E7+Up3n/tSL+egllYC0H6P7P4nCIjrNuBNDVYtfqb5QbvUgfuTFVcqMJTMsams04taYQhT4UVvVpB0rBwtP5S6w2QdAr/QTvi06ZIuFsgVAdFmRRNliXLG2qeWbQ44hWT36aehhVHvfyLuC9Bjbo5+OhKcA3qmTzCaG4JoYbsEXPVp0b31m4EKGl+yLIUoV1Zp46MTM+llWny4/mDn6o/gmtmtCpZ8S1EYDniwluelLiu3u/ZkFnadsn++ZS7GkAPkY8VpI5lpY3B2jPsAOJLaLNMHSk8oa51LAPg+2/MqCDpgCFsZUctNXdIkYR6Votts5KpDabcVFpC2NtfQLWdRh4CsvpiLcSJigUe+4aLRqxZgaahsTuL6LeSrDcHR+gqn4KYvKl4E7pUQZX+i8wRYoDvFTMIAx/F0dDZ+U+Rzr5bVORuvNc9lDjuB57vjEIvZ+djPDtBkENwo="
on_success: always
on_failure: always

before_install:
- OS=$(if [[ "$TRAVIS_OS_NAME" = "osx" ]]; then echo "MacOSX"; else echo "Linux"; fi)
17 changes: 6 additions & 11 deletions openmdao/core/driver.py
@@ -3,10 +3,10 @@

import numpy as np

from openmdao.utils.record_util import create_local_meta
from openmdao.utils.options_dictionary import OptionsDictionary
from openmdao.recorders.recording_manager import RecordingManager
from openmdao.recorders.recording_iteration_stack import Recording
from openmdao.utils.record_util import create_local_meta
from openmdao.utils.options_dictionary import OptionsDictionary


class Driver(object):
@@ -113,14 +113,9 @@ def _setup_driver(self, problem):
self._cons = model.get_constraints(recurse=True)

self._rec_mgr.startup(self)

# Only do this for now in serial. Parallel does not work yet.
from openmdao.utils.mpi import MPI

if not MPI:
if (self._rec_mgr._recorders):
from openmdao.devtools.problem_viewer.problem_viewer import _get_viewer_data
self._model_viewer_data = _get_viewer_data(problem)
if (self._rec_mgr._recorders):
from openmdao.devtools.problem_viewer.problem_viewer import _get_viewer_data
self._model_viewer_data = _get_viewer_data(problem)
self._rec_mgr.record_metadata(self)

def get_design_var_values(self, filter=None):
@@ -213,7 +208,7 @@ def get_response_values(self, filter=None):
Dictionary containing values of each response.
"""
# TODO: finish this method when we have a driver that requires it.
pass
return {}

def get_objective_values(self, filter=None):
"""
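
Aside (not part of the commit): the switch from pass to return {} in get_response_values keeps the method's documented dict return type even while the method is unimplemented, so callers can iterate the result without a None check. A minimal sketch of that difference; the stub name below is hypothetical:

def get_response_values_stub():
    # Placeholder mirroring the diff: return an empty dict rather than None.
    return {}

for name, value in get_response_values_stub().items():  # safe even when empty
    print(name, value)
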
16 changes: 16 additions & 0 deletions openmdao/core/system.py
@@ -2834,6 +2834,9 @@ def add_recorder(self, recorder):
recorder : <BaseRecorder>
A recorder instance.
"""
if MPI:
raise RuntimeError(
"Recording of Systems when running parallel code is not supported yet")
self._rec_mgr.append(recorder)

def record_iteration(self):
@@ -2844,6 +2847,19 @@ def record_iteration(self):
self._rec_mgr.record_iteration(self, metadata)
self.iter_count += 1

def is_active(self):
"""
Find out if the current system has a valid MPI communicator.

Returns
-------
bool
If running under MPI, returns True if this `System` has a valid
communicator. Always returns True if not running under MPI.
"""
return MPI is None or not (self.comm is None or
self.comm == MPI.COMM_NULL)


def _get_vec_names(voi_dict):
return set(voi for voi, data in iteritems(voi_dict)
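
Aside (not part of the commit): the new is_active() method treats a System as active unless it is running under MPI with a missing or null communicator. A minimal standalone sketch of that pattern, using mpi4py directly as a stand-in for openmdao.utils.mpi (an assumption here; comm stands in for a System's self.comm):

try:
    from mpi4py import MPI  # may be absent on serial installs
except ImportError:
    MPI = None


def is_active(comm):
    # True unless running under MPI with a missing or null communicator.
    return MPI is None or not (comm is None or comm == MPI.COMM_NULL)


if __name__ == '__main__':
    # Without mpi4py this always prints True; under MPI it reflects the
    # communicator actually handed to this rank.
    print(is_active(MPI.COMM_WORLD if MPI else None))
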
70 changes: 70 additions & 0 deletions openmdao/recorders/base_recorder.py
@@ -11,6 +11,7 @@
from openmdao.solvers.solver import Solver, NonlinearSolver
from openmdao.recorders.recording_iteration_stack import recording_iteration_stack, \
get_formatted_iteration_coordinate
from openmdao.utils.mpi import MPI


class BaseRecorder(object):
@@ -372,6 +373,10 @@ def record_iteration(self, object_requesting_recording, metadata, **kwargs):
**kwargs : keyword args
Some implementations of record_iteration need additional args.
"""
if not self._parallel:
if MPI and MPI.COMM_WORLD.rank > 0:
raise RuntimeError("Non-parallel recorders should not be recording on ranks > 0")

self._counter += 1

self._iteration_coordinate = get_formatted_iteration_coordinate()
@@ -387,6 +392,71 @@ def record_iteration(self, object_requesting_recording, metadata, **kwargs):
else:
raise ValueError("Recorders must be attached to Drivers, Systems, or Solvers.")

def record_iteration_driver_passing_vars(self, object_requesting_recording, desvars, responses,
objectives, constraints, metadata):
"""
Record an iteration using the Driver options.
Parameters
----------
object_requesting_recording : object
The Driver in need of recording.
metadata : dict, optional
Dictionary containing execution metadata.
desvars: dict
The design variables of the Driver being recorded.
responses: dict
The responses of the Driver being recorded.
objectives: dict
The objectives of the Driver being recorded.
constraints: dict
The constraints of the Driver being recorded.
"""
# TODO: this code and the same code in record_iteration should be in a separate method
if not self._parallel:
if MPI and MPI.COMM_WORLD.rank > 0:
raise RuntimeError("Non-parallel recorders should not be recording on ranks > 0")

self._counter += 1
self._iteration_coordinate = get_formatted_iteration_coordinate()

if self.options['record_desvars']:
if self._filtered_driver:
self._desvars_values = \
{name: desvars[name] for name in self._filtered_driver['des']}
else:
self._desvars_values = desvars
else:
self._desvars_values = None

# Cannot handle responses yet
# if self.options['record_responses']:
# if self._filtered_driver:
# self._responses_values = \
# {name: responses[name] for name in self._filtered_driver['res']}
# else:
# self._responses_values = responses
# else:
# self._responses_values = None

if self.options['record_objectives']:
if self._filtered_driver:
self._objectives_values = \
{name: objectives[name] for name in self._filtered_driver['obj']}
else:
self._objectives_values = objectives
else:
self._objectives_values = None

if self.options['record_constraints']:
if self._filtered_driver:
self._constraints_values = \
{name: constraints[name] for name in self._filtered_driver['con']}
else:
self._constraints_values = constraints
else:
self._constraints_values = None

def record_iteration_driver(self, object_requesting_recording, metadata):
"""
Record an iteration using the driver options.
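
Aside (not part of the commit): record_iteration_driver_passing_vars applies the same three-way pattern to desvars, objectives, and constraints: record nothing when the option is off, a name-filtered dict when a driver filter is set, otherwise the full dict. A minimal sketch of that pattern; filter_values, record_flag, and allowed are hypothetical stand-ins for the recorder options and _filtered_driver entries:

def filter_values(values, record_flag, allowed=None):
    # Option off -> None; filter set -> only the allowed names; else everything.
    if not record_flag:
        return None
    if allowed is not None:
        return {name: values[name] for name in allowed}
    return values


desvars = {'x': 1.0, 'y': 2.0, 'z': 3.0}
print(filter_values(desvars, True, allowed=['x', 'z']))  # {'x': 1.0, 'z': 3.0}
print(filter_values(desvars, False))                     # None
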
6 changes: 5 additions & 1 deletion openmdao/recorders/recording_iteration_stack.py
@@ -1,4 +1,5 @@
"""Management of iteration stack for recording."""
from openmdao.utils.mpi import MPI

recording_iteration_stack = []

@@ -27,7 +28,10 @@ def get_formatted_iteration_coordinate():
for name, iter_count in recording_iteration_stack:
iteration_coord_list.append('{}{}{}'.format(name, separator, iter_count))

rank = 0 # TODO_PARALLEL needs to be updated when we go parallel
if MPI and MPI.COMM_WORLD.rank > 0:
rank = MPI.COMM_WORLD.rank
else:
rank = 0
formatted_iteration_coordinate = ':'.join(["rank%d" % rank,
separator.join(iteration_coord_list)])
return formatted_iteration_coordinate
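
Aside (not part of the commit): the updated get_formatted_iteration_coordinate prefixes the coordinate with the actual MPI rank instead of a hard-coded rank 0. A minimal sketch of how the string is assembled; the '|' separator and the stack contents are assumptions for illustration, and only the join structure mirrors the diff:

separator = '|'
recording_iteration_stack = [('Driver', 1), ('root._solve_nonlinear', 1)]

rank = 0  # under MPI this would come from MPI.COMM_WORLD.rank
coord_list = ['{}{}{}'.format(name, separator, count)
              for name, count in recording_iteration_stack]
print(':'.join(['rank%d' % rank, separator.join(coord_list)]))
# -> rank0:Driver|1|root._solve_nonlinear|1
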
