-
Notifications
You must be signed in to change notification settings - Fork 240
/
doe_driver.py
278 lines (226 loc) · 8.82 KB
/
doe_driver.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
"""
Design-of-Experiments Driver.
"""
import traceback
import inspect
import numpy as np
from openmdao.core.driver import Driver, RecordingDebugging
from openmdao.core.analysis_error import AnalysisError
from openmdao.drivers.doe_generators import DOEGenerator, ListGenerator
from openmdao.utils.mpi import MPI
from openmdao.recorders.sqlite_recorder import SqliteRecorder
class DOEDriver(Driver):
    """
    Design-of-Experiments Driver.

    Runs the model once per case produced by a DOEGenerator, recording each
    iteration. Under MPI the cases may be distributed across processors.

    Parameters
    ----------
    generator : DOEGenerator, list or None
        The case generator or a list of DOE cases.
    **kwargs : dict of keyword arguments
        Keyword arguments that will be mapped into the Driver options.

    Attributes
    ----------
    _name : str
        The name used to identify this driver in recorded cases.
    _problem_comm : MPI.Comm or None
        The MPI communicator for the Problem.
    _color : int or None
        In MPI, the cached color is used to determine which cases to run on this proc.
    """

    def __init__(self, generator=None, **kwargs):
        """
        Construct a DOEDriver.

        Parameters
        ----------
        generator : DOEGenerator, list or None
            The case generator, or a plain list of DOE cases which is wrapped
            in a ListGenerator.
        **kwargs : dict
            Keyword arguments mapped into the Driver options.

        Raises
        ------
        TypeError
            If `generator` is neither a DOEGenerator instance nor a list
            (e.g. a DOEGenerator class object was passed instead of an instance).
        """
        # if given a list, create a ListGenerator
        if isinstance(generator, list):
            generator = ListGenerator(generator)
        elif generator and not isinstance(generator, DOEGenerator):
            # distinguish "passed the class instead of an instance" from
            # "passed something unrelated" for a clearer error message
            if inspect.isclass(generator):
                raise TypeError("DOEDriver requires an instance of DOEGenerator, "
                                "but a class object was found: %s"
                                % generator.__name__)
            else:
                raise TypeError("DOEDriver requires an instance of DOEGenerator, "
                                "but an instance of %s was found."
                                % type(generator).__name__)

        super().__init__(**kwargs)

        # What we support
        self.supports['integer_design_vars'] = True

        # What we don't support
        self.supports['distributed_design_vars'] = False
        self.supports._read_only = True

        if generator is not None:
            self.options['generator'] = generator

        self._name = ''
        self._problem_comm = None
        self._color = None

    def _declare_options(self):
        """
        Declare options before kwargs are processed in the init method.
        """
        # NOTE: types must be a type or tuple of types; the original code had
        # `(DOEGenerator)` which is just a parenthesized name, not a tuple.
        self.options.declare('generator', types=(DOEGenerator,), default=DOEGenerator(),
                             desc='The case generator. If default, no cases are generated.')
        self.options.declare('run_parallel', types=bool, default=False,
                             desc='Set to True to execute cases in parallel.')
        self.options.declare('procs_per_model', types=int, default=1, lower=1,
                             desc='Number of processors to give each model under MPI.')

    def _setup_comm(self, comm):
        """
        Perform any driver-specific setup of communicators for the model.

        Parameters
        ----------
        comm : MPI.Comm or <FakeComm> or None
            The communicator for the Problem.

        Returns
        -------
        MPI.Comm or <FakeComm> or None
            The communicator for the Problem model.

        Raises
        ------
        RuntimeError
            If the total processor count is not a multiple of 'procs_per_model'.
        """
        self._problem_comm = comm

        if MPI:
            procs_per_model = self.options['procs_per_model']

            full_size = comm.size
            size = full_size // procs_per_model
            if full_size != size * procs_per_model:
                raise RuntimeError("The total number of processors is not evenly divisible by the "
                                   "specified number of processors per model.\n Provide a "
                                   "number of processors that is a multiple of %d, or "
                                   "specify a number of processors per model that divides "
                                   "into %d." % (procs_per_model, full_size))

            # each group of procs_per_model procs gets one color; the cached
            # color selects which cases this proc runs (see _parallel_generator)
            color = self._color = comm.rank % size
            model_comm = comm.Split(color)
        else:
            model_comm = comm

        return model_comm

    def _set_name(self):
        """
        Set the name of this DOE driver and its case generator.

        Returns
        -------
        str
            The name of this DOE driver and its case generator.
        """
        generator = self.options['generator']

        gen_type = type(generator).__name__.replace('Generator', '')

        # After stripping the 'Generator' suffix, the base (empty) DOEGenerator
        # reduces to 'DOE'.  Comparing against 'DOEGenerator' here (as the old
        # code did) could never match, so the empty generator was mislabeled
        # 'DOEDriver_DOE' instead of plain 'DOEDriver'.
        if gen_type == 'DOE':
            self._name = 'DOEDriver'  # Empty generator
        else:
            self._name = 'DOEDriver_' + gen_type

        return self._name

    def _get_name(self):
        """
        Get the name of this DOE driver and its case generator.

        Returns
        -------
        str
            The name of this DOE driver and its case generator.
        """
        return self._name

    def run(self):
        """
        Generate cases and run the model for each set of generated input values.

        Returns
        -------
        bool
            Failure flag; True if failed to converge, False is successful.
        """
        self.iter_count = 0

        # set driver name with current generator
        self._set_name()

        if MPI and self.options['run_parallel']:
            case_gen = self._parallel_generator
        else:
            case_gen = self.options['generator']

        for case in case_gen(self._designvars, self._problem().model):
            self._run_case(case)
            self.iter_count += 1

        return False

    def _run_case(self, case):
        """
        Run case, save exception info and mark the metadata if the case fails.

        Parameters
        ----------
        case : list
            list of name, value tuples for the design variables.

        Raises
        ------
        ValueError
            If a design variable value cannot be assigned; the original error
            is chained for context.
        """
        metadata = {}

        for dv_name, dv_val in case:
            try:
                if isinstance(dv_val, np.ndarray):
                    self.set_design_var(dv_name, dv_val.flatten())
                else:
                    self.set_design_var(dv_name, dv_val)
            except ValueError as err:
                # identify the offending variable and chain the original error
                # (replaces the old msg/finally sentinel, which discarded the
                # exception context)
                raise ValueError("Error assigning %s = %s: %s"
                                 % (dv_name, dv_val, err)) from err

        with RecordingDebugging(self._get_name(), self.iter_count, self):
            try:
                self._problem().model.run_solve_nonlinear()
                metadata['success'] = 1
                metadata['msg'] = ''
            except AnalysisError:
                # expected/recoverable model failure: record but stay quiet
                metadata['success'] = 0
                metadata['msg'] = traceback.format_exc()
            except Exception:
                # unexpected failure: record and also echo the traceback
                metadata['success'] = 0
                metadata['msg'] = traceback.format_exc()
                print(metadata['msg'])

        # save reference to metadata for use in record_iteration
        self._metadata = metadata

    def _parallel_generator(self, design_vars, model=None):
        """
        Generate case for this processor when running under MPI.

        Parameters
        ----------
        design_vars : dict
            Dictionary of design variables for which to generate values.
        model : Group
            The model containing the design variables (used by some generators).

        Yields
        ------
        list
            list of name, value tuples for the design variables.
        """
        size = self._problem_comm.size // self.options['procs_per_model']
        color = self._color

        generator = self.options['generator']
        # round-robin: proc group `color` takes every size-th case
        for i, case in enumerate(generator(design_vars, model)):
            if i % size == color:
                yield case

    def _setup_recording(self):
        """
        Set up case recording.
        """
        if MPI:
            procs_per_model = self.options['procs_per_model']

            for recorder in self._rec_mgr:
                recorder._parallel = True

                # if SqliteRecorder, write cases only on procs up to the number
                # of parallel DOEs (i.e. on the root procs for the cases)
                if isinstance(recorder, SqliteRecorder):
                    if procs_per_model == 1:
                        recorder._record_on_proc = True
                    else:
                        size = self._problem_comm.size // procs_per_model
                        if self._problem_comm.rank < size:
                            recorder._record_on_proc = True
                        else:
                            recorder._record_on_proc = False

        super()._setup_recording()

    def _get_recorder_metadata(self, case_name):
        """
        Return metadata from the latest iteration for use in the recorder.

        Parameters
        ----------
        case_name : str
            Name of current case.

        Returns
        -------
        dict
            Metadata dictionary for the recorder.
        """
        self._metadata['name'] = case_name
        return self._metadata