"""
Surrogate Model based on second order response surface equations.
"""
from numpy import zeros, einsum
from numpy.dual import lstsq
from openmdao.surrogate_models.surrogate_model import SurrogateModel
class ResponseSurface(SurrogateModel):
    """
    Surrogate model based on second order (full quadratic) response surface equations.

    Attributes
    ----------
    betas : ndarray
        Vector of response surface equation coefficients.
    m : int
        Number of training points.
    n : int
        Number of independent variables.
    """

    def __init__(self):
        """
        Initialize all attributes.
        """
        super().__init__()

        self.m = 0  # number of training points
        self.n = 0  # number of independents
        # vector of response surface equation coefficients
        self.betas = zeros(0)

    def train(self, x, y):
        """
        Calculate response surface equation coefficients using least squares regression.

        Parameters
        ----------
        x : array-like
            Training input locations, shape (m, n).
        y : array-like
            Model responses at given inputs.
        """
        # numpy.dual (the module-level source of lstsq in older revisions of this
        # file) was removed in NumPy 1.20; import from numpy.linalg instead.
        from numpy.linalg import lstsq

        super().train(x, y)

        m = self.m = x.shape[0]
        n = self.n = x.shape[1]

        # Full quadratic model in n variables has (n+1)(n+2)/2 terms:
        # 1 constant + n linear + n(n+1)/2 squared/cross terms.
        X = zeros((m, ((n + 1) * (n + 2)) // 2))

        # Modify X to include constant, squared terms and cross terms

        # Constant Terms
        X[:, 0] = 1.0

        # Linear Terms
        X[:, 1:n + 1] = x

        # Quadratic Terms: for each i, the products x_i * x_j for j >= i.
        X_offset = X[:, n + 1:]
        for i in range(n):
            # Z = einsum('i,ij->ij', X, Y) is equivalent to, but much faster and
            # memory efficient than, diag(X).dot(Y) for vector X and 2D array Y.
            # I.e. Z[i,j] = X[i]*Y[i,j]
            X_offset[:, :n - i] = einsum('i,ij->ij', x[:, i], x[:, i:])
            X_offset = X_offset[:, n - i:]

        # Determine response surface equation coefficients (betas) using least
        # squares.  rcond=None opts into the modern machine-precision-based
        # cutoff and avoids the FutureWarning raised by the legacy default.
        self.betas, _, _, _ = lstsq(X, y, rcond=None)

    def predict(self, x):
        """
        Calculate predicted value of response based on the current response surface model.

        Parameters
        ----------
        x : array-like
            Point at which the surrogate is evaluated.

        Returns
        -------
        ndarray
            Predicted response (X.dot(betas)).
        """
        super().predict(x)

        n = x.size

        X = zeros(((self.n + 1) * (self.n + 2)) // 2)

        # Modify X to include constant, squared terms and cross terms

        # Constant Terms
        X[0] = 1.0

        # Linear Terms
        X[1:n + 1] = x

        # Quadratic Terms, laid out in the same order used in train().
        X_offset = X[n + 1:]
        for i in range(n):
            X_offset[:n - i] = x[i] * x[i:]
            X_offset = X_offset[n - i:]

        # Predict new_y using X and betas
        return X.dot(self.betas)

    def linearize(self, x):
        """
        Calculate the jacobian of the response surface at the requested point.

        Parameters
        ----------
        x : array-like
            Point at which the surrogate Jacobian is evaluated.

        Returns
        -------
        ndarray
            Jacobian of surrogate output wrt inputs.
        """
        n = self.n
        betas = self.betas

        x = x.flat

        # Derivative of the linear terms is just their coefficients.
        jac = betas[1:n + 1, :].copy()

        # Quadratic terms x_i * x_j (j >= i) contribute via the product rule:
        # d/dx_i adds x_j * beta_ij, and d/dx_j adds x_i * beta_ij.
        beta_offset = betas[n + 1:, :]
        for i in range(n):
            jac[i, :] += x[i:].dot(beta_offset[:n - i, :])
            jac[i:, :] += x[i] * beta_offset[:n - i, :]
            beta_offset = beta_offset[n - i:, :]

        return jac.T