-
Notifications
You must be signed in to change notification settings - Fork 706
/
mfnn.py
104 lines (90 loc) · 3.21 KB
/
mfnn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from . import activations
from . import initializers
from . import regularizers
from .map import Map
from .. import config
from ..utils import timing
class MfNN(Map):
    """Multifidelity neural network.

    Two stacked subnetworks: a low-fidelity network mapping the input to a
    low-fidelity prediction, and a high-fidelity network that corrects it with
    a linear term plus a nonlinear term, blended by trainable tanh-squashed
    scalar weights.
    """

    def __init__(
        self,
        layer_size_low_fidelity,
        layer_size_high_fidelity,
        activation,
        kernel_initializer,
        regularization=None,
        residue=False,
    ):
        self.layer_size_lo = layer_size_low_fidelity
        self.layer_size_hi = layer_size_high_fidelity
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.regularizer = regularizers.get(regularization)
        self.residue = residue
        super(MfNN, self).__init__()

    @property
    def inputs(self):
        # Single placeholder shared by both fidelity levels.
        return self.X

    @property
    def outputs(self):
        # Low- and high-fidelity predictions, in that order.
        return [self.y_lo, self.y_hi]

    @property
    def targets(self):
        # Placeholders for the training targets, matching `outputs` order.
        return [self.target_lo, self.target_hi]

    @timing
    def build(self):
        """Construct the TF graph: input placeholder, both subnetworks, and
        the target placeholders."""
        print("Building multifidelity neural network...")
        self.X = tf.placeholder(config.real(tf), [None, self.layer_size_lo[0]])

        # Low-fidelity subnetwork: entries 1..-2 are hidden-layer widths,
        # the last entry is the output width (linear output layer).
        hidden = self.X
        for width in self.layer_size_lo[1:-1]:
            hidden = self.dense(
                hidden,
                width,
                activation=self.activation,
                regularizer=self.regularizer,
            )
        self.y_lo = self.dense(
            hidden, self.layer_size_lo[-1], regularizer=self.regularizer
        )

        # High-fidelity subnetwork sees the input and the low-fidelity output.
        X_hi = tf.concat([self.X, self.y_lo], 1)
        # Linear correction (single dense layer, no activation).
        y_hi_l = self.dense(X_hi, self.layer_size_hi[-1])
        # Nonlinear correction: all but the last entry are hidden widths
        # (the concat above fixes the input width, so no entry is skipped).
        hidden = X_hi
        for width in self.layer_size_hi[:-1]:
            hidden = self.dense(
                hidden,
                width,
                activation=self.activation,
                regularizer=self.regularizer,
            )
        y_hi_nl = self.dense(
            hidden, self.layer_size_hi[-1], use_bias=False, regularizer=self.regularizer
        )

        # Combine the two corrections; each blend weight is a trainable
        # scalar squashed into (-1, 1) by tanh.
        if self.residue:
            alpha1 = activations.get("tanh")(tf.Variable(0, dtype=config.real(tf)))
            alpha2 = activations.get("tanh")(tf.Variable(0, dtype=config.real(tf)))
            self.y_hi = self.y_lo + 0.1 * (alpha1 * y_hi_l + alpha2 * y_hi_nl)
        else:
            alpha = activations.get("tanh")(tf.Variable(0, dtype=config.real(tf)))
            self.y_hi = y_hi_l + alpha * y_hi_nl

        self.target_lo = tf.placeholder(config.real(tf), [None, self.layer_size_lo[-1]])
        self.target_hi = tf.placeholder(config.real(tf), [None, self.layer_size_hi[-1]])

    def dense(self, inputs, units, activation=None, use_bias=True, regularizer=None):
        """Dense layer preconfigured with this network's kernel initializer."""
        return tf.layers.dense(
            inputs,
            units,
            activation=activation,
            use_bias=use_bias,
            kernel_initializer=self.kernel_initializer,
            kernel_regularizer=regularizer,
        )