vladder_small.py
import numpy as np
import tensorflow as tf

from abstract_network import *


class SmallLayers:
    """ Definition of layers for a small variational ladder (VLAE) network """
    def __init__(self, network):
        self.network = network
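
    # Encoder path. inference0 and inference1 compute deterministic features that are
    # passed up to the next level, while ladder0, ladder1 and ladder2 map the features
    # at each level to the mean and (sigmoid-activated) standard deviation of that
    # level's Gaussian latent code. cs[i] holds the channel/unit counts and fs[i] the
    # spatial feature-map sizes configured on the owning network object.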
    def inference0(self, input_x, is_training=True):
        with tf.variable_scope("inference0"):
            conv1 = conv2d_bn_lrelu(input_x, self.network.cs[1], [4, 4], 2, is_training)
            conv2 = conv2d_bn_lrelu(conv1, self.network.cs[2], [4, 4], 2, is_training)
            conv2 = tf.reshape(conv2, [-1, np.prod(conv2.get_shape().as_list()[1:])])
            fc1 = tf.contrib.layers.fully_connected(conv2, self.network.cs[3], activation_fn=tf.identity)
            return fc1

    def ladder0(self, input_x, is_training=True):
        with tf.variable_scope("ladder0"):
            conv1 = conv2d_bn_lrelu(input_x, self.network.cs[1], [4, 4], 2, is_training)
            conv2 = conv2d_bn_lrelu(conv1, self.network.cs[2], [4, 4], 2, is_training)
            conv2 = tf.reshape(conv2, [-1, np.prod(conv2.get_shape().as_list()[1:])])
            fc1_mean = tf.contrib.layers.fully_connected(conv2, self.network.ladder0_dim, activation_fn=tf.identity)
            fc1_stddev = tf.contrib.layers.fully_connected(conv2, self.network.ladder0_dim, activation_fn=tf.sigmoid)
            return fc1_mean, fc1_stddev

    def inference1(self, latent1, is_training=True):
        with tf.variable_scope("inference1"):
            fc1 = fc_bn_lrelu(latent1, self.network.cs[3], is_training)
            fc2 = fc_bn_lrelu(fc1, self.network.cs[3], is_training)
            fc3 = tf.contrib.layers.fully_connected(fc2, self.network.cs[3], activation_fn=tf.identity)
            return fc3

    def ladder1(self, latent1, is_training=True):
        with tf.variable_scope("ladder1"):
            fc1 = fc_bn_lrelu(latent1, self.network.cs[3], is_training)
            fc2 = fc_bn_lrelu(fc1, self.network.cs[3], is_training)
            fc3_mean = tf.contrib.layers.fully_connected(fc2, self.network.ladder1_dim, activation_fn=tf.identity)
            fc3_stddev = tf.contrib.layers.fully_connected(fc2, self.network.ladder1_dim, activation_fn=tf.sigmoid)
            return fc3_mean, fc3_stddev

    def ladder2(self, latent1, is_training=True):
        with tf.variable_scope("ladder2"):
            fc1 = fc_bn_lrelu(latent1, self.network.cs[3], is_training)
            fc2 = fc_bn_lrelu(fc1, self.network.cs[3], is_training)
            fc3_mean = tf.contrib.layers.fully_connected(fc2, self.network.ladder2_dim, activation_fn=tf.identity)
            fc3_stddev = tf.contrib.layers.fully_connected(fc2, self.network.ladder2_dim, activation_fn=tf.sigmoid)
            return fc3_mean, fc3_stddev
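
    # combine_noise merges the deterministic signal from the level above ("latent") with
    # the noise injected through the ladder connection ("ladder"): 'concat' appends along
    # the last axis, 'add' sums the two, and the default 'gated_add' learns an elementwise
    # gate (initialized to 0.1) that scales the ladder contribution before adding it.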
    def combine_noise(self, latent, ladder, method='gated_add', name="default"):
        if method == 'concat':
            return tf.concat(values=[latent, ladder], axis=len(latent.get_shape()) - 1)
        elif method == 'add':
            return latent + ladder
        elif method == 'gated_add':
            gate = tf.get_variable("gate", shape=latent.get_shape()[1:], initializer=tf.constant_initializer(0.1))
            tf.summary.histogram(name + "_noise_gate", gate)
            return latent + tf.multiply(gate, ladder)
        else:
            raise ValueError("Unknown combine_noise method: %s" % method)
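
    # Decoder path. Each generative layer combines the features coming from the level
    # above (latent*) with its own ladder code via combine_noise and maps the result one
    # level down; generative0 finishes with transposed convolutions and rescales the
    # sigmoid output to the dataset's pixel range. reuse=True re-enters the variable
    # scope so a second call shares the same weights.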
    def generative0(self, latent1, ladder0=None, reuse=False, is_training=True):
        with tf.variable_scope("generative0") as gs:
            if reuse:
                gs.reuse_variables()
            if ladder0 is not None:
                ladder0 = fc_bn_lrelu(ladder0, self.network.cs[3], is_training)
                if latent1 is not None:
                    latent1 = self.combine_noise(latent1, ladder0, name="generative0")
                else:
                    latent1 = ladder0
            elif latent1 is None:
                raise ValueError("Generative layer must have input")
            fc1 = fc_bn_relu(latent1, int(self.network.fs[2] * self.network.fs[2] * self.network.cs[2]), is_training)
            fc1 = tf.reshape(fc1, tf.stack([tf.shape(fc1)[0], self.network.fs[2], self.network.fs[2], self.network.cs[2]]))
            conv1 = conv2d_t_bn_relu(fc1, self.network.cs[1], [4, 4], 2, is_training)
            output = tf.contrib.layers.convolution2d_transpose(conv1, self.network.data_dims[-1], [4, 4], 2,
                                                               activation_fn=tf.sigmoid)
            # Rescale the sigmoid output from [0, 1] to the dataset's pixel value range.
            output = (self.network.dataset.range[1] - self.network.dataset.range[0]) * output + self.network.dataset.range[0]
            return output

    def generative1(self, latent2, ladder1=None, reuse=False, is_training=True):
        with tf.variable_scope("generative1") as gs:
            if reuse:
                gs.reuse_variables()
            if ladder1 is not None:
                ladder1 = fc_bn_relu(ladder1, self.network.cs[3], is_training)
                if latent2 is not None:
                    latent2 = self.combine_noise(latent2, ladder1, name="generative1")
                else:
                    latent2 = ladder1
            elif latent2 is None:
                raise ValueError("Generative layer must have input")
            fc1 = fc_bn_relu(latent2, self.network.cs[3], is_training)
            fc2 = fc_bn_relu(fc1, self.network.cs[3], is_training)
            fc3 = tf.contrib.layers.fully_connected(fc2, self.network.cs[3], activation_fn=tf.identity)
            return fc3

    def generative2(self, latent3, ladder2, reuse=False, is_training=True):
        # The top level has no incoming latent from above, so latent3 is unused and the
        # layer is driven entirely by the ladder2 code.
        with tf.variable_scope("generative2") as gs:
            if reuse:
                gs.reuse_variables()
            fc1 = fc_bn_relu(ladder2, self.network.cs[3], is_training)
            fc2 = fc_bn_relu(fc1, self.network.cs[3], is_training)
            fc3 = tf.contrib.layers.fully_connected(fc2, self.network.cs[3], activation_fn=tf.identity)
            return fc3
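

# Illustrative sketch of how the owning network class typically wires these layers
# together (sample() below is a stand-in for the reparameterisation step implemented
# elsewhere; it is not defined in this file):
#
#   layers = SmallLayers(network)
#   f1 = layers.inference0(x)                      # deterministic features, level 0 -> 1
#   f2 = layers.inference1(f1)                     # deterministic features, level 1 -> 2
#   mu0, sd0 = layers.ladder0(x)                   # level-0 latent parameters
#   mu1, sd1 = layers.ladder1(f1)                  # level-1 latent parameters
#   mu2, sd2 = layers.ladder2(f2)                  # level-2 latent parameters
#   z0, z1, z2 = sample(mu0, sd0), sample(mu1, sd1), sample(mu2, sd2)
#   h2 = layers.generative2(None, z2)
#   h1 = layers.generative1(h2, ladder1=z1)
#   x_hat = layers.generative0(h1, ladder0=z0)     # reconstruction in dataset range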