 
 class VariationalAutoencoder(object):
 
-    def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
-                 gaussian_sample_size = 128):
+    def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer()):
         self.n_input = n_input
         self.n_hidden = n_hidden
-        self.gaussian_sample_size = gaussian_sample_size
 
         network_weights = self._initialize_weights()
         self.weights = network_weights
@@ -18,14 +16,12 @@ def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
         self.z_mean = tf.add(tf.matmul(self.x, self.weights['w1']), self.weights['b1'])
         self.z_log_sigma_sq = tf.add(tf.matmul(self.x, self.weights['log_sigma_w1']), self.weights['log_sigma_b1'])
 
-
         # sample from gaussian distribution
-        eps = tf.random_normal((self.gaussian_sample_size, n_hidden), 0, 1, dtype = tf.float32)
+        eps = tf.random_normal(tf.pack([tf.shape(self.x)[0], self.n_hidden]), 0, 1, dtype = tf.float32)
         self.z = tf.add(self.z_mean, tf.mul(tf.sqrt(tf.exp(self.z_log_sigma_sq)), eps))
 
         self.reconstruction = tf.add(tf.matmul(self.z, self.weights['w2']), self.weights['b2'])
 
-
         # cost
         reconstr_loss = 0.5 * tf.reduce_sum(tf.pow(tf.sub(self.reconstruction, self.x), 2.0))
         latent_loss = -0.5 * tf.reduce_sum(1 + self.z_log_sigma_sq
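The substance of this change is the eps line above: instead of drawing noise with a fixed gaussian_sample_size, the noise shape is built at graph-construction time from the batch dimension of self.x, so the latent sample always matches the current mini-batch. Below is a minimal sketch of the same reparameterization step written with the current TensorFlow 2.x names (tf.stack and tf.random.normal in place of the older tf.pack and tf.random_normal used in the diff); the helper name sample_z is illustrative, not part of this repository:

import tensorflow as tf

def sample_z(x, z_mean, z_log_sigma_sq):
    # Reparameterization trick: z = mu + sigma * eps.
    # The noise shape follows the incoming batch instead of a fixed sample size.
    eps_shape = tf.stack([tf.shape(x)[0], tf.shape(z_mean)[1]])  # (batch_size, n_hidden)
    eps = tf.random.normal(eps_shape, mean=0.0, stddev=1.0, dtype=tf.float32)
    return z_mean + tf.sqrt(tf.exp(z_log_sigma_sq)) * eps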
@@ -38,7 +34,6 @@ def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
         self.sess = tf.Session()
         self.sess.run(init)
 
-
     def _initialize_weights(self):
         all_weights = dict()
         all_weights['w1'] = tf.Variable(autoencoder.Utils.xavier_init(self.n_input, self.n_hidden))
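For reference, the latent_loss expression cut off at the end of the second hunk is the usual closed-form KL divergence between the encoder distribution N(mu, sigma^2) and the unit-Gaussian prior; the continuation is not shown in this diff, so the following is a sketch of the standard term rather than a quote of the repository's code:

import tensorflow as tf

def kl_to_unit_gaussian(z_mean, z_log_sigma_sq):
    # Closed-form KL(N(mu, sigma^2) || N(0, I)), summed over latent units and the batch.
    return -0.5 * tf.reduce_sum(
        1.0 + z_log_sigma_sq - tf.square(z_mean) - tf.exp(z_log_sigma_sq))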