
Commit 22991b9
add names to all tf.variables internal to inference
dustinvtran committed Mar 21, 2017
1 parent 3e50dff
Showing 4 changed files with 10 additions and 8 deletions.
edward/inferences/gan_inference.py (1 addition, 1 deletion)
@@ -201,7 +201,7 @@ def print_progress(self, info_dict):
 def _build_optimizer(optimizer, global_step):
   if optimizer is None:
     # Use ADAM with a decaying scale factor.
-    global_step = tf.Variable(0, trainable=False)
+    global_step = tf.Variable(0, trainable=False, name="global_step")
     starter_learning_rate = 0.1
     learning_rate = tf.train.exponential_decay(starter_learning_rate,
                                                global_step,
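Naming the step counter replaces TensorFlow's autogenerated identifier ("Variable:0", "Variable_1:0", ...) with a stable, searchable one, which helps when inspecting the graph or TensorBoard summaries. A minimal sketch of the effect, assuming the TensorFlow 1.x API used throughout this commit:

    import tensorflow as tf

    # Without an explicit name, TF 1.x autogenerates one per graph.
    unnamed = tf.Variable(0, trainable=False)
    named = tf.Variable(0, trainable=False, name="global_step")

    print(unnamed.name)  # e.g. "Variable:0"
    print(named.name)    # "global_step:0"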
edward/inferences/inference.py (7 additions, 5 deletions)
@@ -62,10 +62,11 @@ def __init__(self, latent_vars=None, data=None):
       elif isinstance(value, (float, list, int, np.ndarray, np.number, str)):
         # If value is a Python type, store it in the graph.
         # Assign its placeholder with the key's data type.
-        ph = tf.placeholder(key.dtype, np.shape(value))
-        var = tf.Variable(ph, trainable=False, collections=[])
-        sess.run(var.initializer, {ph: value})
-        self.data[key] = var
+        with tf.variable_scope("data"):
+          ph = tf.placeholder(key.dtype, np.shape(value))
+          var = tf.Variable(ph, trainable=False, collections=[])
+          sess.run(var.initializer, {ph: value})
+          self.data[key] = var

   def run(self, variables=None, use_coordinator=True, *args, **kwargs):
     """A simple wrapper to run inference.
@@ -163,7 +164,8 @@ def initialize(self, n_iter=1000, n_print=None, scale=None, logdir=None,
     self.n_print = n_print

     self.progbar = Progbar(self.n_iter)
-    self.t = tf.Variable(0, trainable=False)
+    self.t = tf.Variable(0, trainable=False, name="iteration")
+
     self.increment_t = self.t.assign_add(1)

     if scale is None:
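Beyond naming single variables, the tf.variable_scope("data") block prefixes every op created inside it, so the placeholder-backed data copies are grouped under data/ in the graph. A hedged sketch of the prefixing and manual initialization, assuming TF 1.x (the array value is illustrative):

    import numpy as np
    import tensorflow as tf

    value = np.array([1.0, 2.0], dtype=np.float32)
    with tf.variable_scope("data"):
      ph = tf.placeholder(tf.float32, np.shape(value))
      var = tf.Variable(ph, trainable=False, collections=[])

    print(var.name)  # "data/Variable:0"

    # collections=[] keeps the variable out of GLOBAL_VARIABLES, so it is
    # initialized explicitly by feeding the placeholder, as in __init__ above.
    sess = tf.Session()
    sess.run(var.initializer, {ph: value})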
edward/inferences/monte_carlo.py (1 addition, 1 deletion)
@@ -83,7 +83,7 @@ def initialize(self, *args, **kwargs):
                                      qz in six.itervalues(self.latent_vars)])
     super(MonteCarlo, self).initialize(*args, **kwargs)

-    self.n_accept = tf.Variable(0, trainable=False)
+    self.n_accept = tf.Variable(0, trainable=False, name="n_accept")
     self.n_accept_over_t = self.n_accept / self.t
     self.train = self.build_update()

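Here the named n_accept counter is divided by the iteration variable self.t (named "iteration" in inference.py above) to expose a running acceptance rate. A minimal sketch of that counter pattern, assuming TF 1.x; the toy loop that accepts every proposal is illustrative:

    import tensorflow as tf

    t = tf.Variable(0, trainable=False, name="iteration")
    n_accept = tf.Variable(0, trainable=False, name="n_accept")

    increment_t = t.assign_add(1)
    accept = n_accept.assign_add(1)  # in MCMC, run only on accepted proposals
    n_accept_over_t = n_accept / t

    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
    for _ in range(4):
      sess.run([increment_t, accept])

    print(sess.run(n_accept_over_t))  # 1.0: every step "accepted" in this toy loop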
edward/inferences/variational_inference.py (1 addition, 1 deletion)
@@ -65,7 +65,7 @@ def initialize(self, optimizer=None, var_list=None, use_prettytensor=False,

     if optimizer is None:
       # Use ADAM with a decaying scale factor.
-      global_step = tf.Variable(0, trainable=False)
+      global_step = tf.Variable(0, trainable=False, name="global_step")
       starter_learning_rate = 0.1
       learning_rate = tf.train.exponential_decay(starter_learning_rate,
                                                  global_step,
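As in gan_inference.py, the named global_step doubles as the clock for tf.train.exponential_decay: the optimizer increments it on each update, which advances the learning-rate schedule. A hedged sketch of the pattern, assuming TF 1.x; the decay_steps and decay_rate values are illustrative, since the hunk above truncates the remaining arguments:

    import tensorflow as tf

    global_step = tf.Variable(0, trainable=False, name="global_step")
    starter_learning_rate = 0.1
    learning_rate = tf.train.exponential_decay(starter_learning_rate,
                                               global_step,
                                               decay_steps=100,
                                               decay_rate=0.9,
                                               staircase=True)
    optimizer = tf.train.AdamOptimizer(learning_rate)
    # minimize(loss, global_step=global_step) would increment global_step
    # on each training step, advancing the decay schedule.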
