Remove all bn.input and bn.output
csy530216 committed Apr 21, 2019
1 parent eca61b7 commit 4e9230a
Showing 4 changed files with 8 additions and 19 deletions.
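
In short: model inputs that were previously declared inside the net with bn.input (and resolved from the observation dict) are now passed as ordinary function arguments, and named tensors previously registered with bn.output are registered with bn.deterministic. A minimal before/after sketch of the migration, with illustrative names ("x_mean") rather than code taken verbatim from any one file:

    import tensorflow as tf
    import zhusuan as zs

    # Before this commit: "x" was looked up in the observation dict via
    # bn.input, and "x_mean" was exposed through bn.output.
    @zs.meta_bayesian_net(scope="model_old")
    def build_model_old():
        bn = zs.BayesianNet()
        x = bn.input("x")                       # removed in this commit
        bn.output("x_mean", tf.sigmoid(x))      # removed in this commit
        return bn

    # After this commit: the input is a plain function argument and the
    # named tensor is a deterministic node.
    @zs.meta_bayesian_net(scope="model_new")
    def build_model_new(x):
        bn = zs.BayesianNet()
        bn.deterministic("x_mean", tf.sigmoid(x))
        return bn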
examples/bayesian_neural_nets/variational_dropout.py (5 additions, 5 deletions)

@@ -18,11 +18,11 @@


 @zs.meta_bayesian_net(scope="model", reuse_variables=True)
-def var_dropout(n, net_size, n_particles, is_training):
+def var_dropout(x, n, net_size, n_particles, is_training):
     normalizer_params = {'is_training': is_training,
                          'updates_collections': None}
     bn = zs.BayesianNet()
-    h = bn.input("x")
+    h = x
     for i, [n_in, n_out] in enumerate(zip(net_size[:-1], net_size[1:])):
         eps_mean = tf.ones([n, n_in])
         eps = bn.normal(

@@ -34,7 +34,7 @@ def var_dropout(n, net_size, n_particles, is_training):
         if i < len(net_size) - 2:
             h = tf.nn.relu(h)
     y = bn.categorical('y', h)
-    bn.output('y_logit', h)
+    bn.deterministic('y_logit', h)
     return bn


@@ -90,7 +90,7 @@ def q(n, net_size, n_particles):
     x_obs = tf.tile(tf.expand_dims(x, 0), [n_particles, 1, 1])
     y_obs = tf.tile(tf.expand_dims(y, 0), [n_particles, 1])

-    model = var_dropout(n, net_size, n_particles, is_training)
+    model = var_dropout(x_obs, n, net_size, n_particles, is_training)
     variational = q(n, net_size, n_particles)

     def log_joint(bn):

@@ -100,7 +100,7 @@ def log_joint(bn):

     model.log_joint = log_joint

-    lower_bound = zs.variational.elbo(model, {'x': x_obs, 'y': y_obs},
+    lower_bound = zs.variational.elbo(model, {'y': y_obs},
                                       variational=variational, axis=0)
     y_logit = lower_bound.bn["y_logit"]
     h_pred = tf.reduce_mean(tf.nn.softmax(y_logit), 0)
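
The inference-side consequence, spelled out: the input tensor is now bound when the meta model is instantiated, so the observation dict passed to zs.variational.elbo shrinks to the genuinely observed stochastic node. Restating the two call sites from the hunk above with explanatory comments:

    # Before: 'x' had to be in the observation dict so that
    # bn.input("x") could resolve it inside var_dropout.
    model = var_dropout(n, net_size, n_particles, is_training)
    lower_bound = zs.variational.elbo(model, {'x': x_obs, 'y': y_obs},
                                      variational=variational, axis=0)

    # After: x_obs is handed to the model factory directly; only the
    # observed stochastic node 'y' remains in the dict.
    model = var_dropout(x_obs, n, net_size, n_particles, is_training)
    lower_bound = zs.variational.elbo(model, {'y': y_obs},
                                      variational=variational, axis=0)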
examples/toy_examples/toy2d_intractable.py (2 additions, 2 deletions)

@@ -24,8 +24,8 @@ def build_toy2d_intractable(n_particles):
 def build_mean_field_variational(n_particles):
     bn = zs.BayesianNet()
     for name in ["z1", "z2"]:
-        z_mean = bn.output(name + "_mean", tf.Variable(-2.))
-        z_logstd = bn.output(name + "_logstd", tf.Variable(-5.))
+        z_mean = bn.deterministic(name + "_mean", tf.Variable(-2.))
+        z_logstd = bn.deterministic(name + "_logstd", tf.Variable(-5.))
         bn.normal(name, z_mean, logstd=z_logstd, n_samples=n_particles)
     return bn
examples/variational_autoencoders/vae_conv.py (1 addition, 1 deletion)

@@ -68,7 +68,7 @@ def build_gen(n, x_dim, z_dim, n_particles, nf=16):
     lx_z = conv2d_transpose(lx_z, [28, 28, 1], kernel_size=(3, 3),
                             stride=(1, 1), activation_fn=None)
     x_logits = tf.reshape(lx_z, [n_particles, -1, x_dim])
-    bn.output("x_mean", tf.sigmoid(x_logits))
+    bn.deterministic("x_mean", tf.sigmoid(x_logits))
     bn.bernoulli("x", x_logits, group_ndims=1)
     return bn
zhusuan/framework/bn.py (0 additions, 11 deletions)

@@ -370,17 +370,6 @@ def stochastic(self, name, dist, **kwargs):
         self._nodes[name] = node
         return node

-    # TODO: Deprecate and prefer unbounded input as function arguments
-    def input(self, name):
-        input_tensor = tf.convert_to_tensor(self._get_observation(name))
-        self._nodes[name] = input_tensor
-        return input_tensor
-
-    # TODO: Deprecate and prefer deterministic
-    def output(self, name, input_tensor):
-        self._nodes[name] = input_tensor
-        return input_tensor
-
     def deterministic(self, name, input_tensor):
         """
         Add a named deterministic node in this :class:`BayesianNet`.
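
For reference, the surviving deterministic method covers what output did: it stores the tensor as a named node of the net and returns it, so it can later be fetched by name (e.g. lower_bound.bn["y_logit"] above). A minimal sketch of the equivalent behavior, inferred from the removed output body; the actual implementation in bn.py may differ in details:

    def deterministic(self, name, input_tensor):
        # Register the tensor under `name` so this BayesianNet can
        # serve it by name later, then return it for further use.
        self._nodes[name] = input_tensor
        return input_tensor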
