
Commit c7b03c8
Merge 83a204e into 38381ac
siddharth-agrawal committed Jan 10, 2018
2 parents 38381ac + 83a204e
Showing 3 changed files with 23 additions and 4 deletions.
11 changes: 10 additions & 1 deletion edward/inferences/implicit_klqp.py
@@ -48,6 +48,9 @@ class ImplicitKLqp(GANInference):
  If `scale` has more than one item, then in order to scale
  its corresponding output, `discriminator` must output a
  dictionary of same size and keys as `scale`.
+ The objective function also adds the sum of all tensors in the
+ `REGULARIZATION_LOSSES` collection.
"""
def __init__(self, latent_vars, data=None, discriminator=None,
global_vars=None):
@@ -203,8 +206,14 @@ def build_loss_and_gradients(self, var_list):
        for key in six.iterkeys(self.scale)]
    scaled_ratio = tf.reduce_sum(scaled_ratio)

+   reg_terms_d = tf.losses.get_regularization_losses(scope="Disc")
+   reg_terms_all = tf.losses.get_regularization_losses()
+   reg_terms = [r for r in reg_terms_all if r not in reg_terms_d]
+
    # Form variational objective.
-   loss = -(pbeta_log_prob - qbeta_log_prob + scaled_ratio)
+   loss = -(pbeta_log_prob - qbeta_log_prob + scaled_ratio -
+            tf.reduce_sum(reg_terms))
+   loss_d = loss_d + tf.reduce_sum(reg_terms_d)

    var_list_d = tf.get_collection(
        tf.GraphKeys.TRAINABLE_VARIABLES, scope="Disc")
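The split above relies on TensorFlow's `REGULARIZATION_LOSSES` collection: any `kernel_regularizer` attached to a layer registers a tensor there, and the `scope="Disc"` filter pulls out only the discriminator's penalties. A minimal sketch of that behavior (TensorFlow 1.x; the "Gen" scope name and the 0.1 scales are illustrative, not from this commit):

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 2])

with tf.variable_scope("Gen"):
  h = tf.layers.dense(
      x, 8, kernel_regularizer=tf.contrib.layers.l2_regularizer(0.1))

# The diff filters with scope="Disc", i.e. the discriminator's variables
# live under a "Disc" variable scope.
with tf.variable_scope("Disc"):
  logit = tf.layers.dense(
      h, 1, kernel_regularizer=tf.contrib.layers.l2_regularizer(0.1))

reg_terms_d = tf.losses.get_regularization_losses(scope="Disc")  # Disc only
reg_terms_all = tf.losses.get_regularization_losses()            # both scopes
reg_terms = [r for r in reg_terms_all if r not in reg_terms_d]   # the rest

print(len(reg_terms_d), len(reg_terms))  # 1 1

This mirrors the three lines added above: discriminator penalties go into `loss_d`, everything else into the variational objective `loss`.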
6 changes: 5 additions & 1 deletion edward/inferences/map.py
@@ -71,6 +71,9 @@ class MAP(VariationalInference):
  unconstrained; see, e.g., `qsigma` above. This is different than
  performing MAP on the unconstrained space: in general, the MAP of
  the transform is not the transform of the MAP.
+ The objective function also adds the sum of all tensors in the
+ `REGULARIZATION_LOSSES` collection.
"""
def __init__(self, latent_vars=None, data=None):
"""Create an inference algorithm.
@@ -142,7 +145,8 @@ def build_loss_and_gradients(self, var_list):
    p_log_prob += tf.reduce_sum(
        self.scale.get(x, 1.0) * x_copy.log_prob(dict_swap[x]))

-   loss = -p_log_prob
+   reg_penalty = tf.reduce_sum(tf.losses.get_regularization_losses())
+   loss = -p_log_prob + reg_penalty

    grads = tf.gradients(loss, var_list)
    grads_and_vars = list(zip(grads, var_list))
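For MAP this amounts to classic penalized maximum likelihood: the penalty is simply added to the negative log joint. A minimal sketch (TensorFlow 1.x; the variable `w` and the 0.5 scale are illustrative, not from this commit):

import tensorflow as tf

# Registering a regularizer on a variable adds a tensor to the
# REGULARIZATION_LOSSES collection.
w = tf.get_variable(
    "w", shape=[3], regularizer=tf.contrib.layers.l2_regularizer(0.5))

p_log_prob = -0.5 * tf.reduce_sum(tf.square(w))  # stand-in for the log joint

reg_penalty = tf.reduce_sum(tf.losses.get_regularization_losses())
loss = -p_log_prob + reg_penalty  # the objective built above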
10 changes: 8 additions & 2 deletions edward/inferences/wake_sleep.py
@@ -51,6 +51,9 @@ class WakeSleep(VariationalInference):
  where $z^{(s)} \sim q(z; \lambda)$ and $\\beta^{(s)}
  \sim q(\\beta)$.
+ The objective function also adds the sum of all tensors in the
+ `REGULARIZATION_LOSSES` collection.
"""
def __init__(self, *args, **kwargs):
super(WakeSleep, self).__init__(*args, **kwargs)
@@ -129,15 +132,18 @@ def build_loss_and_gradients(self, var_list):

    p_log_prob = tf.reduce_mean(p_log_prob)
    q_log_prob = tf.reduce_mean(q_log_prob)
+   reg_penalty = tf.reduce_sum(tf.losses.get_regularization_losses())

    if self.logging:
      tf.summary.scalar("loss/p_log_prob", p_log_prob,
                        collections=[self._summary_key])
      tf.summary.scalar("loss/q_log_prob", q_log_prob,
                        collections=[self._summary_key])
+     tf.summary.scalar("loss/reg_penalty", reg_penalty,
+                       collections=[self._summary_key])

-   loss_p = -p_log_prob
-   loss_q = -q_log_prob
+   loss_p = -p_log_prob + reg_penalty
+   loss_q = -q_log_prob + reg_penalty

    q_rvs = list(six.itervalues(self.latent_vars))
    q_vars = [v for v in var_list
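Note that the same `reg_penalty` enters both losses, so regularizers on either the model or the inference network are applied in both the wake and sleep updates. A minimal sketch of the arithmetic (TensorFlow 1.x; the constants are illustrative, not from this commit):

import tensorflow as tf

p_log_prob = tf.constant(1.0)  # stand-ins for the averaged log-probs
q_log_prob = tf.constant(2.0)

# Any tensor can be registered in the collection directly.
tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, tf.constant(0.3))

reg_penalty = tf.reduce_sum(tf.losses.get_regularization_losses())
loss_p = -p_log_prob + reg_penalty  # wake phase: update model parameters
loss_q = -q_log_prob + reg_penalty  # sleep phase: update inference network

with tf.Session() as sess:
  print(sess.run([loss_p, loss_q]))  # [-0.7, -1.7]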
