Skip to content

Commit

Permalink
add comment about gradient
Browse files Browse the repository at this point in the history
  • Loading branch information
carpedm20 committed Apr 6, 2017
1 parent 013c1c4 commit f9356c4
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 1 deletion.
2 changes: 2 additions & 0 deletions models.py
Expand Up @@ -110,6 +110,8 @@ def __init__(self, size_average=True):
self.size_average = size_average

def forward(self, input, target):
    """Compute the loss for *input* against *target* via the backend implementation.

    NOTE(review): `_assert_no_grad(target)` is intentionally disabled below so a
    target that requires grad is not rejected up front — but the backend loss
    presumably still does not propagate gradients through `target`; confirm
    before relying on it.
    """
    # _assert_no_grad(target)
    loss_fn = getattr(self._backend, type(self).__name__)(self.size_average)
    return loss_fn(input, target)
Expand Down
2 changes: 1 addition & 1 deletion trainer.py
Expand Up @@ -161,7 +161,7 @@ def get_optimizer(lr):
d_loss_fake = l1(AE_G_d, sample_z_G.detach())

d_loss = d_loss_real - k_t * d_loss_fake
g_loss = l1(sample_z_G, AE_G_g)
g_loss = l1(sample_z_G, AE_G_g) # this won't still solve the problem

loss = d_loss + g_loss
loss.backward()
Expand Down

0 comments on commit f9356c4

Please sign in to comment.