
Commit

Fix syntax error
anaruse committed Jul 16, 2019
1 parent 1ab2632 commit 4d50de1
Showing 1 changed file with 3 additions and 2 deletions.
chainer/functions/loss/softmax_cross_entropy.py (5 changes: 3 additions & 2 deletions)
@@ -250,7 +250,7 @@ def _soft_target_loss(self, xp, x, t, log_y):
         if self.soft_target_loss == 'kl-divergence':
             ret = xp.sum(t * (xp.log(t + self.eps) - log_y), axis=1)
         else:
-            ret = -xp.sum(t * log_y), axis=1)
+            ret = -xp.sum(t * log_y, axis=1)
         if self.reduce == 'mean':
             self._coeff = 1.0 / (x.size / x.shape[1])
             ret = ret.sum(keepdims=True) * self._coeff
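
The deleted line in this hunk carried a stray closing parenthesis after the xp.sum call; the replacement sums -t * log_y over the class axis. A minimal NumPy sketch of what the two branches compute, assuming t holds soft labels whose rows sum to 1 and log_y is the log-softmax of the activations (eps and the arrays below are illustrative stand-ins for self.eps and real data):

    import numpy as np

    eps = 1e-16                                   # stand-in for self.eps
    t = np.array([[0.7, 0.2, 0.1]])               # soft target distribution
    log_y = np.log(np.array([[0.6, 0.3, 0.1]]))   # pretend log-softmax output

    kl = np.sum(t * (np.log(t + eps) - log_y), axis=1)  # 'kl-divergence' branch
    ce = -np.sum(t * log_y, axis=1)                     # fixed 'cross-entropy' branch
    # The two results differ only by the entropy of t, which does not depend
    # on the activations, so both choices give the same gradients w.r.t. x.
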
@@ -447,7 +447,8 @@ def _double_backward_softmax_cross_entropy(x, t, normalize, class_weight,
 def softmax_cross_entropy(
         x, t, normalize=True, cache_score=True, class_weight=None,
         ignore_label=-1, reduce='mean', enable_double_backprop=False,
-        soft_target_loss='cross-entropy'):
+        soft_target_loss='kl-divergence'):
+        # soft_target_loss='cross-entropy'):
     """Computes cross entropy loss for pre-softmax activations.

     Args:
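
This hunk also switches the default soft_target_loss to 'kl-divergence', keeping the previous 'cross-entropy' default as a commented-out reminder. A rough usage sketch of the updated signature; passing soft labels as a float array of the same shape as x is an assumed calling convention here, not something the diff itself confirms:

    import numpy as np
    import chainer.functions as F

    x = np.random.randn(4, 3).astype(np.float32)   # pre-softmax activations
    t = np.full((4, 3), 1.0 / 3, np.float32)       # assumed soft labels, rows sum to 1

    loss_kl = F.softmax_cross_entropy(x, t)        # new default: soft_target_loss='kl-divergence'
    loss_ce = F.softmax_cross_entropy(
        x, t, soft_target_loss='cross-entropy')    # previous default, still selectable
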

0 comments on commit 4d50de1
