diff --git a/example/tutorial_mnist_simple.py b/example/tutorial_mnist_simple.py
index 0b12f9579..c2957c614 100644
--- a/example/tutorial_mnist_simple.py
+++ b/example/tutorial_mnist_simple.py
@@ -33,7 +33,7 @@
 # define cost function and metric.
 y = network.outputs
-cost = tl.cost.cross_entropy(y, y_)
+cost = tl.cost.cross_entropy(y, y_, name='xentropy')
 correct_prediction = tf.equal(tf.argmax(y, 1), y_)
 acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
 y_op = tf.argmax(tf.nn.softmax(y), 1)
diff --git a/tensorlayer/cost.py b/tensorlayer/cost.py
index 19d88088b..30b80f443 100644
--- a/tensorlayer/cost.py
+++ b/tensorlayer/cost.py
@@ -36,7 +36,7 @@ def cross_entropy(output, target, name=None):
     if tf.__version__ <= "0.12":
         return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=output, targets=target, name=name))
     else: # TF 1.0
-        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=outputs, name=name))
+        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=output, name=name))
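
Background on the fix: the TF 1.0 branch of cross_entropy referenced the undefined name outputs (the parameter is output), so the function raised a NameError at graph-construction time under TensorFlow >= 1.0; the example is also updated to pass the now-usable name argument. Below is a minimal sketch of the fixed call path. The placeholder shapes and the single DenseLayer are assumptions for illustration only, not part of this diff.

import tensorflow as tf
import tensorlayer as tl

# Hypothetical MNIST-style inputs (shapes assumed for illustration).
x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
y_ = tf.placeholder(tf.int64, shape=[None], name='y_')

# Minimal illustrative network producing 10-class logits.
network = tl.layers.InputLayer(x, name='input')
network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output')

y = network.outputs
# With the fix, the TF 1.0 branch forwards the logits via `output` correctly,
# and the `name` argument labels the cross-entropy op in the graph.
cost = tl.cost.cross_entropy(y, y_, name='xentropy')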