use sparse softmax
ppwwyyxx committed Apr 14, 2016
1 parent d646972 commit 9a4e6d9
Showing 4 changed files with 8 additions and 11 deletions.
6 changes: 3 additions & 3 deletions examples/cifar10_convnet.py
@@ -19,7 +19,8 @@
 from tensorpack.dataflow import imgaug
 
 """
-CIFAR10 90% validation accuracy after 40k step.
+A small cifar10 convnet model.
+90% validation accuracy after 40k step.
 """
 
 BATCH_SIZE = 128
@@ -62,8 +63,7 @@ def _get_cost(self, input_vars, is_training):
         # fc will have activation summary by default. disable for the output layer
         logits = FullyConnected('linear', l, out_dim=10, nl=tf.identity)
 
-        y = one_hot(label, 10)
-        cost = tf.nn.softmax_cross_entropy_with_logits(logits, y)
+        cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
         cost = tf.reduce_mean(cost, name='cross_entropy_loss')
         tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)
3 changes: 1 addition & 2 deletions examples/cifar10_resnet.py
@@ -105,8 +105,7 @@ def residual(name, l, increase_dim=False, first=False):
         logits = FullyConnected('linear', l, out_dim=10, nl=tf.identity)
         prob = tf.nn.softmax(logits, name='output')
 
-        y = one_hot(label, 10)
-        cost = tf.nn.softmax_cross_entropy_with_logits(logits, y)
+        cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
         cost = tf.reduce_mean(cost, name='cross_entropy_loss')
         tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)

7 changes: 3 additions & 4 deletions examples/mnist_convnet.py
@@ -22,7 +22,7 @@
 
 """
 MNIST ConvNet example.
-about 0.6% validation error after 50 epochs.
+about 0.6% validation error after 30 epochs.
 """
 
 BATCH_SIZE = 128
@@ -58,8 +58,7 @@ def _get_cost(self, input_vars, is_training):
         logits = FullyConnected('fc1', l, out_dim=10, nl=tf.identity)
         prob = tf.nn.softmax(logits, name='prob')
 
-        y = one_hot(label, 10)
-        cost = tf.nn.softmax_cross_entropy_with_logits(logits, y)
+        cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
         cost = tf.reduce_mean(cost, name='cross_entropy_loss')
         tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)

@@ -97,7 +96,7 @@ def get_config():
         learning_rate=1e-3,
         global_step=get_global_step_var(),
         decay_steps=dataset_train.size() * 10,
-        decay_rate=0.5, staircase=True, name='learning_rate')
+        decay_rate=0.3, staircase=True, name='learning_rate')
     tf.scalar_summary('learning_rate', lr)
 
     return TrainConfig(
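
Aside on the decay_rate change above: with staircase=True, tf.train.exponential_decay multiplies the base rate by decay_rate once every decay_steps steps, so lowering decay_rate from 0.5 to 0.3 makes each drop steeper. A minimal sketch of the resulting schedule, assuming dataset_train.size() is the number of training steps per epoch; staircase_lr is a hypothetical helper, not part of the repo:

# Sketch of the staircase schedule configured above; the numbers in the
# usage comment are illustrative assumptions, not measured values.
def staircase_lr(global_step, steps_per_epoch,
                 base_lr=1e-3, decay_rate=0.3, epochs_per_decay=10):
    decay_steps = steps_per_epoch * epochs_per_decay
    return base_lr * decay_rate ** (global_step // decay_steps)

# e.g. with ~468 steps/epoch (60k MNIST images, batch 128):
# epochs 0-9 -> 1e-3, epochs 10-19 -> 3e-4, epochs 20-29 -> 9e-5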
3 changes: 1 addition & 2 deletions examples/svhn_digit_convnet.py
@@ -48,8 +48,7 @@ def _get_cost(self, input_vars, is_training):
         logits = FullyConnected('linear', l, out_dim=10, nl=tf.identity)
         prob = tf.nn.softmax(logits, name='output')
 
-        y = one_hot(label, 10)
-        cost = tf.nn.softmax_cross_entropy_with_logits(logits, y)
+        cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
         cost = tf.reduce_mean(cost, name='cross_entropy_loss')
         tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)

