
Commit

Ref tensorflow#57: Moving to use class weight on logits before cross entropy. The math should work as -weight[class] * x[class] + log(sum(exp(weight * x))).
ilblackdragon committed Feb 3, 2016
1 parent 6587cee commit b729f46
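As a plain-NumPy sketch of the per-example math described in the message (a verification aid, not code from this commit; weighted_xent is a hypothetical name):

import numpy as np

def weighted_xent(x, labels, weight):
    # Cross entropy with class weights applied to the logits x:
    #   -weight[class] * x[class] + log(sum(exp(weight * x)))
    wx = weight * x                         # weight each logit per class
    c = int(np.argmax(labels))              # index of the true (one-hot) class
    return -wx[c] + np.log(np.sum(np.exp(wx)))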
Showing 2 changed files with 15 additions and 2 deletions.
4 changes: 2 additions & 2 deletions skflow/ops/losses_ops.py
@@ -46,11 +46,11 @@ def softmax_classifier(tensor_in, labels, weights, biases, class_weight=None, na
"""
with tf.op_scope([tensor_in, labels], name, "softmax_classifier"):
logits = tf.nn.xw_plus_b(tensor_in, weights, biases)
if class_weight:
logits = tf.mul(logits, class_weight)
xent = tf.nn.softmax_cross_entropy_with_logits(logits,
labels,
name="xent_raw")
if class_weight:
xent = tf.mul(xent, class_weight)
loss = tf.reduce_mean(xent, name="xent")
predictions = tf.nn.softmax(logits, name=name)
return predictions, loss
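Note on the placement: logits have shape [batch_size, n_classes], so a class_weight vector of shape [n_classes] broadcasts across them cleanly. The previous placement multiplied the per-example xent vector (shape [batch_size]) by class_weight (shape [n_classes]), which broadcasts only when the batch size happens to equal the number of classes; presumably part of why the weighting moved before the cross entropy.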
13 changes: 13 additions & 0 deletions skflow/ops/tests/test_ops.py
@@ -20,6 +20,19 @@

 class OpsTest(tf.test.TestCase):
 
+    def test_softmax_classifier(self):
+        with self.test_session() as session:
+            features = tf.placeholder(tf.float32, [None, 3])
+            labels = tf.placeholder(tf.float32, [None, 2])
+            weights = tf.constant([[0.1, 0.1], [0.1, 0.1], [0.1, 0.1]])
+            biases = tf.constant([0.2, 0.3])
+            class_weight = tf.constant([0.1, 0.9])
+            prediction, loss = ops.softmax_classifier(features, labels, weights, biases, class_weight)
+            self.assertEqual(prediction.get_shape()[1], 2)
+            self.assertEqual(loss.get_shape(), [])
+            value = session.run(loss, {features: [[0.2, 0.3, 0.2]], labels: [[0, 1]]})
+            self.assertAllClose(value, 0.55180627)
+
     def test_embedding_lookup(self):
         d_embed = 5
         n_embed = 10
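The expected value checks out by hand (a NumPy sketch using the formula from the commit message, not part of the commit): the example's logits are [0.2, 0.3, 0.2] @ W + b = [0.27, 0.37], the weighted logits are [0.027, 0.333], and the loss is -0.333 + log(e^0.027 + e^0.333) ≈ 0.5518063.

import numpy as np

features = np.array([0.2, 0.3, 0.2])
W = np.full((3, 2), 0.1)
b = np.array([0.2, 0.3])
class_weight = np.array([0.1, 0.9])
labels = np.array([0.0, 1.0])

logits = features @ W + b                   # [0.27, 0.37]
wx = class_weight * logits                  # [0.027, 0.333]
c = int(np.argmax(labels))                  # true class index: 1
loss = -wx[c] + np.log(np.sum(np.exp(wx)))
print(loss)                                 # ~0.55180627, matching the test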
