Skip to content

Commit

Permalink
Tidy SGD tests.
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 256969054
Change-Id: Ib9a63c58f8f8ee5c6ac27b811559cf34d2b2e2e2
  • Loading branch information
petebu authored and sonnet-copybara committed Jul 8, 2019
1 parent 28453b3 commit 2ddd050
Showing 1 changed file with 16 additions and 16 deletions.
32 changes: 16 additions & 16 deletions sonnet/src/sgd_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,16 +31,16 @@
class SGDTest(test_utils.TestCase, parameterized.TestCase):

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testDense(self, opt_class):
  """Dense gradients: each parameter becomes p - learning_rate * update."""
  parameters = [tf.Variable([1., 2.]), tf.Variable([3., 4.])]
  updates = [tf.constant([5., 5.]), tf.constant([3., 3.])]
  optimizer = opt_class(learning_rate=3.)
  optimizer.apply(updates, parameters)
  # 1 - 3*5 = -14, 2 - 3*5 = -13; 3 - 3*3 = -6, 4 - 3*3 = -5.
  self.assertAllClose([[-14., -13.], [-6., -5.]],
                      [x.numpy() for x in parameters])

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testSparse(self, sgd_class):
def testSparse(self, opt_class):
if self.primary_device == "TPU":
self.skipTest("IndexedSlices not supported on TPU.")

Expand All @@ -49,25 +49,25 @@ def testSparse(self, sgd_class):
tf.constant([0]), tf.constant([2, 1])),
tf.IndexedSlices(tf.constant([0.01], shape=[1, 1]),
tf.constant([1]), tf.constant([2, 1]))]
optimizer = sgd_class(learning_rate=3.)
optimizer = opt_class(learning_rate=3.)
optimizer.apply(updates, parameters)
self.assertAllClose([[1.0 - 3.0 * 0.1], [2.0]], parameters[0].numpy())
self.assertAllClose([[3.0], [4.0 - 3.0 * 0.01]], parameters[1].numpy())

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testNoneUpdate(self, opt_class):
  """A `None` update must leave the corresponding parameter untouched."""
  parameters = [tf.Variable([1., 2.])]
  updates = [None]
  optimizer = opt_class(learning_rate=3.)
  optimizer.apply(updates, parameters)
  # No update was supplied, so the parameter keeps its initial value.
  self.assertAllClose([[1., 2.]], [x.numpy() for x in parameters])

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testVariableLearningRate(self, sgd_class):
def testVariableLearningRate(self, opt_class):
parameters = [tf.Variable([1., 2.]), tf.Variable([3., 4.])]
updates = [tf.constant([5., 5.]), tf.constant([3., 3.])]
learning_rate = tf.Variable(3.)
optimizer = sgd_class(learning_rate=learning_rate)
optimizer = opt_class(learning_rate=learning_rate)
optimizer.apply(updates, parameters)
self.assertAllClose([[-14., -13.], [-6., -5.]],
[x.numpy() for x in parameters])
Expand All @@ -78,36 +78,36 @@ def testVariableLearningRate(self, sgd_class):
[x.numpy() for x in parameters])

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testLearningRateDTypeConversion(self, opt_class):
  """An integer learning rate must be cast to the parameters' float dtype."""
  parameters = [tf.Variable([1., 2.]), tf.Variable([3., 4.])]
  updates = [tf.constant([5., 5.]), tf.constant([3., 3.])]
  # TPU does not support int64 variables, so fall back to int32 there.
  dtype = tf.int32 if self.primary_device == "TPU" else tf.int64
  learning_rate = tf.Variable(3, dtype=dtype)
  optimizer = opt_class(learning_rate=learning_rate)
  optimizer.apply(updates, parameters)
  # Same arithmetic as the dense case: p - 3 * update.
  self.assertAllClose([[-14., -13.], [-6., -5.]],
                      [x.numpy() for x in parameters])

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testDifferentLengthUpdatesParams(self, opt_class):
  """`apply` must reject mismatched updates/parameters lengths."""
  parameters = [tf.Variable([1., 2.]), tf.Variable([3., 4.])]
  updates = [tf.constant([5., 5.])]  # One update for two parameters.
  optimizer = opt_class(learning_rate=3.)
  # assertRaisesRegex is the non-deprecated spelling of assertRaisesRegexp.
  with self.assertRaisesRegex(
      ValueError, "`updates` and `parameters` must be the same length."):
    optimizer.apply(updates, parameters)

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testEmptyParams(self, opt_class):
  """`apply` must reject an empty parameter list."""
  optimizer = opt_class(learning_rate=3.)
  # assertRaisesRegex is the non-deprecated spelling of assertRaisesRegexp.
  with self.assertRaisesRegex(ValueError, "`parameters` cannot be empty."):
    optimizer.apply([], [])

@parameterized.parameters(sgd.SGD, sgd.FastSGD)
def testInconsistentDTypes(self, opt_class):
  """`apply` must reject updates whose dtype differs from the parameter's."""
  parameters = [tf.Variable([1., 2.], name="param0")]
  updates = [tf.constant([5, 5])]  # int32 update vs float32 parameter.
  optimizer = opt_class(learning_rate=3.)
  # assertRaisesRegex is the non-deprecated spelling of assertRaisesRegexp.
  with self.assertRaisesRegex(
      ValueError, "DType of .* is not equal to that of parameter .*param0.*"):
    optimizer.apply(updates, parameters)
Expand Down

0 comments on commit 2ddd050

Please sign in to comment.