Skip to content

Commit

Permalink
more gradient optimizer tests
Browse files Browse the repository at this point in the history
  • Loading branch information
novikov-alexander committed Nov 13, 2023
1 parent 8cab730 commit 5a74737
Show file tree
Hide file tree
Showing 2 changed files with 155 additions and 6 deletions.
118 changes: 118 additions & 0 deletions test/TensorFlowNET.UnitTest/Training/GradientDescentOptimizerTests.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using Tensorflow;
using Tensorflow.NumPy;
using static Tensorflow.Binding;
Expand Down Expand Up @@ -67,6 +68,56 @@ public void TestBasic()
TestBasic<double>();
}

/// <summary>
/// Minimizes loss = (var0 @ x + var1)^2 with one SGD step and checks the
/// resulting parameter values against the hand-computed gradient.
/// Mirrors TensorFlow's Python testMinimizeResourceVariable.
/// </summary>
private void TestMinimizeResourceVariable<T>() where T : struct
{
    var dtype = GetTypeForNumericType<T>();

    // train.GradientDescentOptimizer is V1 only API.
    tf.Graph().as_default();
    using (var sess = self.cached_session())
    {
        var var0 = tf.Variable(new[] { new[] { 1.0, 2.0 } }, dtype: dtype);
        var var1 = tf.Variable(new[] { 3.0 }, dtype: dtype);
        var x = tf.constant(
            new[] {
                new[] { 4.0 },
                new[] { 5.0 }
            },
            dtype: dtype);
        // pred = var0 @ x + var1; loss = pred^2, so d(loss)/d(pred) = 2 * pred.
        var pred = math_ops.matmul(var0, x) + var1;
        var loss = pred * pred;
        // NOTE(review): the expected values asserted below assume a learning
        // rate of exactly 1.0 (update = v - np_grad * x), matching the upstream
        // TF Python test which uses GradientDescentOptimizer(1.0). The previous
        // 3.0f would scale every update by 3 and break the assertions.
        var sgd_op = tf.train.GradientDescentOptimizer(1.0f).minimize(loss);

        var global_variables = tf.global_variables_initializer();
        sess.run(global_variables);

        // Fetch params to validate initial values
        self.assertAllCloseAccordingToType(new[] { new[] { 1.0, 2.0 } }, self.evaluate<T[]>(var0));
        self.assertAllCloseAccordingToType(new[] { 3.0 }, self.evaluate<T[]>(var1));
        // Run 1 step of sgd
        sgd_op.run();
        // Validate updated params: np_pred = 1*4 + 2*5 + 3 = 17, np_grad = 2 * np_pred.
        var np_pred = 1.0 * 4.0 + 2.0 * 5.0 + 3.0;
        var np_grad = 2 * np_pred;
        self.assertAllCloseAccordingToType(
            new[] { new[] { 1.0 - np_grad * 4.0, 2.0 - np_grad * 5.0 } },
            self.evaluate<T[]>(var0));
        self.assertAllCloseAccordingToType(
            new[] { 3.0 - np_grad },
            self.evaluate<T[]>(var1));
    }
}

/// <summary>Runs the resource-variable minimize test for each supported numeric type.</summary>
[TestMethod]
public void TestMinimizeResourceVariable()
    // TODO: enable float (and eventually np.half) once precision handling allows:
    // TestMinimizeResourceVariable<float>();
    => TestMinimizeResourceVariable<double>();

private void TestTensorLearningRate<T>() where T : struct
{
var dtype = GetTypeForNumericType<T>();
Expand Down Expand Up @@ -115,5 +166,72 @@ public void TestTensorLearningRate()
TestTensorLearningRate<float>();
TestTensorLearningRate<double>();
}

/// <summary>
/// Verifies compute_gradients on ref variables: the gradient of a plain sum
/// var0 + var1 with respect to each variable is exactly [1.0].
/// </summary>
public void TestGradWrtRef<T>() where T : struct
{
    var dtype = GetTypeForNumericType<T>();

    // train.GradientDescentOptimizer is V1 only API; the as_default() result
    // was previously stored in an unused local — discard it like the sibling tests do.
    tf.Graph().as_default();
    using (var sess = self.cached_session())
    {
        var opt = tf.train.GradientDescentOptimizer(3.0f);
        var values = new[] { 1.0, 3.0 };
        var vars_ = values.Select(
            v => tf.Variable(new[] { v }, dtype: dtype) as IVariableV1
        ).ToList();
        var grads_and_vars = opt.compute_gradients(tf.add(vars_[0], vars_[1]), vars_);
        sess.run(tf.global_variables_initializer());

        // d(v0 + v1)/dv_i == 1 for every addend.
        foreach (var (grad, _) in grads_and_vars)
            self.assertAllCloseAccordingToType(new[] { 1.0 }, self.evaluate<T[]>(grad));
    }
}

/// <summary>Exercises the ref-variable gradient test for both floating-point precisions.</summary>
[TestMethod]
public void TestGradWrtRef()
{
    foreach (var run in new Action[] { TestGradWrtRef<float>, TestGradWrtRef<double> })
        run();
}

/// <summary>
/// Applies one explicit gradient step via apply_gradients and checks both the
/// updated parameters (v - lr * grad with lr = 3.0) and that the supplied
/// global_step counter was incremented to 1.
/// </summary>
public void TestWithGlobalStep<T>() where T : struct
{
    var dtype = GetTypeForNumericType<T>();

    // Build the graph in V1 mode so the V1 optimizer API is usable.
    tf.Graph().as_default();
    using (var sess = self.cached_session())
    {
        // Non-trainable counter that apply_gradients should bump.
        var globalStep = tf.Variable(0, trainable: false);
        var params0 = tf.Variable(new[] { 1.0, 2.0 }, dtype: dtype);
        var params1 = tf.Variable(new[] { 3.0, 4.0 }, dtype: dtype);
        var grad0 = tf.constant(new[] { 0.1, 0.1 }, dtype: dtype);
        var grad1 = tf.constant(new[] { 0.01, 0.01 }, dtype: dtype);

        var optimizer = tf.train.GradientDescentOptimizer(3.0f);
        var pairs = new[]
        {
            Tuple.Create(grad0, params0 as IVariableV1),
            Tuple.Create(grad1, params1 as IVariableV1)
        };
        var sgdStep = optimizer.apply_gradients(pairs, global_step: globalStep);

        sess.run(tf.global_variables_initializer());

        // Parameters start at their declared initial values.
        self.assertAllCloseAccordingToType(new[] { 1.0, 2.0 }, self.evaluate<T[]>(params0));
        self.assertAllCloseAccordingToType(new[] { 3.0, 4.0 }, self.evaluate<T[]>(params1));

        // One SGD step: v <- v - 3.0 * grad, and global_step becomes 1.
        sgdStep.run();
        self.assertAllCloseAccordingToType(new[] { 1.0 - 3.0 * 0.1, 2.0 - 3.0 * 0.1 }, self.evaluate<T[]>(params0));
        self.assertAllCloseAccordingToType(new[] { 3.0 - 3.0 * 0.01, 4.0 - 3.0 * 0.01 }, self.evaluate<T[]>(params1));
        Assert.AreEqual(1, self.evaluate<int>(globalStep));
    }
}

/// <summary>Runs the global-step test for every supported floating-point precision.</summary>
[TestMethod]
public void TestWithGlobalStep()
{
    foreach (var run in new Action[] { TestWithGlobalStep<float>, TestWithGlobalStep<double> })
        run();
}
}
}
43 changes: 37 additions & 6 deletions test/Tensorflow.UnitTest/PythonTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -175,8 +175,8 @@ public int Compare(object? x, object? y)
return 1;
}

var a = (double)x;
var b = (double)y;
var a = Convert.ToDouble(x);
var b = Convert.ToDouble(y);

double delta = Math.Abs(a - b);
if (delta < _epsilon)
Expand All @@ -199,6 +199,27 @@ public int Compare(object? x, object? y)
CollectionAssert.AreEqual(expected, givenAsDouble, new CollectionComparer(eps));
}

/// <summary>
/// Asserts that every element of <paramref name="array2"/> equals the scalar
/// <paramref name="value"/> within <paramref name="eps"/>. Scalars are compared
/// directly; non-scalars are compared element-wise against a constant array.
/// </summary>
/// <param name="value">Expected value for every element.</param>
/// <param name="array2">Actual values produced by the code under test.</param>
/// <param name="eps">Absolute comparison tolerance.</param>
public void assertAllCloseAccordingToType<T>(
    double value,
    NDArray array2,
    double eps = 1e-5) where T : struct
{
    // TODO: loosen the tolerance when T is a lower-precision type (float/half).
    // (A previously computed per-element double copy of array2 was dead code and
    // has been removed.)
    if (array2.shape.IsScalar)
    {
        double value2 = array2;
        Assert.AreEqual(value, value2, eps);
        return;
    }

    // Broadcast the expected scalar to array2's shape and compare element-wise.
    var array1 = np.ones_like(array2) * value;
    CollectionAssert.AreEqual(array1.ToArray(), array2.ToArray(), new CollectionComparer(eps));

    //TODO: Assert.IsTrue(np.allclose(array1, array2, rtol: eps));
}

public void assertProtoEquals(object toProto, object o)
{
throw new NotImplementedException();
Expand Down Expand Up @@ -267,11 +288,21 @@ public T evaluate<T>(Tensor tensor)
{
var sess = tf.get_default_session();
var ndarray = tensor.eval(sess);
if (typeof(T) == typeof(double)
|| typeof(T) == typeof(float)
|| typeof(T) == typeof(int))

if (typeof(T) == typeof(int))
{
int i = ndarray;
result = i;
}
else if (typeof(T) == typeof(float))
{
float f = ndarray;
result = f;
}
else if (typeof(T) == typeof(double))
{
result = Convert.ChangeType(ndarray, typeof(T));
double d = ndarray;
result = d;
}
else if (typeof(T) == typeof(double[]))
{
Expand Down

0 comments on commit 5a74737

Please sign in to comment.