Push forward compat date of gradients-of-log/exp/sqrt change #29888

Merged · 1 commit · Jun 17, 2019
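The only change in this PR is the date passed to `compat.forward_compatible` in the gated gradient code paths: it moves from (2019, 6, 14) to (2019, 11, 14), which delays when the new `xdivy`/`mul_no_nan` gradient branches turn on. As an illustrative sketch (not part of the diff), the gate is the public `tf.compat.forward_compatible` API, which returns True only once the library's forward-compatibility horizon has passed the given date.

```python
import tensorflow as tf

# Illustrative only: the gate used throughout the diff below. This stays
# False until the library's forward-compatibility horizon moves past
# 2019-11-14, so the gradient functions keep their old branches until then.
print(tf.compat.forward_compatible(2019, 11, 14))
```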
2 changes: 1 addition & 1 deletion tensorflow/python/kernel_tests/cwise_ops_test.py
@@ -1268,7 +1268,7 @@ class SingularGradientOpTest(test.TestCase):

@test_util.run_deprecated_v1
def testGradientAtSingularity(self):
- if not compat.forward_compatible(2019, 6, 14):
+ if not compat.forward_compatible(2019, 11, 14):
self.skipTest("Skipping test for future functionality.")

ops_and_singularity = [
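The test above skips itself until `compat.forward_compatible(2019, 11, 14)` returns True. As a hedged sketch (not code from this PR), a test can still exercise the gated path before that date with the test-only context manager in `tf.compat`:

```python
import tensorflow as tf

# Temporarily pretend the forward-compatibility horizon is past 2019-11-14,
# so compat.forward_compatible(2019, 11, 14) evaluates to True inside the
# block and the new gradient branches are selected.
with tf.compat.forward_compatibility_horizon(2019, 11, 15):
  assert tf.compat.forward_compatible(2019, 11, 14)
```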
46 changes: 23 additions & 23 deletions tensorflow/python/ops/math_grad.py
@@ -470,7 +470,7 @@ def _SqrtGradGrad(op, grad):
a = op.inputs[0]
y = op.outputs[0] # y = 0.5 * b / conj(a)
with ops.control_dependencies([grad]):
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
ga = gen_math_ops.xdivy(grad, a)
return -gen_math_ops.mul_no_nan(y, math_ops.conj(ga)), 0.5 * ga
else:
@@ -504,7 +504,7 @@ def _ExpGrad(op, grad):
y = op.outputs[0] # y = e^x
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(y, grad)
else:
return grad * y
@@ -517,7 +517,7 @@ def _Expm1Grad(op, grad):
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
y = math_ops.exp(x)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(y, grad)
else:
return grad * y
@@ -529,7 +529,7 @@ def _LogGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return gen_math_ops.xdivy(grad, x)
else:
return grad * math_ops.reciprocal(x)
@@ -541,7 +541,7 @@ def _Log1pGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return gen_math_ops.xdivy(grad, 1 + x)
else:
return grad * math_ops.reciprocal(1 + x)
@@ -623,7 +623,7 @@ def _AcoshGrad(op, grad):
y = op.outputs[0]
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.xdivy(grad, math_ops.sinh(y))
else:
return grad / math_ops.sinh(y)
@@ -676,7 +676,7 @@ def _LgammaGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(math_ops.digamma(x), grad)
else:
return grad * math_ops.digamma(x)
@@ -689,7 +689,7 @@ def _DigammaGrad(op, grad):
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
partial_x = math_ops.polygamma(array_ops.constant(1, dtype=x.dtype), x)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(partial_x, grad)
else:
return grad * partial_x
@@ -702,7 +702,7 @@ def _BesselI0eGrad(op, grad):
y = op.outputs[0]
with ops.control_dependencies([grad]):
partial_x = (math_ops.bessel_i1e(x) - math_ops.sign(x) * y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(partial_x, grad)
else:
return grad * partial_x
@@ -726,7 +726,7 @@ def _BesselI1eGrad(op, grad):
dy_dx = math_ops.bessel_i0e(safe_x) - y * (
math_ops.sign(safe_x) + math_ops.reciprocal(safe_x))
dy_dx = array_ops.where(x_is_not_tiny, dy_dx, 0.5 + zeros)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(dy_dx, grad)
else:
return grad * dy_dx
@@ -747,7 +747,7 @@ def _IgammaGrad(op, grad):
# and Gamma'(a) can grow large.
partial_x = math_ops.exp(-x + (a - 1) * math_ops.log(x) -
math_ops.lgamma(a))
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_a, grad), ra), sa),
array_ops.reshape(
@@ -786,7 +786,7 @@ def _BetaincGrad(op, grad):
(a - 1) * math_ops.log(x) - log_beta)

# TODO(b/36815900): Mark None return values as NotImplemented
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (
None, # da
None, # db
@@ -815,7 +815,7 @@ def _ZetaGrad(op, grad):
q = math_ops.conj(q)
partial_q = -x * math_ops.zeta(x + 1, q)
# TODO(b/36815900): Mark None return values as NotImplemented
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (None,
array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_q, grad), rq),
@@ -841,7 +841,7 @@ def _PolygammaGrad(op, grad):
x = math_ops.conj(x)
partial_x = math_ops.polygamma(n + 1, x)
# TODO(b/36815900): Mark None return values as NotImplemented
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (None,
array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_x, grad), rx),
@@ -902,7 +902,7 @@ def _TanGrad(op, grad):
x = math_ops.conj(x)
secx = math_ops.reciprocal(math_ops.cos(x))
secx2 = math_ops.square(secx)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.mul_no_nan(secx2, grad)
else:
return secx2 * grad
@@ -917,7 +917,7 @@ def _AsinGrad(op, grad):
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return math_ops.xdivy(grad, den)
else:
inv = math_ops.reciprocal(den)
@@ -933,7 +933,7 @@ def _AcosGrad(op, grad):
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return -math_ops.xdivy(grad, den)
else:
inv = math_ops.reciprocal(den)
@@ -958,7 +958,7 @@ def _Atan2Grad(op, grad):
y = op.inputs[0]
x = op.inputs[1]
with ops.control_dependencies([grad]):
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
grad_inv = math_ops.xdivy(grad, (math_ops.square(x) + math_ops.square(y)))
else:
grad_inv = grad / (math_ops.square(x) + math_ops.square(y))
@@ -1078,7 +1078,7 @@ def _DivGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.xdivy(grad, y), rx), sx),
array_ops.reshape(
@@ -1131,7 +1131,7 @@ def _RealDivGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.xdivy(grad, y), rx), sx),
array_ops.reshape(
@@ -1158,7 +1158,7 @@ def _DivNoNanGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.div_no_nan(grad, y), rx), sx),
array_ops.reshape(
@@ -1188,7 +1188,7 @@ def _PowGrad(op, grad):
y = math_ops.conj(y)
z = math_ops.conj(z)

- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
gx = array_ops.reshape(
math_ops.reduce_sum(
gen_math_ops.mul_no_nan(y * math_ops.pow(x, y - 1), grad), rx), sx)
@@ -1204,7 +1204,7 @@ def _PowGrad(op, grad):
mask = x > 0
safe_x = array_ops.where(mask, x, array_ops.ones_like(x))
log_x = array_ops.where(mask, math_ops.log(safe_x), array_ops.zeros_like(x))
- if compat.forward_compatible(2019, 6, 14):
+ if compat.forward_compatible(2019, 11, 14):
gy = array_ops.reshape(
math_ops.reduce_sum(gen_math_ops.mul_no_nan(z * log_x, grad), ry), sy)
else:
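Every gated branch in math_grad.py above replaces a plain product or quotient with `mul_no_nan`/`xdivy`, whose defining property is that a zero incoming gradient stays zero even where the local partial derivative blows up. A minimal numeric sketch of that behavior, assuming eager execution and the public `tf.math` wrappers rather than the internal `math_ops`/`gen_math_ops` symbols used in the diff:

```python
import tensorflow as tf

grad = tf.constant(0.0)  # upstream gradient is zero at the singular point
x = tf.constant(0.0)     # singularity of d/dx log(x) = 1/x

# Old _LogGrad branch: grad * (1/x) evaluates 0 * inf -> nan.
print((grad * tf.math.reciprocal(x)).numpy())   # nan

# New branch: xdivy(grad, x) is defined to be 0 whenever grad == 0.
print(tf.math.xdivy(grad, x).numpy())           # 0.0

# multiply_no_nan gives the analogous guarantee for the product form:
# the result is 0 whenever the second operand is 0, even if the first
# operand is inf or nan.
print(tf.math.multiply_no_nan(tf.constant(float("inf")), grad).numpy())  # 0.0
```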