Move compat date for safe gradients change forward by one month. This will give us more time to discuss the impact of this change.

PiperOrigin-RevId: 247586553
Peter Buchlovsky 2019-05-10 03:48:08 -07:00 committed by TensorFlower Gardener
parent 7e30ae8657
commit a600315139
2 changed files with 24 additions and 24 deletions
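For context, every hunk below toggles the same forward-compatibility gate. A minimal sketch of that gate, assuming the tensorflow.python.compat APIs (compat.forward_compatible and the compat.forward_compatibility_horizon context manager); _log_grad_sketch is a hypothetical helper that only mirrors the _LogGrad hunk in this diff:

from tensorflow.python.compat import compat
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops

def _log_grad_sketch(x, grad):
  # Hypothetical stand-in mirroring the _LogGrad hunk in this commit.
  x = math_ops.conj(x)
  if compat.forward_compatible(2019, 6, 14):
    # New path: xdivy(grad, x) is 0 wherever grad == 0, even if x == 0,
    # so a zero upstream gradient does not become inf/nan at the pole.
    return gen_math_ops.xdivy(grad, x)
  else:
    # Old path: grad * (1 / x) yields 0 * inf = nan at x == 0.
    return grad * math_ops.reciprocal(x)

# forward_compatible(2019, 6, 14) stays False until the compatibility horizon
# passes that date, so pushing the date out by a month delays the switch to
# the safe-gradient kernels. Tests can opt in early via the horizon override:
with compat.forward_compatibility_horizon(2019, 6, 15):
  assert compat.forward_compatible(2019, 6, 14)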


@@ -1117,7 +1117,7 @@ class SingularGradientOpTest(test.TestCase):
@test_util.run_deprecated_v1
def testGradientAtSingularity(self):
-if not compat.forward_compatible(2019, 5, 14):
+if not compat.forward_compatible(2019, 6, 14):
self.skipTest("Skipping test for future functionality.")
ops_and_singularity = [


@@ -470,7 +470,7 @@ def _SqrtGradGrad(op, grad):
a = op.inputs[0]
y = op.outputs[0] # y = 0.5 * b / conj(a)
with ops.control_dependencies([grad]):
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
ga = gen_math_ops.xdivy(grad, a)
return -gen_math_ops.mul_no_nan(y, math_ops.conj(ga)), 0.5 * ga
else:
@@ -504,7 +504,7 @@ def _ExpGrad(op, grad):
y = op.outputs[0] # y = e^x
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(y, grad)
else:
return grad * y
@@ -517,7 +517,7 @@ def _Expm1Grad(op, grad):
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
y = math_ops.exp(x)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(y, grad)
else:
return grad * y
@@ -529,7 +529,7 @@ def _LogGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return gen_math_ops.xdivy(grad, x)
else:
return grad * math_ops.reciprocal(x)
@@ -541,7 +541,7 @@ def _Log1pGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return gen_math_ops.xdivy(grad, 1 + x)
else:
return grad * math_ops.reciprocal(1 + x)
@@ -623,7 +623,7 @@ def _AcoshGrad(op, grad):
y = op.outputs[0]
with ops.control_dependencies([grad]):
y = math_ops.conj(y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.xdivy(grad, math_ops.sinh(y))
else:
return grad / math_ops.sinh(y)
@@ -676,7 +676,7 @@ def _LgammaGrad(op, grad):
x = op.inputs[0]
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(math_ops.digamma(x), grad)
else:
return grad * math_ops.digamma(x)
@@ -689,7 +689,7 @@ def _DigammaGrad(op, grad):
with ops.control_dependencies([grad]):
x = math_ops.conj(x)
partial_x = math_ops.polygamma(array_ops.constant(1, dtype=x.dtype), x)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(partial_x, grad)
else:
return grad * partial_x
@@ -702,7 +702,7 @@ def _BesselI0eGrad(op, grad):
y = op.outputs[0]
with ops.control_dependencies([grad]):
partial_x = (math_ops.bessel_i1e(x) - math_ops.sign(x) * y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(partial_x, grad)
else:
return grad * partial_x
@@ -726,7 +726,7 @@ def _BesselI1eGrad(op, grad):
dy_dx = math_ops.bessel_i0e(safe_x) - y * (
math_ops.sign(safe_x) + math_ops.reciprocal(safe_x))
dy_dx = array_ops.where(x_is_not_tiny, dy_dx, 0.5 + zeros)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(dy_dx, grad)
else:
return grad * dy_dx
@@ -747,7 +747,7 @@ def _IgammaGrad(op, grad):
# and Gamma'(a) can grow large.
partial_x = math_ops.exp(-x + (a - 1) * math_ops.log(x) -
math_ops.lgamma(a))
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_a, grad), ra), sa),
array_ops.reshape(
@@ -786,7 +786,7 @@ def _BetaincGrad(op, grad):
(a - 1) * math_ops.log(x) - log_beta)
# TODO(b/36815900): Mark None return values as NotImplemented
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (
None, # da
None, # db
@@ -815,7 +815,7 @@ def _ZetaGrad(op, grad):
q = math_ops.conj(q)
partial_q = -x * math_ops.zeta(x + 1, q)
# TODO(b/36815900): Mark None return values as NotImplemented
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (None,
array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_q, grad), rq),
@@ -841,7 +841,7 @@ def _PolygammaGrad(op, grad):
x = math_ops.conj(x)
partial_x = math_ops.polygamma(n + 1, x)
# TODO(b/36815900): Mark None return values as NotImplemented
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (None,
array_ops.reshape(
math_ops.reduce_sum(math_ops.mul_no_nan(partial_x, grad), rx),
@@ -902,7 +902,7 @@ def _TanGrad(op, grad):
x = math_ops.conj(x)
secx = math_ops.reciprocal(math_ops.cos(x))
secx2 = math_ops.square(secx)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.mul_no_nan(secx2, grad)
else:
return secx2 * grad
@@ -917,7 +917,7 @@ def _AsinGrad(op, grad):
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return math_ops.xdivy(grad, den)
else:
inv = math_ops.reciprocal(den)
@@ -933,7 +933,7 @@ def _AcosGrad(op, grad):
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return -math_ops.xdivy(grad, den)
else:
inv = math_ops.reciprocal(den)
@@ -958,7 +958,7 @@ def _Atan2Grad(op, grad):
y = op.inputs[0]
x = op.inputs[1]
with ops.control_dependencies([grad]):
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
grad_inv = math_ops.xdivy(grad, (math_ops.square(x) + math_ops.square(y)))
else:
grad_inv = grad / (math_ops.square(x) + math_ops.square(y))
@@ -1078,7 +1078,7 @@ def _DivGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.xdivy(grad, y), rx), sx),
array_ops.reshape(
@@ -1131,7 +1131,7 @@ def _RealDivGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.xdivy(grad, y), rx), sx),
array_ops.reshape(
@@ -1158,7 +1158,7 @@ def _DivNoNanGrad(op, grad):
rx, ry = gen_array_ops.broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.div_no_nan(grad, y), rx), sx),
array_ops.reshape(
@@ -1188,7 +1188,7 @@ def _PowGrad(op, grad):
y = math_ops.conj(y)
z = math_ops.conj(z)
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
gx = array_ops.reshape(
math_ops.reduce_sum(
gen_math_ops.mul_no_nan(y * math_ops.pow(x, y - 1), grad), rx), sx)
@@ -1204,7 +1204,7 @@ def _PowGrad(op, grad):
mask = x > 0
safe_x = array_ops.where(mask, x, array_ops.ones_like(x))
log_x = array_ops.where(mask, math_ops.log(safe_x), array_ops.zeros_like(x))
-if compat.forward_compatible(2019, 5, 14):
+if compat.forward_compatible(2019, 6, 14):
gy = array_ops.reshape(
math_ops.reduce_sum(gen_math_ops.mul_no_nan(z * log_x, grad), ry), sy)
else:
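
For reference (not part of this commit), a small sketch of the behavior difference behind the gate, using the public tf.math wrappers of the same kernels (xdivy, divide_no_nan, multiply_no_nan) in eager mode:

import tensorflow as tf

x = tf.constant(0.0)  # singular point, e.g. for log or reciprocal
g = tf.constant(0.0)  # upstream gradient that is exactly zero there

# Old-style arithmetic propagates the singularity: 0 * (1/0) = nan.
print((g * tf.math.reciprocal(x)).numpy())                         # nan

# Safe ops selected once forward_compatible(2019, 6, 14) returns True:
print(tf.math.xdivy(g, x).numpy())                                 # 0.0
print(tf.math.divide_no_nan(g, x).numpy())                         # 0.0
print(tf.math.multiply_no_nan(tf.math.reciprocal(x), g).numpy())   # 0.0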