Remove @test_util.deprecated_graph_mode_only in gradients_test.py

PiperOrigin-RevId: 324310254
Change-Id: I47728b12de273d1fa50eac71ef06f6209ba4e6f6
Authored by Kibeom Kim on 2020-07-31 16:48:37 -07:00; committed by TensorFlower Gardener
parent 40626d2c54
commit 0d78d544c3

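The change parameterizes the recompute_grad scope test over use_tape instead of pinning it to graph mode. As a minimal sketch (not from the commit) of why the parameter list in the diff below is wrapped in set(...): executing_eagerly here is a plain boolean standing in for context.executing_eagerly().

def parameter_values(executing_eagerly):
  # Mirrors @parameterized.parameters(set((True, context.executing_eagerly()))).
  return set((True, executing_eagerly))

# Eager mode: the set collapses to {True}, so only the use_tape=True case runs.
assert parameter_values(True) == {True}
# Graph mode: both values survive, so the test covers the tape and non-tape paths.
assert parameter_values(False) == {True, False}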

@@ -1444,7 +1444,8 @@ class TensorListGradientsTest(test_util.TensorFlowTestCase):
     self.assertEqual(self.evaluate(grad), 5.)


-class VariablesGradientTest(test_util.TensorFlowTestCase):
+class VariablesGradientTest(test_util.TensorFlowTestCase,
+                            parameterized.TestCase):

   def _TestFnVariablesGradient(self, inputs, test_fn, vars_to_grad):
     """Returns gradients of `test_model` with respect to `vars_to_grad`."""
@@ -1550,8 +1551,8 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
     for g, g_re in zip(grads, grads_re):
       self.assertAllClose(g, g_re)

-  @test_util.deprecated_graph_mode_only
-  def testFnRecomputeWithScopeGradientTape(self):
+  @parameterized.parameters(set((True, context.executing_eagerly())))
+  def testFnRecomputeWithScopeGradient(self, use_tape):
     """Checks that recompute_grad works with var scope and GradientTape."""

     def TestFn(input_t):
@@ -1561,7 +1562,6 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
             shape=10,
             trainable=True,
         )
-        self.evaluate(test_var.assign(np.ones([10])))
         return input_t * test_var

     test_input_t = constant(np.zeros((10, 10), dtype=np.float32))
@@ -1570,10 +1570,12 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
         "output_scope", reuse=variable_scope.AUTO_REUSE, use_resource=True):
       test_fn_re = custom_gradient.recompute_grad(TestFn)

-      with backprop.GradientTape(persistent=True) as tape:
+      with test_util.AbstractGradientTape(
+          use_tape=use_tape, persistent=True) as tape:
         out_re = test_fn_re(test_input_t)
         out = TestFn(test_input_t)
+        self.evaluate(variables.global_variables_initializer())

       grads_re = tape.gradient(out_re, variables.trainable_variables())
       grads = tape.gradient(out, variables.trainable_variables())
@@ -1581,39 +1583,6 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
       grads = self.evaluate(grads)
     for g, g_re in zip(grads, grads_re):
       self.assertAllClose(g, g_re)
       self.assertAllClose(g, g_re)
-
-  @test_util.deprecated_graph_mode_only
-  def testFnRecomputeWithScopeGradients(self):
-    """Checks that recompute_grad works with var scope and gradients(..)."""
-
-    def TestFn(input_t):
-      with variable_scope.variable_scope("inner_scope"):
-        test_var = variable_scope.get_variable(
-            name="test_var",
-            shape=10,
-            trainable=True,
-        )
-        return input_t * test_var
-
-    test_input_t = constant(np.zeros((10, 10), dtype=np.float32))
-
-    with variable_scope.variable_scope(
-        "output_scope", reuse=variable_scope.AUTO_REUSE, use_resource=True):
-      test_fn_re = custom_gradient.recompute_grad(TestFn)
-      out_re = test_fn_re(test_input_t)
-      out = TestFn(test_input_t)
-
-      init = variables.global_variables_initializer()
-      self.evaluate(init)
-
-      grads_re = gradients.gradients(out_re, variables.trainable_variables())
-      grads = gradients.gradients(out, variables.trainable_variables())
-
-      grads_re = self.evaluate(grads_re)
-      grads = self.evaluate(grads)
-      for g, g_re in zip(grads, grads_re):
-        self.assertAllClose(g, g_re)
-        self.assertAllClose(g, g_re)

   @test_util.run_in_graph_and_eager_modes
   def testFnRecomputeSameTensor(self):
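For readers unfamiliar with the test_util.AbstractGradientTape helper used above: it lets one test body request gradients the same way whether a tape is in use or not. Below is a rough, illustrative stand-in written against the public TensorFlow API; SimpleGradientRecorder and its layout are assumptions for this sketch, not TensorFlow's internal implementation.

import tensorflow.compat.v1 as tf

class SimpleGradientRecorder(object):
  """Illustrative helper: tape gradients if use_tape, else symbolic tf.gradients."""

  def __init__(self, use_tape, persistent=False):
    # Only build a real GradientTape when the tape path is requested.
    self._tape = tf.GradientTape(persistent=persistent) if use_tape else None

  def __enter__(self):
    if self._tape is not None:
      self._tape.__enter__()
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    if self._tape is not None:
      self._tape.__exit__(exc_type, exc_value, traceback)

  def gradient(self, target, sources):
    if self._tape is not None:
      return self._tape.gradient(target, sources)
    # Graph-mode fallback: build symbolic gradient ops instead of using a tape.
    return tf.gradients(target, sources)

With a helper shaped like this, the parameterized test can call tape.gradient uniformly while the use_tape=False case still exercises the gradients(...) code path that the removed graph-only test used to cover.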