From 0d78d544c3102fd6820941fbcde2b8ab7a1d56ec Mon Sep 17 00:00:00 2001
From: Kibeom Kim
Date: Fri, 31 Jul 2020 16:48:37 -0700
Subject: [PATCH] Remove @test_util.deprecated_graph_mode_only in
 gradients_test.py

PiperOrigin-RevId: 324310254
Change-Id: I47728b12de273d1fa50eac71ef06f6209ba4e6f6
---
 tensorflow/python/ops/gradients_test.py | 45 ++++---------------------
 1 file changed, 7 insertions(+), 38 deletions(-)

diff --git a/tensorflow/python/ops/gradients_test.py b/tensorflow/python/ops/gradients_test.py
index 7425e1e1522..5bd31aa8c73 100644
--- a/tensorflow/python/ops/gradients_test.py
+++ b/tensorflow/python/ops/gradients_test.py
@@ -1444,7 +1444,8 @@ class TensorListGradientsTest(test_util.TensorFlowTestCase):
     self.assertEqual(self.evaluate(grad), 5.)
 
 
-class VariablesGradientTest(test_util.TensorFlowTestCase):
+class VariablesGradientTest(test_util.TensorFlowTestCase,
+                            parameterized.TestCase):
 
   def _TestFnVariablesGradient(self, inputs, test_fn, vars_to_grad):
     """Returns gradients of `test_model` with respect to `vars_to_grad`."""
@@ -1550,8 +1551,8 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
     for g, g_re in zip(grads, grads_re):
       self.assertAllClose(g, g_re)
 
-  @test_util.deprecated_graph_mode_only
-  def testFnRecomputeWithScopeGradientTape(self):
+  @parameterized.parameters(set((True, context.executing_eagerly())))
+  def testFnRecomputeWithScopeGradient(self, use_tape):
     """Checks that recompute_grad works with var scope and GradientTape."""
 
     def TestFn(input_t):
@@ -1561,7 +1562,6 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
             shape=10,
             trainable=True,
         )
-        self.evaluate(test_var.assign(np.ones([10])))
         return input_t * test_var
 
     test_input_t = constant(np.zeros((10, 10), dtype=np.float32))
@@ -1570,10 +1570,12 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
         "output_scope", reuse=variable_scope.AUTO_REUSE, use_resource=True):
       test_fn_re = custom_gradient.recompute_grad(TestFn)
 
-      with backprop.GradientTape(persistent=True) as tape:
+      with test_util.AbstractGradientTape(
+          use_tape=use_tape, persistent=True) as tape:
         out_re = test_fn_re(test_input_t)
         out = TestFn(test_input_t)
 
+    self.evaluate(variables.global_variables_initializer())
     grads_re = tape.gradient(out_re, variables.trainable_variables())
     grads = tape.gradient(out, variables.trainable_variables())
 
@@ -1581,39 +1583,6 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
     grads = self.evaluate(grads)
     for g, g_re in zip(grads, grads_re):
       self.assertAllClose(g, g_re)
-      self.assertAllClose(g, g_re)
-
-  @test_util.deprecated_graph_mode_only
-  def testFnRecomputeWithScopeGradients(self):
-    """Checks that recompute_grad works with var scope and gradients(..)."""
-
-    def TestFn(input_t):
-      with variable_scope.variable_scope("inner_scope"):
-        test_var = variable_scope.get_variable(
-            name="test_var",
-            shape=10,
-            trainable=True,
-        )
-        return input_t * test_var
-
-    test_input_t = constant(np.zeros((10, 10), dtype=np.float32))
-
-    with variable_scope.variable_scope(
-        "output_scope", reuse=variable_scope.AUTO_REUSE, use_resource=True):
-      test_fn_re = custom_gradient.recompute_grad(TestFn)
-      out_re = test_fn_re(test_input_t)
-      out = TestFn(test_input_t)
-
-    init = variables.global_variables_initializer()
-    self.evaluate(init)
-    grads_re = gradients.gradients(out_re, variables.trainable_variables())
-    grads = gradients.gradients(out, variables.trainable_variables())
-
-    grads_re = self.evaluate(grads_re)
-    grads = self.evaluate(grads)
-    for g, g_re in zip(grads, grads_re):
-      self.assertAllClose(g, g_re)
-      self.assertAllClose(g, g_re)
 
   @test_util.run_in_graph_and_eager_modes
   def testFnRecomputeSameTensor(self):
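
Note: the patch folds two graph-mode-only tests into one parameterized test built
around test_util.AbstractGradientTape, which (as used in the hunk above) lets the
same test body compute gradients either through an eager GradientTape or through
graph-style symbolic gradients, selected by use_tape. Below is a minimal
standalone sketch of that pattern using only public TensorFlow APIs; the helper
name abstract_gradient_tape and the wrapper class are illustrative, not
TensorFlow's internal implementation.

  # Illustrative analogue of the pattern this patch adopts -- NOT the internal
  # test_util.AbstractGradientTape. use_tape=True yields a real eager
  # GradientTape; use_tape=False yields a shim with the same
  # tape.gradient(target, sources) interface backed by tf.compat.v1.gradients.
  import contextlib

  import tensorflow as tf


  class _SymbolicGradients:
    """Exposes a tape.gradient(...)-shaped call over tf.compat.v1.gradients."""

    def gradient(self, target, sources):
      # Graph-mode path, analogous to the gradients.gradients(...) calls in
      # the deleted testFnRecomputeWithScopeGradients; raises under eager.
      return tf.compat.v1.gradients(target, sources)


  @contextlib.contextmanager
  def abstract_gradient_tape(use_tape, persistent=False):
    if use_tape:
      with tf.GradientTape(persistent=persistent) as tape:
        yield tape
    else:
      yield _SymbolicGradients()

The parameter trick in the hunk, @parameterized.parameters(set((True,
context.executing_eagerly()))), evaluates to {True} when the suite runs eagerly
(only the tape path is valid) and to {True, False} in graph mode, so the single
testFnRecomputeWithScopeGradient covers both the GradientTape path and the
gradients.gradients path that the deleted test used to exercise.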