From 295d7831843d50b87987a51d7a0cde5a8d814988 Mon Sep 17 00:00:00 2001
From: fsx950223
Date: Wed, 28 Oct 2020 08:33:41 +0000
Subject: [PATCH 1/4] fix nest args

---
 tensorflow/python/eager/backprop_test.py | 8 +++++---
 tensorflow/python/ops/custom_gradient.py | 3 +--
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index 584fed73158..e80767998ce 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -1605,7 +1605,7 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
 
     @def_function.function
     def inner(z):
-      return z + 1
+      return [z[0] + 1, z[1] + 1]
 
     i = constant_op.constant(0.0)
     c = lambda y, i: i < 10.
@@ -1618,10 +1618,12 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
 
     with MemoryChecker() as memory_checker:
       for _ in range(5):
-        x = variables.Variable(1.0, name='x')
+        x = [variables.Variable([1.0, 2.0], name='x'),
+             variables.Variable(1.0, name='x')]
         with backprop.GradientTape():
           y = outer(x)
-        self.assertAllEqual(y, 11.0)
+        self.assertAllEqual(y[0], [11.0, 12.0])
+        self.assertAllEqual(y[1], 11.0)
       memory_checker.report()
       memory_checker.assert_no_leak_if_all_possibly_except_one()

diff --git a/tensorflow/python/ops/custom_gradient.py b/tensorflow/python/ops/custom_gradient.py
index 3e38f68a0f7..2fc663e76be 100644
--- a/tensorflow/python/ops/custom_gradient.py
+++ b/tensorflow/python/ops/custom_gradient.py
@@ -514,7 +514,6 @@ def recompute_grad(f):
     current_var_scope = variable_scope.get_variable_scope()
     with tape_lib.stop_recording():
       result = f(*args, **kwargs)
-
     def grad_wrapper(*wrapper_args, **grad_kwargs):
       """Wrapper function to accomodate lack of kwargs in graph mode decorator."""
 
@@ -524,7 +523,7 @@ def recompute_grad(f):
         # Gradient calculation for reverse mode autodiff.
         variables = grad_kwargs.get("variables")
         with backprop.GradientTape() as t:
-          id_args = [gen_array_ops.identity(x) for x in args]
+          id_args = nest.map_structure(gen_array_ops.identity, args)
           t.watch(id_args)
           if variables is not None:
             t.watch(variables)
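
Note on the custom_gradient.py change above: the old list comprehension applies
identity only one level deep, so an argument that is itself a list (or dict) of
tensors is handed to identity whole, where it is converted to a single tensor
and fails for mismatched leaf shapes. A minimal standalone sketch of the
difference, not part of the patch, using only the public tf.identity / tf.nest
APIs:

    import tensorflow as tf

    # One positional argument that is itself a list of tensors with
    # different shapes, mirroring the test case added later in the series.
    args = ([tf.constant([1.0, 2.0]), tf.constant(1.0)],)

    # Old code path: identity is applied only to top-level elements, so the
    # inner list is passed to tf.identity, which tries to pack it into one
    # tensor and fails because shapes (2,) and () cannot be stacked.
    try:
      id_args = [tf.identity(x) for x in args]
    except (ValueError, tf.errors.InvalidArgumentError):
      print('one-level identity fails on nested args')

    # New code path: map over every leaf tensor, preserving the nesting.
    id_args = tf.nest.map_structure(tf.identity, args)
    print([t.shape for t in tf.nest.flatten(id_args)])
    # TensorShape([2]), TensorShape([])
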
From 5ba75f40607e8314ed00873351d52f54cf79a8b7 Mon Sep 17 00:00:00 2001
From: fsx950223
Date: Thu, 29 Oct 2020 01:52:14 +0000
Subject: [PATCH 2/4] revert line

---
 tensorflow/python/ops/custom_gradient.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tensorflow/python/ops/custom_gradient.py b/tensorflow/python/ops/custom_gradient.py
index 2fc663e76be..5d7f605b884 100644
--- a/tensorflow/python/ops/custom_gradient.py
+++ b/tensorflow/python/ops/custom_gradient.py
@@ -514,6 +514,7 @@ def recompute_grad(f):
     current_var_scope = variable_scope.get_variable_scope()
     with tape_lib.stop_recording():
       result = f(*args, **kwargs)
+
     def grad_wrapper(*wrapper_args, **grad_kwargs):
       """Wrapper function to accomodate lack of kwargs in graph mode decorator."""
 

From 162f9cff31247a032a5093a4e4831f4376434cf3 Mon Sep 17 00:00:00 2001
From: fsx950223
Date: Thu, 29 Oct 2020 04:49:02 +0000
Subject: [PATCH 3/4] update test case

---
 tensorflow/python/eager/backprop_test.py | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index e80767998ce..9aff91ed6bd 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -1593,6 +1593,20 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
       self.assertIn('gradient_tape/my_scope/', op.name)
     self.assertEqual(num_sin_ops_found, 2)
 
+  @test_util.assert_no_new_pyobjects_executing_eagerly
+  def testRecomputeGradWithDifferentShape(self):
+
+    @custom_gradient.recompute_grad
+    def outer(x):
+      return [x[0]+1, x[1]+1]
+
+    x = [variables.Variable([1.0, 2.0], name='x'),
+         variables.Variable(1.0, name='x')]
+    with backprop.GradientTape():
+      y = outer(x)
+    self.assertAllEqual(y[0], [2.0, 3.0])
+    self.assertAllEqual(y[1], 2.0)
+
   @test_util.assert_no_new_pyobjects_executing_eagerly
   def testRecomputeGradWithNestedFunctionAndWhileLoop(self):
@@ -1605,7 +1619,7 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
 
     @def_function.function
     def inner(z):
-      return [z[0] + 1, z[1] + 1]
+      return z + 1
 
     i = constant_op.constant(0.0)
     c = lambda y, i: i < 10.
@@ -1618,12 +1632,10 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
 
     with MemoryChecker() as memory_checker:
       for _ in range(5):
-        x = [variables.Variable([1.0, 2.0], name='x'),
-             variables.Variable(1.0, name='x')]
+        x = variables.Variable(1.0, name='x')
         with backprop.GradientTape():
          y = outer(x)
-        self.assertAllEqual(y[0], [11.0, 12.0])
-        self.assertAllEqual(y[1], 11.0)
+        self.assertAllEqual(y, 11.0)
       memory_checker.report()
      memory_checker.assert_no_leak_if_all_possibly_except_one()
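
The new testRecomputeGradWithDifferentShape only checks forward values. A rough
end-to-end sketch of what the fix enables, including a gradient through the
recomputed function; this is not part of the patch and assumes the public
tf.recompute_grad export and TF 2.x eager execution:

    import tensorflow as tf

    @tf.recompute_grad
    def outer(x):
      # The input is a list of tensors with different shapes, which the old
      # one-level identity in recompute_grad could not handle.
      return [x[0] + 1, x[1] + 1]

    x = [tf.Variable([1.0, 2.0]), tf.Variable(1.0)]
    with tf.GradientTape() as tape:
      y = outer(x)
      loss = tf.reduce_sum(y[0]) + y[1]
    grads = tape.gradient(loss, x)
    print([g.numpy() for g in grads])  # expected: [array([1., 1.]), 1.0]
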
From a445d6b7cf21c9c1c8730b74c29db2acd79ed3e3 Mon Sep 17 00:00:00 2001
From: fsx950223
Date: Thu, 29 Oct 2020 05:01:21 +0000
Subject: [PATCH 4/4] add dict test case

---
 tensorflow/python/eager/backprop_test.py | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/tensorflow/python/eager/backprop_test.py b/tensorflow/python/eager/backprop_test.py
index 9aff91ed6bd..b1c590ade2f 100644
--- a/tensorflow/python/eager/backprop_test.py
+++ b/tensorflow/python/eager/backprop_test.py
@@ -1600,13 +1600,25 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
     def outer(x):
       return [x[0]+1, x[1]+1]
 
-    x = [variables.Variable([1.0, 2.0], name='x'),
-         variables.Variable(1.0, name='x')]
+    x = [variables.Variable([1.0, 2.0], name='a'),
+         variables.Variable(1.0, name='b')]
     with backprop.GradientTape():
       y = outer(x)
     self.assertAllEqual(y[0], [2.0, 3.0])
     self.assertAllEqual(y[1], 2.0)
 
+    @custom_gradient.recompute_grad
+    def outer_dict(x):
+      for key in x.keys():
+        x[key] = x[key] + 1
+      return x
+    x = {x[0].ref(): x[0], x[1].ref(): x[1]}
+    with backprop.GradientTape():
+      y = outer_dict(x)
+    y = list(y.values())
+    self.assertAllEqual(y[0], [2.0, 3.0])
+    self.assertAllEqual(y[1], 2.0)
+
   @test_util.assert_no_new_pyobjects_executing_eagerly
   def testRecomputeGradWithNestedFunctionAndWhileLoop(self):
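
The dict case relies on Variable.ref() because variables are not hashable and
so cannot be used as dict keys directly; ref() returns a hashable reference
object. A standalone sketch of the same pattern, not part of the patch and
again assuming public TF 2.x APIs; unlike the test's outer_dict, this version
returns a new dict rather than mutating its argument in place:

    import tensorflow as tf

    @tf.recompute_grad
    def outer_dict(d):
      # With the fix, nest.map_structure inside recompute_grad walks the
      # dict values, so dict-structured arguments are supported.
      return {k: v + 1 for k, v in d.items()}

    a = tf.Variable([1.0, 2.0], name='a')
    b = tf.Variable(1.0, name='b')
    d = {a.ref(): a, b.ref(): b}
    with tf.GradientTape():
      y = outer_dict(d)
    print([v.numpy() for v in y.values()])  # [array([2., 3.]), 2.0]
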