From 2819e2a2728daac29cc8a8b524a6c1c4fb032f58 Mon Sep 17 00:00:00 2001
From: Cesar Crusius
Date: Thu, 30 Apr 2020 15:01:41 -0700
Subject: [PATCH] Remove reset_test argument from test_util.run_in_graph_and_eager_modes

That argument was only used with its default `True` value. Removing it
makes reasoning about the code, and improving its logic, easier. In
particular, it will make it easier to enforce the resetting of the
eager context in between test calls.

PiperOrigin-RevId: 309306414
Change-Id: Ie98b6586f51cb01a5cdf6e76f76194bd77220d85
---
 tensorflow/python/framework/test_util.py    | 15 +++++----------
 tensorflow/python/training/adadelta_test.py |  2 +-
 tensorflow/python/training/adagrad_test.py  |  2 +-
 tensorflow/python/training/adam_test.py     |  2 +-
 tensorflow/python/training/momentum_test.py |  6 +++---
 5 files changed, 11 insertions(+), 16 deletions(-)

diff --git a/tensorflow/python/framework/test_util.py b/tensorflow/python/framework/test_util.py
index 535de3402db..fa2a4f63e6c 100644
--- a/tensorflow/python/framework/test_util.py
+++ b/tensorflow/python/framework/test_util.py
@@ -1075,7 +1075,6 @@ def eager_lazy_remote_copy_on_and_off(f):
 def run_in_graph_and_eager_modes(func=None,
                                  config=None,
                                  use_gpu=True,
-                                 reset_test=True,
                                  assert_no_eager_garbage=False):
   """Execute the decorated test with and without enabling eager execution.
 
@@ -1117,8 +1116,6 @@ def run_in_graph_and_eager_modes(func=None,
     config: An optional config_pb2.ConfigProto to use to configure the
       session when executing graphs.
     use_gpu: If True, attempt to run as many operations as possible on GPU.
-    reset_test: If True, tearDown and SetUp the test case between the two
-      executions of the test (once with and once without eager execution).
     assert_no_eager_garbage: If True, sets DEBUG_SAVEALL on the garbage
       collector and asserts that no extra garbage has been created when running
       the test with eager execution enabled. This will fail if there are
@@ -1162,17 +1159,15 @@ def run_in_graph_and_eager_modes(func=None,
         run_eagerly = assert_no_new_tensors(
             assert_no_garbage_created(run_eagerly))
 
-      if reset_test:
-        # This decorator runs the wrapped test twice.
-        # Reset the test environment between runs.
-        self.tearDown()
-        self._tempdir = None
+      # This decorator runs the wrapped test twice.
+      # Reset the test environment between runs.
+      self.tearDown()
+      self._tempdir = None
       # Create a new graph for the eagerly executed version of this test for
       # better isolation.
       graph_for_eager_test = ops.Graph()
       with graph_for_eager_test.as_default(), context.eager_mode():
-        if reset_test:
-          self.setUp()
+        self.setUp()
         run_eagerly(self, **kwargs)
       ops.dismantle_graph(graph_for_eager_test)
 
diff --git a/tensorflow/python/training/adadelta_test.py b/tensorflow/python/training/adadelta_test.py
index 0e5af5a9222..5bc2937e144 100644
--- a/tensorflow/python/training/adadelta_test.py
+++ b/tensorflow/python/training/adadelta_test.py
@@ -158,7 +158,7 @@ class AdadeltaOptimizerTest(test.TestCase):
     with self.cached_session():
       self.doTestBasic(use_resource=False)
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testResourceBasic(self):
     self.doTestBasic(use_resource=True)
 
diff --git a/tensorflow/python/training/adagrad_test.py b/tensorflow/python/training/adagrad_test.py
index 3528fdaa8b0..4c0ee1c66f5 100644
--- a/tensorflow/python/training/adagrad_test.py
+++ b/tensorflow/python/training/adagrad_test.py
@@ -84,7 +84,7 @@ class AdagradOptimizerTest(test.TestCase):
   def testBasic(self):
     self.doTestBasic(use_locking=False)
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testBasicResource(self):
     self.doTestBasic(use_locking=False, use_resource=True)
 
diff --git a/tensorflow/python/training/adam_test.py b/tensorflow/python/training/adam_test.py
index 8ac5f944cd6..d0ff8603da3 100644
--- a/tensorflow/python/training/adam_test.py
+++ b/tensorflow/python/training/adam_test.py
@@ -243,7 +243,7 @@ class AdamOptimizerTest(test.TestCase):
     with self.cached_session():
       self.doTestBasic(use_resource=False)
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testResourceBasic(self):
     self.doTestBasic(use_resource=True)
 
diff --git a/tensorflow/python/training/momentum_test.py b/tensorflow/python/training/momentum_test.py
index 8d27e957fc8..639276988a1 100644
--- a/tensorflow/python/training/momentum_test.py
+++ b/tensorflow/python/training/momentum_test.py
@@ -126,7 +126,7 @@ class MomentumOptimizerTest(test.TestCase):
     with self.cached_session():
       self.doTestBasic(use_resource=False)
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testResourceBasic(self):
     self.doTestBasic(use_resource=True)
 
@@ -229,7 +229,7 @@ class MomentumOptimizerTest(test.TestCase):
       self.assertAllClose(var0_np, self.evaluate(var0))
       self.assertAllClose(var1_np, self.evaluate(var1))
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testMinimizeSparseResourceVariable(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       # This test invokes the ResourceSparseApplyMomentum operation, which
@@ -259,7 +259,7 @@ class MomentumOptimizerTest(test.TestCase):
       # Validate updated params
       self.assertAllCloseAccordingToType([[-111, -138]], self.evaluate(var0))
 
-  @test_util.run_in_graph_and_eager_modes(reset_test=True)
+  @test_util.run_in_graph_and_eager_modes
   def testMinimizeWith2DIndicesForEmbeddingLookup(self):
     # This test invokes the ResourceSparseApplyMomentum operation, which
     # did not have a registered GPU kernel as of April 2018. With graph
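
Usage note (illustrative, not taken from the patch): with reset_test removed, tests either apply the decorator bare, as in the hunks above, or pass only the remaining keyword arguments (config, use_gpu, assert_no_eager_garbage). A minimal sketch follows, assuming the TensorFlow-internal test framework; MyOptimizerTest and doTestBasic are hypothetical stand-ins.

# Sketch only: MyOptimizerTest and doTestBasic are hypothetical.
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test


class MyOptimizerTest(test.TestCase):

  # Bare decorator, as used in the updated optimizer tests above.
  @test_util.run_in_graph_and_eager_modes
  def testResourceBasic(self):
    self.doTestBasic(use_resource=True)

  # The remaining keyword arguments are unchanged; only reset_test is gone.
  @test_util.run_in_graph_and_eager_modes(use_gpu=False)
  def testResourceBasicCpu(self):
    self.doTestBasic(use_resource=True)

  def doTestBasic(self, use_resource=False):
    # Hypothetical helper; a real test would build and evaluate an optimizer.
    self.assertTrue(use_resource)


if __name__ == "__main__":
  test.main()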