Update v1-only training/adagrad_test with a proper reason.

PiperOrigin-RevId: 320273086
Change-Id: Ibcd14926a7e8acb457347501a50d32c1973e1bc3
Author: Pavithra Vijay 2020-07-08 15:02:57 -07:00, committed by TensorFlower Gardener
parent 546df2593f
commit e242717155
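For context, a minimal sketch (not from this commit; ExampleTest and its methods are hypothetical) of the decorator change applied throughout the file below: test_util.run_deprecated_v1 marks a test as graph-mode-only without recording why, while test_util.run_v1_only requires an explicit reason string, which is what this commit supplies.

# Hypothetical test case illustrating the before/after decorator usage.
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test


class ExampleTest(test.TestCase):

  # Before: the decorator records no justification for the V1 restriction.
  @test_util.run_deprecated_v1
  def testOldStyle(self):
    self.assertEqual(1, 1)

  # After: run_v1_only takes a reason string explaining the restriction.
  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
  def testNewStyle(self):
    self.assertEqual(1, 1)


if __name__ == "__main__":
  test.main()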

--- a/tensorflow/python/training/adagrad_test.py
+++ b/tensorflow/python/training/adagrad_test.py
@@ -96,7 +96,7 @@ class AdagradOptimizerTest(test.TestCase):
   def testBasicLocked(self):
     self.doTestBasic(use_locking=True)
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testMinimizeSparseResourceVariable(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -117,7 +117,7 @@ class AdagradOptimizerTest(test.TestCase):
                                            self.evaluate(var0),
                                            atol=0.01)
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testTensorLearningRate(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -144,7 +144,7 @@ class AdagradOptimizerTest(test.TestCase):
             np.array([2.715679168701172, 3.715679168701172]),
             self.evaluate(var1))
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testSparseBasic(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -176,7 +176,7 @@ class AdagradOptimizerTest(test.TestCase):
         self.assertAllCloseAccordingToType(
             np.array([[3.0], [3.715679168701172]]), self.evaluate(var1))
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testSparseRepeatedIndices(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -207,7 +207,7 @@ class AdagradOptimizerTest(test.TestCase):
         self.assertAllClose(aggregated_update_var,
                             self.evaluate(repeated_index_update_var))
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testSparseRepeatedIndicesResourceVariable(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -232,7 +232,7 @@ class AdagradOptimizerTest(test.TestCase):
         self.assertAllCloseAccordingToType(
             self.evaluate(var_repeated), self.evaluate(var_aggregated))
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testSparseStability(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -269,7 +269,7 @@ class AdagradOptimizerTest(test.TestCase):
                 -0.01029443
             ]]), self.evaluate(var0))
 
-  @test_util.run_deprecated_v1
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testSharing(self):
     for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
       with self.cached_session():
@@ -306,7 +306,7 @@ class AdagradOptimizerTest(test.TestCase):
             np.array([2.715679168701172, 3.715679168701172]),
             self.evaluate(var1))
 
-  @test_util.run_v1_only("b/120545219")
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testDynamicShapeVariable_Ok(self):
     with self.cached_session():
       v = variable_scope.get_variable("v", initializer=constant_op.constant(1.),
@@ -315,7 +315,7 @@ class AdagradOptimizerTest(test.TestCase):
       # Creating optimizer should cause no exception.
       adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1)
 
-  @test_util.run_v1_only("b/120545219")
+  @test_util.run_v1_only("train.AdagradOptimizer is V1 only API.")
   def testDynamicShapeVariableWithCallableInit(self):
     var0 = variable_scope.get_variable("var0",
                                        initializer=constant_op.constant(1.),
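For reference, a minimal sketch (not part of this commit) of why these tests are restricted to V1: train.AdagradOptimizer builds graph-mode ops that must be driven by a tf.compat.v1.Session, unlike TF2's eager-mode tf.keras.optimizers.Adagrad. The variable values below are illustrative.

# Minimal sketch, assuming TensorFlow 2.x with the compat.v1 shim available.
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()  # V1 optimizers need graph mode, not eager execution.

var0 = tf.Variable([1.0, 2.0])
loss = tf.reduce_sum(var0 * var0)
opt = tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1)
train_op = opt.minimize(loss)

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(train_op)  # One Adagrad step; the accumulator starts at 0.1.
  print(sess.run(var0))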