Remove @test_util.run_deprecated_v1 in broadcast_to_ops_test.py

PiperOrigin-RevId: 324129236
Change-Id: I770aef18f7ab43a9f679c4e285135e2f0bd4797a
Authored by Kibeom Kim on 2020-07-30 18:54:02 -07:00; committed by TensorFlower Gardener
parent e1d3f777be
commit 294b239a2b
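
For context, here is a minimal sketch of what dropping the decorator implies (a hypothetical test, not part of this commit): without @test_util.run_deprecated_v1 the test body runs under TF2 eager execution by default, so the op result can be checked against NumPy directly rather than inside a v1 graph session.

# Hypothetical illustration only; the class and test names below are not from this commit.
import numpy as np

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test as test_lib


class BroadcastToEagerSketchTest(test_util.TensorFlowTestCase):

  def testBroadcastToBasicEager(self):
    # No @test_util.run_deprecated_v1: the body executes eagerly in TF2,
    # so broadcast_to can be compared against np.broadcast_to directly.
    x = np.array([1, 2, 3], dtype=np.int32)
    v_tf = array_ops.broadcast_to(constant_op.constant(x), [3, 3])
    v_np = np.broadcast_to(x, [3, 3])
    self.assertAllEqual(v_tf, v_np)


if __name__ == "__main__":
  test_lib.main()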


@@ -25,13 +25,12 @@ from tensorflow.python.framework import dtypes
 from tensorflow.python.framework import errors
 from tensorflow.python.framework import test_util
 from tensorflow.python.ops import array_ops
-from tensorflow.python.ops import gradient_checker
+from tensorflow.python.ops import gradient_checker_v2
 from tensorflow.python.platform import test as test_lib


 class BroadcastToTest(test_util.TensorFlowTestCase):

-  @test_util.run_deprecated_v1
   def testBroadcastToBasic(self):
     for dtype in [np.uint8, np.uint16, np.int8, np.int16, np.int32, np.int64]:
       with self.session(use_gpu=True):
@@ -40,7 +39,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
         v_np = np.broadcast_to(x, [3, 3])
         self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToString(self):
     with self.session(use_gpu=True):
       x = np.array([b"1", b"2", b"3"])
@@ -48,7 +46,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, [3, 3])
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToBool(self):
     with self.session(use_gpu=True):
       x = np.array([True, False, True], dtype=np.bool)
@@ -56,7 +53,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, [3, 3])
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToShape(self):
     for input_dim in range(1, 6):
       for output_dim in range(input_dim, 6):
@@ -68,7 +64,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
           v_np = np.broadcast_to(x, output_shape)
           self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToShapeInnerDim(self):
     input_shape = [2, 1, 3]
     output_shape = [2, 5, 3]
@@ -78,7 +73,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, output_shape)
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToShapeLargerDim(self):
     input_shape = [2, 1, 3, 2, 2, 2]
     output_shape = [1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 15, 3, 2, 2, 2]
@@ -88,7 +82,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, output_shape)
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToShapeLargerDim2(self):
     input_shape = [2, 1, 3, 2, 2, 2, 1, 1, 1]
     output_shape = [1, 1, 1, 2, 5, 3, 2, 2, 2, 3, 3, 3]
@@ -98,7 +91,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, output_shape)
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToScalar(self):
     with self.session(use_gpu=True):
       x = np.array(1, dtype=np.int32)
@@ -106,7 +98,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, [3, 3])
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastScalarToNonScalar(self):
     with self.session(use_gpu=True):
       x = np.array(1.0, dtype=np.float)
@@ -115,7 +106,6 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
       v_np = np.broadcast_to(x, [2, 3, 4, 1, 1, 1])
       self.assertAllEqual(v_tf, v_np)

-  @test_util.run_deprecated_v1
   def testBroadcastToShapeTypeAndInference(self):
     for dtype in [dtypes.int32, dtypes.int64]:
       with self.cached_session(use_gpu=True):
@@ -137,59 +127,70 @@ class BroadcastToTest(test_util.TensorFlowTestCase):
             array_ops.broadcast_to(
                 constant_op.constant([0, 1]), constant_op.constant([2, 1])))

-  @test_util.run_deprecated_v1
   def testGradientForScalar(self):
     x = constant_op.constant(1, dtype=dtypes.float32)
-    v = array_ops.broadcast_to(x, [2, 4, 3])
-    out = 2 * v
+
+    def func(x):
+      v = array_ops.broadcast_to(x, [2, 4, 3])
+      return 2 * v
+
     with self.cached_session():
-      err = gradient_checker.compute_gradient_error(x, x.get_shape(), out,
-                                                    out.get_shape())
+      err = gradient_checker_v2.max_error(
+          *gradient_checker_v2.compute_gradient(func, [x]))
     self.assertLess(err, 1e-4)

-  @test_util.run_deprecated_v1
   def testGradientWithSameRank(self):
     x = constant_op.constant(np.reshape(np.arange(6), (2, 1, 3)),
                              dtype=dtypes.float32)
-    v = array_ops.broadcast_to(x, [2, 5, 3])
-    out = 2 * v
+
+    def func(x):
+      v = array_ops.broadcast_to(x, [2, 5, 3])
+      return 2 * v
+
     with self.cached_session():
-      err = gradient_checker.compute_gradient_error(x, x.get_shape(),
-                                                    out, out.get_shape())
+      err = gradient_checker_v2.max_error(
+          *gradient_checker_v2.compute_gradient(func, [x], delta=1e-2))
     self.assertLess(err, 1e-4)

-  @test_util.run_deprecated_v1
   def testGradientWithIncreasingRank(self):
     x = constant_op.constant([[1], [2]],
                              dtype=dtypes.float32)
-    v = array_ops.broadcast_to(x, [5, 2, 3])
-    out = 2 * v
+
+    def func(x):
+      v = array_ops.broadcast_to(x, [5, 2, 3])
+      return 2 * v
+
     with self.cached_session():
-      err = gradient_checker.compute_gradient_error(x, x.get_shape(),
-                                                    out, out.get_shape())
+      err = gradient_checker_v2.max_error(
+          *gradient_checker_v2.compute_gradient(func, [x]))
     self.assertLess(err, 1e-4)

-  @test_util.run_deprecated_v1
   def testGradientWithBroadcastAllDimensions(self):
     x = constant_op.constant([1], dtype=dtypes.float32)
-    v = array_ops.broadcast_to(x, [5, 2, 3])
-    out = 2 * v
+
+    def func(x):
+      v = array_ops.broadcast_to(x, [5, 2, 3])
+      return 2 * v
+
     with self.cached_session():
-      err = gradient_checker.compute_gradient_error(x, x.get_shape(),
-                                                    out, out.get_shape())
+      err = gradient_checker_v2.max_error(
+          *gradient_checker_v2.compute_gradient(func, [x]))
     self.assertLess(err, 1e-4)

-  @test_util.run_deprecated_v1
   def testGradientWithLargeDim(self):
     input_shape = [2, 1, 3, 2, 2, 2, 1, 1, 1]
     output_shape = [1, 1, 1, 2, 5, 3, 2, 2, 2, 3, 3, 3]
     x = constant_op.constant(np.array(np.random.randn(*input_shape),
                                       dtype=np.float32))
-    v = array_ops.broadcast_to(x, output_shape)
-    out = 2 * v
+
+    def func(x):
+      v = array_ops.broadcast_to(x, output_shape)
+      return 2 * v
+
     with self.cached_session():
-      err = gradient_checker.compute_gradient_error(x, x.get_shape(),
-                                                    out, out.get_shape())
+      err = gradient_checker_v2.max_error(
+          *gradient_checker_v2.compute_gradient(func, [x], delta=1e-2))
     self.assertLess(err, 1e-4)

   def testBroadcastToInvalidShape(self):
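
For reference, below is a minimal standalone sketch of the v1-to-v2 gradient-check migration applied in the hunks above, assuming TF 2.x eager execution (the helper name check_broadcast_grad is illustrative and not from this commit): the graph-tensor call gradient_checker.compute_gradient_error(x, ..., out, ...) becomes gradient_checker_v2.compute_gradient on a callable, and gradient_checker_v2.max_error reduces the (theoretical, numerical) Jacobians to the single scalar the test asserts on.

# Illustrative helper; mirrors testGradientWithSameRank above.
import numpy as np

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2


def check_broadcast_grad():
  x = constant_op.constant(np.reshape(np.arange(6), (2, 1, 3)),
                           dtype=dtypes.float32)

  def func(x):
    # The op under test, wrapped in a callable as the v2 checker expects.
    return 2 * array_ops.broadcast_to(x, [2, 5, 3])

  # compute_gradient returns (theoretical, numerical) Jacobians; max_error
  # collapses them to one scalar, playing the role of compute_gradient_error.
  theoretical, numerical = gradient_checker_v2.compute_gradient(
      func, [x], delta=1e-2)
  return gradient_checker_v2.max_error(theoretical, numerical)


print(check_broadcast_grad())  # expected to be well below 1e-4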