[TF:XLA] Attach useful information to disabled XLA tests.

PiperOrigin-RevId: 232669090
A. Unique TensorFlower 2019-02-06 07:07:57 -08:00 committed by TensorFlower Gardener
parent 4232ed9143
commit 9f6937e0d1
20 changed files with 24 additions and 35 deletions
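For illustration, a minimal sketch (not part of this commit) of the decoration pattern the change standardizes across these test files: a disabled XLA test now cites a tracking bug ID as the `disable_xla` description, with the human-readable reason kept in a trailing comment. The test class, test method, and bug ID below are hypothetical.

```python
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test


class ExampleTest(test.TestCase):

  # Old style: a free-form reason that is hard to track or follow up on.
  #   @test_util.disable_xla("This test never passed for XLA")
  # New style: a tracking bug ID, plus a short comment explaining the skip.
  @test_util.disable_xla("b/000000000")  # Hypothetical bug: error messages differ.
  def testSomething(self):
    self.assertAllEqual([1, 2, 3], [1, 2, 3])


if __name__ == "__main__":
  test.main()
```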

@@ -284,7 +284,6 @@ class FunctionTest(test.TestCase):
out, = sess.run(dlogits, {logits: x, labels: y})
self.assertAllClose(out, np.exp(prob - y))
-@test_util.disable_xla("This test never passed for XLA")
def testCustomGradientError(self):
dtype = dtypes.float32

@@ -1460,7 +1460,7 @@ def disable_all_xla(description):
value = getattr(cls, name)
if callable(value) and name.startswith(
"test") and not name == "test_session":
-setattr(cls, name, base_decorator(value))
+setattr(cls, name, base_decorator(description)(value))
return cls
return disable_all_impl
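The test_util.py hunk above is the one behavioral fix in this commit: `base_decorator` is a decorator factory, so it must be called with the description first, and the decorator it returns is then applied to the test method; the pre-fix code passed the method itself where the description belonged. A simplified, self-contained sketch of that two-step pattern (not the actual TensorFlow implementation; `base_decorator` and `disable_all` here are stand-ins):

```python
def base_decorator(description):
  """Decorator factory: returns a decorator that records why a test is disabled."""
  def decorator(func):
    def wrapper(*args, **kwargs):
      print("XLA disabled for %s: %s" % (func.__name__, description))
      return func(*args, **kwargs)
    return wrapper
  return decorator


def disable_all(description):
  """Applies base_decorator(description) to every test method of a class.

  Usage (hypothetical): @disable_all("b/000000000") placed above a TestCase class.
  """
  def disable_all_impl(cls):
    for name in dir(cls):
      value = getattr(cls, name)
      if callable(value) and name.startswith("test") and name != "test_session":
        # Build the decorator from the description, then wrap the method.
        # The buggy form, base_decorator(value), passed the test method as the
        # description and never actually wrapped it.
        setattr(cls, name, base_decorator(description)(value))
    return cls
  return disable_all_impl
```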

@@ -126,7 +126,7 @@ class BenchmarkTest(test.TestCase):
self.assertFalse(_ran_somebenchmark_2[0])
self.assertFalse(_ran_somebenchmark_but_shouldnt[0])
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123744455") # GPU memory is incorrect
def testReportingBenchmark(self):
tempdir = test.get_temp_dir()
try:

@@ -889,8 +889,8 @@ class EnsureShapeTest(test.TestCase):
# Dynamic shape check
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA"
-) # Dynamic shapes not supported now with XLA
+@test_util.disable_xla(
+"b/123337890") # Dynamic shapes not supported now with XLA
def testEnsuresDynamicShape_RaisesError(self):
placeholder = array_ops.placeholder(dtypes.int32)
derived = math_ops.divide(placeholder, 3, name="MyDivide")
@@ -904,8 +904,8 @@ class EnsureShapeTest(test.TestCase):
sess.run(derived, feed_dict={placeholder: feed_val})
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA"
-) # Dynamic shapes not supported now with XLA
+@test_util.disable_xla(
+"b/123337890") # Dynamic shapes not supported now with XLA
def testEnsuresDynamicShape_RaisesErrorDimUnknown(self):
placeholder = array_ops.placeholder(dtypes.int32)
derived = placeholder / 3

@@ -163,7 +163,9 @@ class CholeskyOpTest(test.TestCase):
with self.assertRaises(ValueError):
linalg_ops.cholesky(tensor3)
-@test_util.disable_xla("This test never passed for XLA") # all nan on XLA
+# The below invalid Cholesky call returns an error with TF Classic and just
+# returns NaNs with XLA.
+@test_util.disable_xla("b/123337890")
def testNotInvertibleCPU(self):
# The input should be invertible.
with self.session(use_gpu=True):

@@ -33,7 +33,6 @@ from tensorflow.python.ops import variables
from tensorflow.python.platform import test
-@test_util.disable_all_xla("This test never passed for XLA")
class ConcatOpTest(test.TestCase):
@test_util.run_deprecated_v1
@@ -642,7 +641,6 @@ class ConcatOpTest(test.TestCase):
self.assertAllEqual([[1, 2, 3, 7, 8, 9], [4, 5, 6, 10, 11, 12]], output)
-@test_util.disable_all_xla("This test never passed for XLA")
class ConcatOffsetTest(test.TestCase):
def testBasic(self):
@@ -686,8 +684,7 @@ class ConcatOffsetTest(test.TestCase):
self.evaluate(off)
@test_util.run_deprecated_v1
-@test_util.disable_xla(
-"This test never passed for XLA") # Different error message on XLA
+@test_util.disable_xla("b/123337890") # Error messages differ
def testSizeMismatch(self):
cdim = constant_op.constant(1, dtypes.int32)
s0 = constant_op.constant([2, 3, 5], dtypes.int32)

@@ -2769,8 +2769,6 @@ class ControlFlowTest(test.TestCase):
self.assertEqual(i_val, 3)
self.assertAllClose(x_val, 1.0)
-@test_util.disable_xla("This test never passed for XLA"
-) # Resource variable issue for ControlFlowV2
@test_util.run_gpu_only
def testGpuResourceAccess(self):
with ops.device(test.gpu_device_name()):

@@ -295,7 +295,7 @@ class DepthToSpaceTest(test.TestCase):
actual_vals, expected_vals = self.evaluate([actual, expected])
self.assertTrue(np.array_equal(actual_vals, expected_vals))
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123553551") # Unsupported data format
def testAgainstTranspose(self):
self.compareToTranspose(3, 2, 3, 1, 2, "NHWC", False)
self.compareToTranspose(3, 2, 3, 2, 2, "NHWC", False)

@@ -65,7 +65,7 @@ class MatrixDiagTest(test.TestCase):
array_ops.matrix_diag(0)
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testInvalidShapeAtEval(self):
with self.session(use_gpu=True):
v = array_ops.placeholder(dtype=dtypes_lib.float32)
@@ -270,7 +270,7 @@ class MatrixDiagPartTest(test.TestCase):
array_ops.matrix_diag_part(0)
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testInvalidShapeAtEval(self):
with self.session(use_gpu=True):
v = array_ops.placeholder(dtype=dtypes_lib.float32)

@@ -56,7 +56,6 @@ def simple_scoped_fn(a, x):
@test_util.with_control_flow_v2
-@test_util.disable_all_xla("This test never passed for XLA")
class FunctionalOpsTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
@@ -660,8 +659,7 @@ class FunctionalOpsTest(test.TestCase):
self.assertAllEqual(Run(100., True), 5050.)
@test_util.run_v1_only("b/120545219")
-@test_util.disable_xla(
-"This test never passed for XLA") # Different error message
+@test_util.disable_xla("b/123337890") # Different error message
def testWhileError(self):
for use_gpu in (True, False):
with ops.Graph().as_default() as g:
@@ -938,7 +936,6 @@ class FunctionalOpsTest(test.TestCase):
# TODO(akshayka): Replace `function.Defun` with tf.contrib.eager.defun` in the
# below test cases.
-@test_util.disable_all_xla("This test never passed for XLA")
class PartitionedCallTest(test.TestCase):
@test_util.run_deprecated_v1

@@ -34,7 +34,6 @@ from tensorflow.python.ops import variables
from tensorflow.python.platform import test
-@test_util.disable_all_xla("This test never passed for XLA")
class GatherNdTest(test.TestCase):
def _testSimpleDtype(self, dtype):
@@ -57,7 +56,7 @@ class GatherNdTest(test.TestCase):
self._testSimpleDtype("|S") # byte strings in python2 + 3
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testEmptyIndicesAndParamsOKButJustEmptyParamsFails(self):
with self.session(use_gpu=True):
params = np.ones((3, 3), dtype=np.float32)
@@ -360,7 +359,6 @@ class GatherNdTest(test.TestCase):
self.assertAllEqual(expected_grads, ops.convert_to_tensor(grads).eval())
-@test_util.disable_all_xla("This test never passed for XLA")
class GatherNdOpBenchmark(test.Benchmark):
def benchmark_gather_nd_op(self):

@@ -49,7 +49,6 @@ def simple_scoped_fn(a, x):
@test_util.with_control_flow_v2
-@test_util.disable_all_xla("This test never passed for XLA")
class MapFnTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes

@@ -303,7 +303,6 @@ class PoolingTest(test.TestCase):
self.assertLess(err, err_tolerance)
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA") # Much larger error
def testGradient1D(self):
with self.session(use_gpu=test.is_gpu_available()):
for padding in ["SAME", "VALID"]:

@@ -730,7 +730,7 @@ class PoolingTest(test.TestCase):
t = nn_ops.max_pool(
t, ksize=ksize, strides=strides, padding="SAME").eval()
-@test_util.disable_xla("b/123338077")
+@test_util.disable_xla("b/123338077") # Passes with XLA
def testDepthwiseMaxPoolInvalidConfigs(self):
self._testDepthwiseMaxPoolInvalidConfig(
[1, 2, 2, 4], [1, 2, 2, 2], [1, 1, 1, 2],
@@ -1821,7 +1821,7 @@ class PoolingTest(test.TestCase):
padding="SAME")
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testOpEdgeCases(self):
with self.session(use_gpu=test.is_gpu_available()) as sess:
pool_funcs = [nn_ops.max_pool, nn_ops.avg_pool]

@@ -21,16 +21,17 @@ from __future__ import print_function
import numpy as np
from tensorflow.python.framework import ops
+from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class BaseReductionTest(test.TestCase):
def _tf_reduce(self, x, reduction_axes, keepdims):
raise NotImplementedError()
+@test_util.disable_all_xla("b/123864762") # Test times out
class BigReductionTest(BaseReductionTest):
"""Test reductions for sum and boolean all over a wide range of shapes."""

@@ -86,7 +86,7 @@ class ReluTest(test.TestCase):
self.assertAllClose(np_relu, tf_relu)
self.assertShapeEqual(np_relu, tf_relu)
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123338077") # Passes with XLA
def testReluInt8x4BadShape(self):
if not test.is_gpu_available(cuda_only=True):
self.skipTest("No GPU available")

@@ -70,7 +70,6 @@ def handle_options(func, x, axis, exclusive, reverse):
return x
-@test_util.disable_all_xla("This test never passed for XLA")
class CumsumTest(test.TestCase):
valid_dtypes = [
@@ -135,6 +134,7 @@ class CumsumTest(test.TestCase):
self._compareAll(x, axis)
@test_util.run_deprecated_v1
+@test_util.disable_xla("b/123860949") # The computation is constant folded
def testLarge(self):
for dtype in self.valid_dtypes:
x = np.ones([1000000], dtype=dtype) / 1024
@@ -194,7 +194,6 @@ class CumsumTest(test.TestCase):
self._compareGradient([5, 10], axis, exclusive, reverse)
-@test_util.disable_all_xla("This test never passed for XLA")
class CumprodTest(test.TestCase):
valid_dtypes = [

@@ -296,7 +296,7 @@ class StatefulScatterNdTest(test.TestCase):
updates).get_shape().as_list(), shape)
@test_util.run_v1_only("b/120545219")
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testResVarInvalidOutputShape(self):
res = variables.Variable(
initial_value=lambda: array_ops.zeros(shape=[], dtype=dtypes.float32),

@@ -285,7 +285,7 @@ class SpaceToDepthTest(test.TestCase):
actual_vals, expected_vals = self.evaluate([actual, expected])
self.assertTrue(np.array_equal(actual_vals, expected_vals))
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123553551") # Unsupported data format
def testAgainstTranspose(self):
self.compareToTranspose(3, 2, 3, 1, 2, "NHWC", False)
self.compareToTranspose(1, 2, 3, 2, 2, "NHWC", False)

@@ -373,7 +373,7 @@ class SplitOpTest(test.TestCase):
assert s1.shape.as_list() == [1]
@test_util.run_deprecated_v1
-@test_util.disable_xla("This test never passed for XLA")
+@test_util.disable_xla("b/123337890") # Error messages differ
def testNonexistentDimTensor(self):
x = array_ops.placeholder(dtypes.int32)
values = np.zeros([5, 30])