Increase tolerance for conv2d_backprop_filter_grad_test
And enable the XLA GPU test. PiperOrigin-RevId: 252146139
This commit is contained in:
parent
dfacd7618a
commit
30af0f2592
@@ -2916,8 +2916,7 @@ cuda_py_test(
         "no_rocm",
         "optonly",  # flaky timeouts unless optimized
     ],
-    # TODO(b/134677329): Re-enable this test.
-    xla_enable_strict_auto_jit = False,
+    xla_enable_strict_auto_jit = True,
 )
 
 cuda_py_test(
@@ -110,7 +110,7 @@ class Conv2DBackpropFilterGradTest(test.TestCase):
           [in_val, out_backprop_val], [in_shape, out_backprop_shape],
           output, filter_shape)
       print("conv2d_backprop_filter gradient err = %g " % err)
-      err_tolerance = 2e-3
+      err_tolerance = 1e-2
       self.assertLess(err, err_tolerance)
 
 
Loading…
Reference in New Issue
Block a user