diff --git a/tensorflow/python/data/benchmarks/BUILD b/tensorflow/python/data/benchmarks/BUILD index f039f6bff72..bc8240227e8 100644 --- a/tensorflow/python/data/benchmarks/BUILD +++ b/tensorflow/python/data/benchmarks/BUILD @@ -1,4 +1,4 @@ -load("//tensorflow:tensorflow.bzl", "py_test") +load("//tensorflow:tensorflow.bzl", "tf_py_test") package( default_visibility = ["//tensorflow:internal"], @@ -7,21 +7,22 @@ package( exports_files(["LICENSE"]) -py_test( +tf_py_test( name = "meta_benchmark", srcs = ["meta_benchmark.py"], - python_version = "PY2", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:session", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) py_library( name = "benchmark_base", srcs = ["benchmark_base.py"], + srcs_version = "PY2AND3", deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:session", @@ -30,75 +31,63 @@ py_library( ], ) -py_test( +tf_py_test( name = "batch_benchmark", srcs = ["batch_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", + "//third_party/py/numpy", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "filter_benchmark", srcs = ["filter_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", "//tensorflow/python/data/ops:dataset_ops", ], ) -py_test( +tf_py_test( name = "from_tensor_slices_benchmark", srcs = ["from_tensor_slices_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", - "//tensorflow/python/data/ops:dataset_ops", "//third_party/py/numpy", + "//tensorflow/python/data/ops:dataset_ops", ], ) -py_test( +tf_py_test( name = "list_files_benchmark", srcs = ["list_files_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python:framework_ops", "//tensorflow/python:session", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "map_benchmark", srcs = ["map_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", "//tensorflow/python/data/ops:dataset_ops", ], ) -py_test( +tf_py_test( name = "range_benchmark", srcs = ["range_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":benchmark_base", "//tensorflow/python/data/ops:dataset_ops", ], diff --git a/tensorflow/python/data/benchmarks/meta_benchmark.py b/tensorflow/python/data/benchmarks/meta_benchmark.py index f03e6efa600..7a699a5b22a 100644 --- a/tensorflow/python/data/benchmarks/meta_benchmark.py +++ b/tensorflow/python/data/benchmarks/meta_benchmark.py @@ -21,7 +21,7 @@ import timeit import numpy as np from tensorflow.python.client import session -from tensorflow.python.data.experimental.ops import sleep +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.ops import dataset_ops from tensorflow.python.eager import context from tensorflow.python.platform import test @@ -61,7 +61,7 @@ class MetaBenchmark(test.Benchmark): dataset = self.setup_fast_dataset() 
self.iters = 1000 # sleep for 1e-3s per iteration - return dataset.apply(sleep.sleep(1000)) + return dataset.apply(testing.sleep(1000)) def benchmark_slow_dataset_with_only_cpp_iterations(self): dataset = self.setup_slow_dataset() diff --git a/tensorflow/python/data/experimental/benchmarks/BUILD b/tensorflow/python/data/experimental/benchmarks/BUILD index 683d3eca158..0540fa069d3 100644 --- a/tensorflow/python/data/experimental/benchmarks/BUILD +++ b/tensorflow/python/data/experimental/benchmarks/BUILD @@ -1,4 +1,4 @@ -load("//tensorflow:tensorflow.bzl", "py_test") +load("//tensorflow:tensorflow.bzl", "tf_py_test") package( default_visibility = ["//tensorflow:internal"], @@ -7,59 +7,51 @@ package( exports_files(["LICENSE"]) -py_test( +tf_py_test( name = "autotune_benchmark", srcs = ["autotune_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:math_ops", "//tensorflow/python:session", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "choose_fastest_benchmark", srcs = ["choose_fastest_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_ops", "//tensorflow/python:math_ops", "//tensorflow/python:session", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "choose_fastest_branch_benchmark", srcs = ["choose_fastest_branch_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_ops", "//tensorflow/python:math_ops", "//tensorflow/python:session", "//tensorflow/python/data/benchmarks:benchmark_base", - "//tensorflow/python/data/experimental/ops:sleep", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "csv_dataset_benchmark", srcs = ["csv_dataset_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:parsing_ops", "//tensorflow/python:platform", @@ -67,16 +59,15 @@ py_test( "//tensorflow/python:session", "//tensorflow/python/data/experimental/ops:readers", "//tensorflow/python/data/ops:readers", - "//third_party/py/numpy", ], + tags = ["no_pip"], ) -py_test( +tf_py_test( name = "map_and_batch_benchmark", srcs = ["map_and_batch_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/core:protos_all_py", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", @@ -87,16 +78,13 @@ py_test( "//tensorflow/python:session", "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "map_defun_benchmark", srcs = ["map_defun_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -108,12 +96,11 @@ py_test( ], ) -py_test( +tf_py_test( name = 
"map_vectorization_benchmark", srcs = ["map_vectorization_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/core:protos_all_py", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", @@ -124,17 +111,15 @@ py_test( "//tensorflow/python:session", "//tensorflow/python/data/ops:dataset_ops", "//tensorflow/python/data/util:nest", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "matching_files_benchmark", size = "small", srcs = ["matching_files_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -142,61 +127,54 @@ py_test( "//tensorflow/python:util", "//tensorflow/python/data/experimental/ops:matching_files", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "optimize_benchmark", srcs = ["optimize_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_ops", "//tensorflow/python:math_ops", "//tensorflow/python:session", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "parallel_interleave_benchmark", srcs = ["parallel_interleave_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:math_ops", "//tensorflow/python:session", "//tensorflow/python/data/experimental/ops:interleave_ops", - "//tensorflow/python/data/experimental/ops:sleep", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "rejection_resample_benchmark", srcs = ["rejection_resample_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ + additional_deps = [ + "//third_party/py/numpy", + "@six_archive//:six", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:resampling", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@six_archive//:six", ], + tags = ["no_pip"], ) -py_test( +tf_py_test( name = "snapshot_dataset_benchmark", srcs = ["snapshot_dataset_benchmark.py"], - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", @@ -207,16 +185,14 @@ py_test( "//tensorflow/python/data/experimental/ops:snapshot", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "unbatch_benchmark", srcs = ["unbatch_benchmark.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -224,6 +200,5 @@ py_test( "//tensorflow/python:session", "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) diff --git 
a/tensorflow/python/data/experimental/benchmarks/choose_fastest_branch_benchmark.py b/tensorflow/python/data/experimental/benchmarks/choose_fastest_branch_benchmark.py index 2b7761c1fca..093b0aa809d 100644 --- a/tensorflow/python/data/experimental/benchmarks/choose_fastest_branch_benchmark.py +++ b/tensorflow/python/data/experimental/benchmarks/choose_fastest_branch_benchmark.py @@ -19,7 +19,7 @@ from __future__ import print_function from tensorflow.python.data.benchmarks import benchmark_base from tensorflow.python.data.experimental.ops import optimization -from tensorflow.python.data.experimental.ops import sleep +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.ops import dataset_ops @@ -78,7 +78,8 @@ class ChooseFastestBranchBenchmark(benchmark_base.DatasetBenchmarkBase): def benchmark_with_input_skew(self): def make_dataset(time_us, num_elements): - return dataset_ops.Dataset.range(num_elements).apply(sleep.sleep(time_us)) + return dataset_ops.Dataset.range(num_elements).apply( + testing.sleep(time_us)) # Dataset with 100 elements that emulates performance characteristics of a # file-based dataset stored in remote storage, where the first element @@ -87,10 +88,10 @@ class ChooseFastestBranchBenchmark(benchmark_base.DatasetBenchmarkBase): 0).concatenate(make_dataset(1, 100)).take(100) def slow_branch(dataset): - return dataset.apply(sleep.sleep(10000)) + return dataset.apply(testing.sleep(10000)) def fast_branch(dataset): - return dataset.apply(sleep.sleep(10)) + return dataset.apply(testing.sleep(10)) def benchmark(dataset, name): self.run_and_report_benchmark( diff --git a/tensorflow/python/data/experimental/benchmarks/parallel_interleave_benchmark.py b/tensorflow/python/data/experimental/benchmarks/parallel_interleave_benchmark.py index bc49f84ddfd..c2f59d294e3 100644 --- a/tensorflow/python/data/experimental/benchmarks/parallel_interleave_benchmark.py +++ b/tensorflow/python/data/experimental/benchmarks/parallel_interleave_benchmark.py @@ -22,7 +22,7 @@ import time import numpy as np from tensorflow.python.data.experimental.ops import interleave_ops -from tensorflow.python.data.experimental.ops import sleep +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import ops from tensorflow.python.platform import test @@ -52,7 +52,7 @@ def _make_fake_dataset_fn(initial_delay_us, remainder_delay_us): def make_dataset(time_us, num_elements): dataset = dataset_ops.Dataset.range(num_elements) if time_us > 0: - dataset = dataset.apply(sleep.sleep(time_us)) + dataset = dataset.apply(testing.sleep(time_us)) return dataset if not initial_delay_us: diff --git a/tensorflow/python/data/experimental/kernel_tests/BUILD b/tensorflow/python/data/experimental/kernel_tests/BUILD index 3c2279bcfbd..7e9b52e43de 100644 --- a/tensorflow/python/data/experimental/kernel_tests/BUILD +++ b/tensorflow/python/data/experimental/kernel_tests/BUILD @@ -1,4 +1,4 @@ -load("//tensorflow:tensorflow.bzl", "py_test", "tf_py_test") +load("//tensorflow:tensorflow.bzl", "tf_py_test") load("//tensorflow:tensorflow.bzl", "cuda_py_test") package( @@ -8,13 +8,51 @@ package( exports_files(["LICENSE"]) -py_test( +tf_py_test( + name = "assert_next_test", + size = "small", + srcs = ["assert_next_test.py"], + additional_deps = [ + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", 
+ "//tensorflow/python/data/ops:dataset_ops", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], +) + +tf_py_test( + name = "auto_shard_dataset_test", + size = "medium", + srcs = ["auto_shard_dataset_test.py"], + additional_deps = [ + ":reader_dataset_ops_test_base", + "@absl_py//absl/testing:parameterized", + "//tensorflow/python:client_testlib", + "//tensorflow/python/data/experimental/ops:distribute", + "//tensorflow/python/data/experimental/ops:readers", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + "//tensorflow/python/data/util:nest", + ], + tags = [ + "no_pip", + ], +) + +tf_py_test( name = "bucket_by_sequence_length_test", size = "medium", srcs = ["bucket_by_sequence_length_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -24,21 +62,17 @@ py_test( "//tensorflow/python/data/experimental/ops:grouping", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", ], ) -py_test( +tf_py_test( name = "cardinality_test", srcs = ["cardinality_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python/data/experimental/ops:cardinality", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", ], ) @@ -62,13 +96,11 @@ cuda_py_test( tags = ["no_windows_gpu"], ) -py_test( +tf_py_test( name = "counter_test", size = "small", srcs = ["counter_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", "//tensorflow/python/data/experimental/ops:counter", @@ -76,14 +108,11 @@ py_test( ], ) -py_test( +tf_py_test( name = "csv_dataset_test", size = "medium", srcs = ["csv_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", @@ -96,14 +125,14 @@ py_test( "//tensorflow/python/data/ops:readers", "//tensorflow/python/eager:context", ], + tags = ["no_pip"], ) -py_test( +tf_py_test( name = "dense_to_sparse_batch_test", srcs = ["dense_to_sparse_batch_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -111,62 +140,455 @@ py_test( "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "dense_to_ragged_batch_test", srcs = ["dense_to_ragged_batch_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", "//tensorflow/python:errors", "//tensorflow/python/data/kernel_tests:test_base", 
"//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( +tf_py_test( name = "directed_interleave_dataset_test", size = "medium", srcs = ["directed_interleave_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python:random_seed", "//tensorflow/python/data/experimental/ops:interleave_ops", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], ) -py_test( - name = "auto_shard_dataset_test", - size = "medium", - srcs = ["auto_shard_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_pip", +tf_py_test( + name = "get_single_element_test", + size = "small", + srcs = ["get_single_element_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python/data/experimental/ops:get_single_element", + "//tensorflow/python/data/experimental/ops:grouping", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", ], - deps = [ +) + +tf_py_test( + name = "group_by_reducer_test", + size = "medium", + srcs = ["group_by_reducer_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:math_ops", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python:tensor_shape", + "//tensorflow/python/data/experimental/ops:grouping", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], +) + +tf_py_test( + name = "group_by_window_test", + size = "medium", + srcs = ["group_by_window_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:framework_ops", + "//tensorflow/python:math_ops", + "//tensorflow/python:string_ops", + "//tensorflow/python:tensor_shape", + "//tensorflow/python/data/experimental/ops:grouping", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], +) + +tf_py_test( + name = "ignore_errors_test", + srcs = ["ignore_errors_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python:io_ops", + "//tensorflow/python:util", + "//tensorflow/python/data/experimental/ops:error_ops", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], +) + +tf_py_test( + name = "make_batched_features_dataset_test", + size = "medium", + srcs = ["make_batched_features_dataset_test.py"], + additional_deps = [ + ":reader_dataset_ops_test_base", + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:framework_ops", + "//tensorflow/python:io_ops", + 
"//tensorflow/python:parsing_ops", + "//tensorflow/python/data/experimental/ops:readers", + "//tensorflow/python/data/ops:readers", + "//tensorflow/python/data/util:nest", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "make_csv_dataset_test", + size = "medium", + srcs = ["make_csv_dataset_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:framework_ops", + "//tensorflow/python/data/experimental/ops:readers", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/util:nest", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "make_tf_record_dataset_test", + size = "medium", + srcs = ["make_tf_record_dataset_test.py"], + additional_deps = [ ":reader_dataset_ops_test_base", "//tensorflow/python:client_testlib", - "//tensorflow/python/data/experimental/ops:distribute", - "//tensorflow/python/data/experimental/ops:optimization", + "//tensorflow/python:errors", + "//tensorflow/python:framework_ops", + "//tensorflow/python:string_ops", + "//tensorflow/python/data/experimental/ops:readers", + "//tensorflow/python/data/util:nest", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "map_and_batch_test", + size = "medium", + srcs = ["map_and_batch_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:cond_v2", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:math_ops", + "//tensorflow/python:script_ops", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python/data/experimental/ops:batching", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], +) + +tf_py_test( + name = "map_defun_op_test", + size = "small", + srcs = ["map_defun_op_test.py"], + additional_deps = [ + "//tensorflow/python:array_ops", + "//tensorflow/python:check_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:data_flow_ops", + "//tensorflow/python:dtypes", + "//tensorflow/python:framework_ops", + "//tensorflow/python:function", + "//tensorflow/python:functional_ops", + "//tensorflow/python:math_ops", + "//tensorflow/python:session", + "//tensorflow/python:sparse_ops", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python/data/experimental/ops:map_defun", + "//tensorflow/python/data/kernel_tests:test_base", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "matching_files_test", + size = "small", + srcs = ["matching_files_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:util", + "//tensorflow/python/data/experimental/ops:matching_files", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "model_dataset_test", + size = "small", + srcs = ["model_dataset_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + 
"//tensorflow/python/data/ops:dataset_ops", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], +) + +tf_py_test( + name = "non_serializable_test", + size = "small", + srcs = ["non_serializable_test.py"], + additional_deps = [ + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], +) + +tf_py_test( + name = "optimize_dataset_test", + size = "medium", + srcs = ["optimize_dataset_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:random_ops", + "//tensorflow/python:variable_scope", + "//tensorflow/python/data/experimental/ops:batching", + "//tensorflow/python/data/experimental/ops:grouping", + "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:scan_ops", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + "//tensorflow/python/eager:context", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], +) + +tf_py_test( + name = "override_threadpool_test", + size = "small", + srcs = ["override_threadpool_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:script_ops", + "//tensorflow/python/data/experimental/ops:threadpool", + "//tensorflow/python/data/experimental/ops:unique", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "parallel_interleave_test", + size = "medium", + srcs = ["parallel_interleave_test.py"], + additional_deps = [ + "@six_archive//:six", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:math_ops", + "//tensorflow/python:script_ops", + "//tensorflow/python:sparse_ops", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python/data/experimental/ops:interleave_ops", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = ["no_pip"], +) + +tf_py_test( + name = "parse_example_dataset_test", + size = "small", + srcs = ["parse_example_dataset_test.py"], + additional_deps = [ + "//third_party/py/numpy", + "//tensorflow/core:protos_all_py", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:framework_ops", + "//tensorflow/python:parsing_ops", + "//tensorflow/python:platform", + "//tensorflow/python:sparse_tensor", + "//tensorflow/python/data/experimental/ops:parsing_ops", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + "//tensorflow/python/data/util:nest", + ], +) + +cuda_py_test( + name = "prefetch_to_device_test", + size = "small", + srcs = ["prefetch_to_device_test.py"], + additional_deps = [ + "//tensorflow/python/data/experimental/ops:prefetching_ops", + "//tensorflow/core:protos_all_py", + 
"//tensorflow/python:client_testlib", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python:dtypes", + "//tensorflow/python:framework_ops", + "//tensorflow/python:framework_test_lib", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = ["no_windows_gpu"], +) + +tf_py_test( + name = "prefetch_with_slack_test", + size = "small", + srcs = ["prefetch_with_slack_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//tensorflow/core:protos_all_py", + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python:framework_test_lib", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + "//tensorflow/python/data/ops:iterator_ops", + "//tensorflow/python/data/ops:multi_device_iterator_ops", + ], +) + +py_library( + name = "reader_dataset_ops_test_base", + srcs = [ + "reader_dataset_ops_test_base.py", + ], + srcs_version = "PY2AND3", + deps = [ + "//tensorflow/core:protos_all_py", + "//tensorflow/python:array_ops", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:lib", + "//tensorflow/python:parsing_ops", + "//tensorflow/python:util", + "//tensorflow/python/data/experimental/ops:readers", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:iterator_ops", + "//tensorflow/python/data/ops:readers", + ], +) + +tf_py_test( + name = "rebatch_dataset_test", + size = "small", + srcs = ["rebatch_dataset_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//tensorflow/core:protos_all_py", + "//tensorflow/python:client_testlib", + "//tensorflow/python:parsing_ops", "//tensorflow/python/data/experimental/ops:readers", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", "//tensorflow/python/data/util:nest", + "//tensorflow/python/ops/ragged:ragged_tensor", + ], +) + +tf_py_test( + name = "rejection_resample_test", + size = "medium", + srcs = ["rejection_resample_test.py"], + additional_deps = [ "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", + "@six_archive//:six", + "//tensorflow/python:client_testlib", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:math_ops", + "//tensorflow/python:random_ops", + "//tensorflow/python:string_ops", + "//tensorflow/python:util", + "//tensorflow/python/data/experimental/ops:resampling", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], + shard_count = 5, + tags = [ + "noasan", + "optonly", ], ) @@ -198,389 +620,6 @@ tf_py_test( tags = ["no_oss"], ) -py_test( - name = "get_single_element_test", - size = "small", - srcs = ["get_single_element_test.py"], - python_version = "PY2", - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python/data/experimental/ops:get_single_element", - "//tensorflow/python/data/experimental/ops:grouping", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( - name = "group_by_reducer_test", - size = "medium", - srcs = ["group_by_reducer_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - 
"//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python:tensor_shape", - "//tensorflow/python/data/experimental/ops:grouping", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], -) - -py_test( - name = "group_by_window_test", - size = "medium", - srcs = ["group_by_window_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:framework_ops", - "//tensorflow/python:math_ops", - "//tensorflow/python:string_ops", - "//tensorflow/python:tensor_shape", - "//tensorflow/python/data/experimental/ops:grouping", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], -) - -py_test( - name = "ignore_errors_test", - srcs = ["ignore_errors_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python:io_ops", - "//tensorflow/python:util", - "//tensorflow/python/data/experimental/ops:error_ops", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], -) - -py_test( - name = "make_batched_features_dataset_test", - size = "medium", - srcs = ["make_batched_features_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - ":reader_dataset_ops_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:framework_ops", - "//tensorflow/python:io_ops", - "//tensorflow/python:parsing_ops", - "//tensorflow/python/data/experimental/ops:readers", - "//tensorflow/python/data/ops:readers", - "//tensorflow/python/data/util:nest", - "//third_party/py/numpy", - ], -) - -py_test( - name = "make_csv_dataset_test", - size = "medium", - srcs = ["make_csv_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:framework_ops", - "//tensorflow/python/data/experimental/ops:readers", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/util:nest", - "//third_party/py/numpy", - ], -) - -py_test( - name = "make_tf_record_dataset_test", - size = "medium", - srcs = ["make_tf_record_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - ":reader_dataset_ops_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python:framework_ops", - "//tensorflow/python:string_ops", - "//tensorflow/python/data/experimental/ops:readers", - "//tensorflow/python/data/util:nest", - ], -) - -py_test( - name = "map_and_batch_test", - size = "medium", - srcs = ["map_and_batch_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/python:array_ops", - 
"//tensorflow/python:client_testlib", - "//tensorflow/python:cond_v2", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python:script_ops", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python/data/experimental/ops:batching", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( - name = "map_defun_op_test", - size = "small", - srcs = ["map_defun_op_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:check_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:data_flow_ops", - "//tensorflow/python:dtypes", - "//tensorflow/python:framework_ops", - "//tensorflow/python:function", - "//tensorflow/python:functional_ops", - "//tensorflow/python:math_ops", - "//tensorflow/python:session", - "//tensorflow/python:sparse_ops", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python/data/experimental/ops:map_defun", - "//tensorflow/python/data/kernel_tests:test_base", - ], -) - -py_test( - name = "matching_files_test", - size = "small", - srcs = ["matching_files_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:util", - "//tensorflow/python/data/experimental/ops:matching_files", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], -) - -py_test( - name = "override_threadpool_test", - size = "small", - srcs = ["override_threadpool_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:script_ops", - "//tensorflow/python/data/experimental/ops:threadpool", - "//tensorflow/python/data/experimental/ops:unique", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( - name = "parallel_interleave_test", - size = "medium", - srcs = ["parallel_interleave_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python:script_ops", - "//tensorflow/python:sparse_ops", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python/data/experimental/ops:interleave_ops", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@six_archive//:six", - ], -) - -py_test( - name = "parse_example_dataset_test", - size = "small", - srcs = ["parse_example_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/core:protos_all_py", - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:framework_ops", - "//tensorflow/python:parsing_ops", - 
"//tensorflow/python:platform", - "//tensorflow/python:sparse_tensor", - "//tensorflow/python/data/experimental/ops:parsing_ops", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//tensorflow/python/data/util:nest", - "//third_party/py/numpy", - ], -) - -cuda_py_test( - name = "prefetch_to_device_test", - size = "small", - srcs = ["prefetch_to_device_test.py"], - additional_deps = [ - "//tensorflow/python/data/experimental/ops:prefetching_ops", - "//tensorflow/core:protos_all_py", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python:dtypes", - "//tensorflow/python:framework_ops", - "//tensorflow/python:framework_test_lib", - "//tensorflow/python/data/ops:dataset_ops", - ], - tags = ["no_windows_gpu"], -) - -py_test( - name = "prefetch_with_slack_test", - size = "small", - srcs = ["prefetch_with_slack_test.py"], - deps = [ - "//tensorflow/core:protos_all_py", - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python:framework_test_lib", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//tensorflow/python/data/ops:iterator_ops", - "//tensorflow/python/data/ops:multi_device_iterator_ops", - "@absl_py//absl/testing:parameterized", - ], -) - -py_library( - name = "reader_dataset_ops_test_base", - srcs = [ - "reader_dataset_ops_test_base.py", - ], - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/core:protos_all_py", - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:lib", - "//tensorflow/python:parsing_ops", - "//tensorflow/python:util", - "//tensorflow/python/data/experimental/ops:readers", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:iterator_ops", - "//tensorflow/python/data/ops:readers", - ], -) - -py_test( - name = "rebatch_dataset_test", - size = "small", - srcs = ["rebatch_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/core:protos_all_py", - "//tensorflow/python:client_testlib", - "//tensorflow/python:parsing_ops", - "//tensorflow/python/data/experimental/ops:readers", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//tensorflow/python/data/util:nest", - "//tensorflow/python/ops/ragged:ragged_tensor", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( - name = "rejection_resample_test", - size = "medium", - srcs = ["rejection_resample_test.py"], - python_version = "PY2", - shard_count = 5, - srcs_version = "PY2AND3", - tags = [ - "noasan", - "optonly", - ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python:random_ops", - "//tensorflow/python:string_ops", - "//tensorflow/python:util", - "//tensorflow/python/data/experimental/ops:resampling", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", - "@six_archive//:six", - ], -) - cuda_py_test( name = "scan_test", size = "small", @@ -606,36 +645,32 @@ cuda_py_test( tags = ["no_pip"], ) -py_test( +tf_py_test( name = "shuffle_and_repeat_test", size = "medium", srcs = ["shuffle_and_repeat_test.py"], - python_version = "PY2", - 
srcs_version = "PY2AND3", - tags = [ - "no_pip", - "optonly", - ], - deps = [ + additional_deps = [ + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python:framework_ops", "//tensorflow/python/data/experimental/ops:shuffle_ops", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", + ], + tags = [ + "no_pip", + "optonly", ], ) -py_test( +tf_py_test( name = "sleep_test", srcs = ["sleep_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:util", - "//tensorflow/python/data/experimental/ops:sleep", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], @@ -659,52 +694,45 @@ py_library( ], ) -py_test( +tf_py_test( name = "sql_dataset_test", size = "medium", srcs = ["sql_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ + additional_deps = [ ":sql_dataset_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", "//tensorflow/python:errors", ], + tags = ["no_pip"], ) -py_test( +tf_py_test( name = "snapshot_test", size = "medium", srcs = ["snapshot_test.py"], - python_version = "PY2", - shard_count = 10, - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ ":reader_dataset_ops_test_base", + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_test_lib", "//tensorflow/python:string_ops", "//tensorflow/python/data/experimental/ops:snapshot", "//tensorflow/python/data/ops:dataset_ops", "//tensorflow/python/data/ops:readers", - "@absl_py//absl/testing:parameterized", ], + shard_count = 10, ) -py_test( +tf_py_test( name = "stats_dataset_ops_test", size = "large", srcs = ["stats_dataset_ops_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_pip", - ], - deps = [ + additional_deps = [ ":reader_dataset_ops_test_base", ":stats_dataset_test_base", + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", @@ -715,8 +743,9 @@ py_test( "//tensorflow/python/data/experimental/ops:stats_ops", "//tensorflow/python/data/experimental/ops:stats_options", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_pip", ], ) @@ -731,13 +760,13 @@ py_library( ], ) -py_test( +tf_py_test( name = "take_while_test", size = "small", srcs = ["take_while_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", @@ -749,17 +778,14 @@ py_test( "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", "//tensorflow/python/eager:context", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", ], ) -py_test( +tf_py_test( name = "tf_record_writer_test", size = "small", srcs = ["tf_record_writer_test.py"], - python_version = "PY2", - deps = [ + additional_deps = [ "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", @@ -772,14 +798,11 @@ 
py_test( ], ) -py_test( +tf_py_test( name = "unique_test", size = "small", srcs = ["unique_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = ["no_pip"], - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:dtypes", "//tensorflow/python:errors", @@ -788,14 +811,13 @@ py_test( "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], + tags = ["no_pip"], ) -py_test( +tf_py_test( name = "variant_test", srcs = ["variant_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:util", "//tensorflow/python/data/kernel_tests:test_base", diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/assert_next_dataset_test.py b/tensorflow/python/data/experimental/kernel_tests/assert_next_test.py similarity index 88% rename from tensorflow/python/data/experimental/kernel_tests/optimization/assert_next_dataset_test.py rename to tensorflow/python/data/experimental/kernel_tests/assert_next_test.py index 83cbb3cbcaa..c246122c92b 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/assert_next_dataset_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/assert_next_test.py @@ -17,7 +17,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import errors @@ -26,11 +26,11 @@ from tensorflow.python.platform import test @test_util.run_all_in_graph_and_eager_modes -class AssertNextDatasetTest(test_base.DatasetTestBase): +class AssertNextTest(test_base.DatasetTestBase): def testAssertNext(self): dataset = dataset_ops.Dataset.from_tensors(0).apply( - optimization.assert_next(["Map"])).map(lambda x: x) + testing.assert_next(["Map"])).map(lambda x: x) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False dataset = dataset.with_options(options) @@ -38,7 +38,7 @@ class AssertNextDatasetTest(test_base.DatasetTestBase): def testAssertNextInvalid(self): dataset = dataset_ops.Dataset.from_tensors(0).apply( - optimization.assert_next(["Whoops"])).map(lambda x: x) + testing.assert_next(["Whoops"])).map(lambda x: x) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False dataset = dataset.with_options(options) @@ -51,7 +51,7 @@ class AssertNextDatasetTest(test_base.DatasetTestBase): def testAssertNextShort(self): dataset = dataset_ops.Dataset.from_tensors(0).apply( - optimization.assert_next(["Map", "Whoops"])).map(lambda x: x) + testing.assert_next(["Map", "Whoops"])).map(lambda x: x) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.autotune = False diff --git a/tensorflow/python/data/experimental/kernel_tests/auto_shard_dataset_test.py b/tensorflow/python/data/experimental/kernel_tests/auto_shard_dataset_test.py index f693640e07c..73e68ebcf42 100644 --- a/tensorflow/python/data/experimental/kernel_tests/auto_shard_dataset_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/auto_shard_dataset_test.py @@ -23,7 +23,7 @@ from 
tensorflow.python.data.experimental.kernel_tests import reader_dataset_ops_ from tensorflow.python.data.experimental.ops import distribute from tensorflow.python.data.experimental.ops import distribute_options from tensorflow.python.data.experimental.ops import interleave_ops -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.experimental.ops import readers from tensorflow.python.data.experimental.ops import unique from tensorflow.python.data.kernel_tests import test_base @@ -391,7 +391,7 @@ class AutoShardDatasetTest(reader_dataset_ops_test_base.TFRecordDatasetTestBase, # Tests that Rebatch is a passthrough op. dataset = dataset_ops.Dataset.list_files(self.test_filenames, shuffle=False) dataset = dataset.apply( - optimization.assert_next(["Shard", "FlatMap", "BatchV2", "Rebatch"])) + testing.assert_next(["Shard", "FlatMap", "BatchV2", "Rebatch"])) dataset = dataset.flat_map(core_readers.TFRecordDataset) dataset = dataset.batch(5) dataset = distribute._RebatchDataset(dataset, num_replicas=1) diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/model_dataset_test.py b/tensorflow/python/data/experimental/kernel_tests/model_dataset_test.py similarity index 94% rename from tensorflow/python/data/experimental/kernel_tests/optimization/model_dataset_test.py rename to tensorflow/python/data/experimental/kernel_tests/model_dataset_test.py index dd2031f7b02..511990d6d27 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/model_dataset_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/model_dataset_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import errors @@ -33,7 +33,7 @@ class ModelDatasetTest(test_base.DatasetTestBase, parameterized.TestCase): def testAutotuneOption(self): dataset = dataset_ops.Dataset.from_tensors(0) dataset = dataset.map(lambda x: x).apply( - optimization.assert_next(["Model"])) + testing.assert_next(["Model"])) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.autotune = True diff --git a/tensorflow/python/data/experimental/kernel_tests/non_serializable_test.py b/tensorflow/python/data/experimental/kernel_tests/non_serializable_test.py new file mode 100644 index 00000000000..7b07853384b --- /dev/null +++ b/tensorflow/python/data/experimental/kernel_tests/non_serializable_test.py @@ -0,0 +1,56 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for `tf.data.experimental.non_serializable()`.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from tensorflow.python.data.experimental.ops import testing +from tensorflow.python.data.kernel_tests import test_base +from tensorflow.python.data.ops import dataset_ops +from tensorflow.python.framework import test_util +from tensorflow.python.platform import test + + +@test_util.run_all_in_graph_and_eager_modes +class NonSerializableTest(test_base.DatasetTestBase): + + def testNonSerializable(self): + dataset = dataset_ops.Dataset.from_tensors(0) + dataset = dataset.apply(testing.assert_next(["FiniteSkip"])) + dataset = dataset.skip(0) # Should not be removed by noop elimination + dataset = dataset.apply(testing.non_serializable()) + dataset = dataset.apply(testing.assert_next(["MemoryCacheImpl"])) + dataset = dataset.skip(0) # Should be removed by noop elimination + dataset = dataset.cache() + options = dataset_ops.Options() + options.experimental_optimization.apply_default_optimizations = False + options.experimental_optimization.noop_elimination = True + dataset = dataset.with_options(options) + self.assertDatasetProduces(dataset, expected_output=[0]) + + def testNonSerializableAsDirectInput(self): + """Tests that non-serializable dataset can be OptimizeDataset's input.""" + dataset = dataset_ops.Dataset.from_tensors(0) + dataset = dataset.apply(testing.non_serializable()) + options = dataset_ops.Options() + options.experimental_optimization.apply_default_optimizations = False + options.experimental_optimization.noop_elimination = True + dataset = dataset.with_options(options) + self.assertDatasetProduces(dataset, expected_output=[0]) + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/BUILD b/tensorflow/python/data/experimental/kernel_tests/optimization/BUILD index 26c213fb8a0..1e9d1ca1d00 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/BUILD +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/BUILD @@ -1,4 +1,4 @@ -load("//tensorflow:tensorflow.bzl", "py_test") +load("//tensorflow:tensorflow.bzl", "tf_py_test") package( default_visibility = ["//tensorflow:internal"], @@ -7,77 +7,64 @@ package( exports_files(["LICENSE"]) -py_test( - name = "assert_next_dataset_test", - size = "medium", - srcs = ["assert_next_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - ], -) - -py_test( - name = "inject_prefetch_test", +tf_py_test( + name = "choose_fastest_dataset_test", size = "small", - srcs = ["inject_prefetch_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + srcs = ["choose_fastest_dataset_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + 
"no_windows", + ], ) -py_test( +tf_py_test( name = "filter_fusion_test", size = "medium", srcs = ["filter_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", "//tensorflow/python:errors", "//tensorflow/python:math_ops", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) -py_test( +tf_py_test( name = "filter_with_random_uniform_fusion_test", size = "medium", srcs = ["filter_with_random_uniform_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//tensorflow/python:client_testlib", + "//tensorflow/python:constant_op", + "//tensorflow/python:dtypes", + "//tensorflow/python:errors", + "//tensorflow/python:math_ops", + "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "manual", "no_oss", @@ -85,32 +72,14 @@ py_test( "no_windows", "notap", # TODO(b/131229793) ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/experimental/ops:optimization_options", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], ) -py_test( +tf_py_test( name = "hoist_random_uniform_test", size = "small", srcs = ["hoist_random_uniform_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:control_flow_ops", @@ -119,113 +88,103 @@ py_test( "//tensorflow/python:framework_ops", "//tensorflow/python:math_ops", "//tensorflow/python:random_ops", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) -py_test( +tf_py_test( name = "latency_all_edges_test", size = "small", srcs = ["latency_all_edges_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python/data/experimental/kernel_tests:stats_dataset_test_base", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:stats_aggregator", 
"//tensorflow/python/data/experimental/ops:stats_ops", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "map_and_batch_fusion_test", - srcs = ["map_and_batch_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_and_batch_fusion_test", + srcs = ["map_and_batch_fusion_test.py"], + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "map_and_filter_fusion_test", - srcs = ["map_and_filter_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_and_filter_fusion_test", + srcs = ["map_and_filter_fusion_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", "//tensorflow/python:errors", "//tensorflow/python:math_ops", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", ], -) - -py_test( - name = "map_fusion_test", - srcs = ["map_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_fusion_test", + srcs = ["map_fusion_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) -py_test( +tf_py_test( name = "map_parallelization_test", size = "small", srcs = ["map_parallelization_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:control_flow_ops", @@ -234,27 +193,25 @@ py_test( "//tensorflow/python:framework_ops", "//tensorflow/python:math_ops", "//tensorflow/python:random_ops", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", ], -) - -py_test( - name = "map_vectorization_test", - size = "small", - srcs = ["map_vectorization_test.py"], - python_version = "PY2", - shard_count = 8, - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", 
"no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_vectorization_test", + size = "small", + srcs = ["map_vectorization_test.py"], + additional_deps = [ + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", "//tensorflow/core:protos_all_py", "//tensorflow/python:array_ops", "//tensorflow/python:bitwise_ops", @@ -272,152 +229,54 @@ py_test( "//tensorflow/python:parsing_ops", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/experimental/ops:batching", - "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", ], -) - -py_test( - name = "choose_fastest_dataset_test", - size = "small", - srcs = ["choose_fastest_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", + shard_count = 8, tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], ) -py_test( - name = "choose_fastest_branch_dataset_test", - size = "small", - srcs = ["choose_fastest_branch_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:constant_op", - "//tensorflow/python:errors", - "//tensorflow/python:math_ops", - "//tensorflow/python/data/experimental/ops:batching", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( - name = "model_dataset_test", - size = "medium", - srcs = ["model_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - "optonly", - ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], -) - -py_test( +tf_py_test( name = "noop_elimination_test", size = "small", srcs = ["noop_elimination_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", "//tensorflow/python:errors", "//tensorflow/python:math_ops", - "//tensorflow/python/data/experimental/ops:optimization", + "//tensorflow/python/data/experimental/ops:testing", "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "optimize_dataset_test", - size = "medium", - srcs = ["optimize_dataset_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - "//tensorflow/python:array_ops", - "//tensorflow/python:client_testlib", - "//tensorflow/python:dtypes", - "//tensorflow/python:errors", - 
"//tensorflow/python:random_ops", - "//tensorflow/python:variable_scope", - "//tensorflow/python/data/experimental/ops:batching", - "//tensorflow/python/data/experimental/ops:grouping", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/experimental/ops:optimization_options", - "//tensorflow/python/data/experimental/ops:scan_ops", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - "//tensorflow/python/eager:context", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", - ], ) -py_test( +tf_py_test( name = "shuffle_and_repeat_fusion_test", srcs = ["shuffle_and_repeat_fusion_test.py"], - python_version = "PY2", - srcs_version = "PY2AND3", + additional_deps = [ + "//tensorflow/python:client_testlib", + "//tensorflow/python:errors", + "//tensorflow/python/data/experimental/ops:optimization_options", + "//tensorflow/python/data/experimental/ops:testing", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - "//tensorflow/python:client_testlib", - "//tensorflow/python:errors", - "//tensorflow/python/data/experimental/ops:optimization", - "//tensorflow/python/data/experimental/ops:optimization_options", - "//tensorflow/python/data/kernel_tests:test_base", - "//tensorflow/python/data/ops:dataset_ops", - ], ) diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/filter_fusion_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/filter_fusion_test.py index 525ae2c54e4..1aa3d636f02 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/filter_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/filter_fusion_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -64,8 +64,8 @@ class FilterFusionTest(test_base.DatasetTestBase, parameterized.TestCase): @parameterized.named_parameters(*_filter_fusion_test_cases()) def testFilterFusion(self, map_function, predicates): dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(["Map", "Filter", - "MemoryCacheImpl"])).map(map_function) + testing.assert_next(["Map", "Filter", + "MemoryCacheImpl"])).map(map_function) for predicate in predicates: dataset = dataset.filter(predicate) diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/filter_with_random_uniform_fusion_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/filter_with_random_uniform_fusion_test.py index 6ce99d79910..2b130f40fc9 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/filter_with_random_uniform_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/filter_with_random_uniform_fusion_test.py @@ -17,7 +17,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from 
tensorflow.python.framework import test_util @@ -30,7 +30,7 @@ class FilterWithRandomUniformFusionTest(test_base.DatasetTestBase): def testFilterWithRandomUniformFusion(self): dataset = dataset_ops.Dataset.range(10000000).apply( - optimization.assert_next(["Sampling"])) + testing.assert_next(["Sampling"])) dataset = dataset.filter(lambda _: random_ops.random_uniform([]) < 0.05) options = dataset_ops.Options() diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/hoist_random_uniform_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/hoist_random_uniform_test.py index 08a44e572b8..928b435fe5c 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/hoist_random_uniform_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/hoist_random_uniform_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -81,7 +81,7 @@ class HoistRandomUniformTest(test_base.DatasetTestBase, parameterized.TestCase): @parameterized.named_parameters(*_hoist_random_uniform_test_cases()) def testHoisting(self, function, will_optimize): dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next( + testing.assert_next( ["Zip[0]", "Map"] if will_optimize else ["Map"])).map(function) options = dataset_ops.Options() @@ -100,7 +100,7 @@ class HoistRandomUniformTest(test_base.DatasetTestBase, parameterized.TestCase): [], minval=1, maxval=10, dtype=dtypes.float32, seed=42) dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(["Zip[0]", "Map"])).map(random_with_capture) + testing.assert_next(["Zip[0]", "Map"])).map(random_with_capture) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.hoist_random_uniform = True diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/latency_all_edges_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/latency_all_edges_test.py index dc8286d352a..f6e5111cf32 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/latency_all_edges_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/latency_all_edges_test.py @@ -18,7 +18,7 @@ from __future__ import division from __future__ import print_function from tensorflow.python.data.experimental.kernel_tests import stats_dataset_test_base -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.experimental.ops import stats_aggregator from tensorflow.python.data.ops import dataset_ops from tensorflow.python.platform import test @@ -29,7 +29,7 @@ class LatencyAllEdgesTest(stats_dataset_test_base.StatsDatasetTestBase): def testLatencyStatsOptimization(self): aggregator = stats_aggregator.StatsAggregator() dataset = dataset_ops.Dataset.from_tensors(1).apply( - optimization.assert_next( + testing.assert_next( ["LatencyStats", "Map", "LatencyStats", "Prefetch", "LatencyStats"])).map(lambda x: x * x).prefetch(1) options = dataset_ops.Options() diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_batch_fusion_test.py 
b/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_batch_fusion_test.py index dc7bb9d6a37..c7e6fbbf377 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_batch_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_batch_fusion_test.py @@ -17,7 +17,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import test_util @@ -29,7 +29,7 @@ class MapAndBatchFusionTest(test_base.DatasetTestBase): def testMapAndBatchFusion(self): dataset = dataset_ops.Dataset.range(10).apply( - optimization.assert_next( + testing.assert_next( ["MapAndBatch"])).map(lambda x: x * x).batch(10) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_filter_fusion_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_filter_fusion_test.py index c1c5e736a88..1e53b4394ae 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_filter_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/map_and_filter_fusion_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -80,8 +80,8 @@ class MapAndFilterFusionTest(test_base.DatasetTestBase, parameterized.TestCase): @parameterized.named_parameters(*_map_and_filter_fusion_test_cases()) def testMapFilterFusion(self, function, predicate): dataset = dataset_ops.Dataset.range(10).apply( - optimization.assert_next(["Map", "Filter", - "Map"])).map(function).filter(predicate) + testing.assert_next(["Map", "Filter", + "Map"])).map(function).filter(predicate) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.map_and_filter_fusion = True @@ -99,8 +99,7 @@ class MapAndFilterFusionTest(test_base.DatasetTestBase, parameterized.TestCase): # We are currently not supporting functions with captured inputs. 
dataset = dataset_ops.Dataset.range(10).apply( - optimization.assert_next(["Map", - "Filter"])).map(function).filter(predicate) + testing.assert_next(["Map", "Filter"])).map(function).filter(predicate) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.map_and_filter_fusion = True diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/map_fusion_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/map_fusion_test.py index b3a7304b4e4..10f27dc277f 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/map_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/map_fusion_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import test_util @@ -68,7 +68,7 @@ class MapFusionTest(test_base.DatasetTestBase, parameterized.TestCase): @parameterized.named_parameters(*_map_fusion_test_cases()) def testMapFusion(self, functions): dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(["Map", "MemoryCacheImpl"])) + testing.assert_next(["Map", "MemoryCacheImpl"])) for function in functions: dataset = dataset.map(function) diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/map_parallelization_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/map_parallelization_test.py index 60649cd3ede..668ab28c64c 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/map_parallelization_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/map_parallelization_test.py @@ -19,7 +19,7 @@ from __future__ import print_function from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -55,7 +55,7 @@ class MapParallelizationTest(test_base.DatasetTestBase, parameterized.TestCase): def testMapParallelization(self, function, should_be_parallel): next_nodes = ["ParallelMap"] if should_be_parallel else ["Map"] dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(next_nodes)).map(function) + testing.assert_next(next_nodes)).map(function) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.map_parallelization = True @@ -70,7 +70,7 @@ class MapParallelizationTest(test_base.DatasetTestBase, parameterized.TestCase): def fn(x): return x + captured_t dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(["ParallelMap"])).map(fn) + testing.assert_next(["ParallelMap"])).map(fn) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.map_parallelization = True @@ -85,7 +85,7 @@ class MapParallelizationTest(test_base.DatasetTestBase, parameterized.TestCase): def fn(x): return x + captured_t dataset = dataset_ops.Dataset.range(5).apply( - optimization.assert_next(["Map"])).map(fn) + 
testing.assert_next(["Map"])).map(fn) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.map_parallelization = True diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/map_vectorization_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/map_vectorization_test.py index ba2bb14cce2..f17d863e555 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/map_vectorization_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/map_vectorization_test.py @@ -23,7 +23,7 @@ import numpy as np from tensorflow.core.example import example_pb2 from tensorflow.core.example import feature_pb2 from tensorflow.python.data.experimental.ops import batching -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -353,7 +353,7 @@ class MapVectorizationTest(test_base.DatasetTestBase, parameterized.TestCase): map_node_name = "Map" if num_parallel_calls is None else "ParallelMap" def _make_dataset(node_names): - dataset = base_dataset.apply(optimization.assert_next(node_names)) + dataset = base_dataset.apply(testing.assert_next(node_names)) dataset = dataset.map(map_fn, num_parallel_calls) dataset = dataset.batch(100) options = dataset_ops.Options() @@ -416,7 +416,7 @@ class MapVectorizationTest(test_base.DatasetTestBase, parameterized.TestCase): base_dataset = base_dataset.with_options(options) def _make_dataset(node_names): - dataset = base_dataset.apply(optimization.assert_next(node_names)) + dataset = base_dataset.apply(testing.assert_next(node_names)) dataset = dataset.apply(batching.map_and_batch(map_fn, 100)) return dataset @@ -464,7 +464,7 @@ class MapVectorizationTest(test_base.DatasetTestBase, parameterized.TestCase): apply_fn_2 = make_apply_fn(fuse_second) def make_dataset(node_names): - dataset = base_dataset.apply(optimization.assert_next(node_names)) + dataset = base_dataset.apply(testing.assert_next(node_names)) dataset = apply_fn_1(dataset) dataset = apply_fn_2(dataset) return dataset diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/noop_elimination_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/noop_elimination_test.py index 74f620e37d5..a401a5c8baf 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/noop_elimination_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/noop_elimination_test.py @@ -17,7 +17,7 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op @@ -37,7 +37,7 @@ class NoopEliminationTest(test_base.DatasetTestBase): dataset = dataset_ops.Dataset.range(5) dataset = dataset.apply( - optimization.assert_next( + testing.assert_next( ["FiniteRepeat", "FiniteSkip", "Prefetch", "MemoryCacheImpl"])) dataset = dataset.repeat(some_tensor).skip(5).take(-1).skip(0).repeat( 1).prefetch(0).prefetch(1).cache() diff --git 
a/tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py b/tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py index 8d429b01cc1..4da7fa27d58 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimization/shuffle_and_repeat_fusion_test.py @@ -18,7 +18,7 @@ from __future__ import division from __future__ import print_function from tensorflow.python import tf2 -from tensorflow.python.data.experimental.ops import optimization +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.eager import context @@ -37,7 +37,7 @@ class ShuffleAndRepeatFusionTest(test_base.DatasetTestBase): expected = "ShuffleAndRepeat" dataset = dataset_ops.Dataset.range(10).apply( - optimization.assert_next([expected])).shuffle(10).repeat(2) + testing.assert_next([expected])).shuffle(10).repeat(2) options = dataset_ops.Options() options.experimental_optimization.apply_default_optimizations = False options.experimental_optimization.shuffle_and_repeat_fusion = True diff --git a/tensorflow/python/data/experimental/kernel_tests/optimization/optimize_dataset_test.py b/tensorflow/python/data/experimental/kernel_tests/optimize_dataset_test.py similarity index 87% rename from tensorflow/python/data/experimental/kernel_tests/optimization/optimize_dataset_test.py rename to tensorflow/python/data/experimental/kernel_tests/optimize_dataset_test.py index a85e0cf801c..1bd7e320466 100644 --- a/tensorflow/python/data/experimental/kernel_tests/optimization/optimize_dataset_test.py +++ b/tensorflow/python/data/experimental/kernel_tests/optimize_dataset_test.py @@ -24,8 +24,8 @@ import numpy as np from tensorflow.python.data.experimental.ops import batching from tensorflow.python.data.experimental.ops import grouping -from tensorflow.python.data.experimental.ops import optimization from tensorflow.python.data.experimental.ops import scan_ops +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.experimental.ops import threadpool from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops @@ -146,7 +146,7 @@ class OptimizeDatasetTest(test_base.DatasetTestBase, parameterized.TestCase): def flat_map_fn(_): dataset = dataset_ops.Dataset.from_tensors(0) - dataset = dataset.apply(optimization.assert_next(["MemoryCacheImpl"])) + dataset = dataset.apply(testing.assert_next(["MemoryCacheImpl"])) dataset = dataset.skip(0) # Should be removed by noop elimination dataset = dataset.cache() return dataset @@ -163,7 +163,7 @@ class OptimizeDatasetTest(test_base.DatasetTestBase, parameterized.TestCase): def flat_map_fn(_): dataset = dataset_ops.Dataset.from_tensors(0) - dataset = dataset.apply(optimization.assert_next(["MapAndBatch"])) + dataset = dataset.apply(testing.assert_next(["MapAndBatch"])) # Should be fused by map and batch fusion dataset = dataset.map(lambda x: x) dataset = dataset.batch(1) @@ -194,30 +194,6 @@ class OptimizeDatasetTest(test_base.DatasetTestBase, parameterized.TestCase): expected_output=[list(range(10))], requires_initialization=True) - def testOptimizationNonSerializable(self): - dataset = dataset_ops.Dataset.from_tensors(0) - dataset = dataset.apply(optimization.assert_next(["FiniteSkip"])) - dataset = 
dataset.skip(0) # Should not be removed by noop elimination - dataset = dataset.apply(optimization.non_serializable()) - dataset = dataset.apply(optimization.assert_next(["MemoryCacheImpl"])) - dataset = dataset.skip(0) # Should be removed by noop elimination - dataset = dataset.cache() - options = dataset_ops.Options() - options.experimental_optimization.apply_default_optimizations = False - options.experimental_optimization.noop_elimination = True - dataset = dataset.with_options(options) - self.assertDatasetProduces(dataset, expected_output=[0]) - - def testOptimizationNonSerializableAsDirectInput(self): - """Tests that non-serializable dataset can be OptimizeDataset's input.""" - dataset = dataset_ops.Dataset.from_tensors(0) - dataset = dataset.apply(optimization.non_serializable()) - options = dataset_ops.Options() - options.experimental_optimization.apply_default_optimizations = False - options.experimental_optimization.noop_elimination = True - dataset = dataset.with_options(options) - self.assertDatasetProduces(dataset, expected_output=[0]) - @parameterized.named_parameters(_generate_captured_refvar_test_cases()) @test_util.run_v1_only("RefVariables are not supported in eager mode.") def testSkipEagerOptimizationWithCapturedRefVar(self, dataset_fn): diff --git a/tensorflow/python/data/experimental/kernel_tests/serialization/BUILD b/tensorflow/python/data/experimental/kernel_tests/serialization/BUILD index 7770323fc48..70671a89aad 100644 --- a/tensorflow/python/data/experimental/kernel_tests/serialization/BUILD +++ b/tensorflow/python/data/experimental/kernel_tests/serialization/BUILD @@ -1,4 +1,4 @@ -load("//tensorflow:tensorflow.bzl", "py_test") +load("//tensorflow:tensorflow.bzl", "tf_py_test") package( default_visibility = ["//tensorflow:internal"], @@ -31,19 +31,13 @@ py_library( ], ) -py_test( +tf_py_test( name = "auto_shard_dataset_serialization_test", size = "medium", srcs = ["auto_shard_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:sparse_tensor", @@ -51,64 +45,57 @@ py_test( "//tensorflow/python/data/experimental/ops:interleave_ops", "//tensorflow/python/data/experimental/ops:readers", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "batch_dataset_serialization_test", - size = "medium", - srcs = ["batch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "batch_dataset_serialization_test", + size = "medium", + srcs = ["batch_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "cache_dataset_serialization_test", - size = "small", - srcs = ["cache_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "cache_dataset_serialization_test", + size = "small", + srcs = 
["cache_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python:errors", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) -py_test( +tf_py_test( name = "checkpoint_input_pipeline_hook_test", size = "small", srcs = ["checkpoint_input_pipeline_hook_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_pip", - "no_windows", - "notsan", - ], - deps = [ + additional_deps = [ "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", @@ -120,20 +107,18 @@ py_test( "//tensorflow/python/data/ops:dataset_ops", "//tensorflow/python/estimator:estimator_py", ], + tags = [ + "no_pip", + "no_windows", + "notsan", + ], ) -py_test( +tf_py_test( name = "choose_fastest_branch_dataset_serialization_test", size = "medium", srcs = ["choose_fastest_branch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", @@ -142,137 +127,124 @@ py_test( "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "choose_fastest_dataset_serialization_test", - size = "small", - srcs = ["choose_fastest_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "choose_fastest_dataset_serialization_test", + size = "small", + srcs = ["choose_fastest_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "concatenate_dataset_serialization_test", size = "small", srcs = ["concatenate_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", + additional_deps = [ + ":dataset_serialization_test_base", + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - ":dataset_serialization_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], ) -py_test( +tf_py_test( name = "csv_dataset_serialization_test", size = "small", srcs = ["csv_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_ops", "//tensorflow/python/data/experimental/ops:readers", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "dataset_constructor_serialization_test", size = "medium", srcs = ["dataset_constructor_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", + 
"//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "filter_dataset_serialization_test", - size = "medium", - srcs = ["filter_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "filter_dataset_serialization_test", + size = "medium", + srcs = ["filter_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:math_ops", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "fixed_length_record_dataset_serialization_test", - size = "medium", - srcs = ["fixed_length_record_dataset_serialization_test.py"], - python_version = "PY3", - shard_count = 4, - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "fixed_length_record_dataset_serialization_test", + size = "medium", + srcs = ["fixed_length_record_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/kernel_tests:reader_dataset_ops_test_base", "//tensorflow/python/data/ops:readers", ], -) - -py_test( - name = "flat_map_dataset_serialization_test", - size = "medium", - srcs = ["flat_map_dataset_serialization_test.py"], - python_version = "PY3", + shard_count = 4, tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "flat_map_dataset_serialization_test", + size = "medium", + srcs = ["flat_map_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", @@ -286,124 +258,113 @@ py_test( "//tensorflow/python:variable_scope", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "group_by_reducer_serialization_test", size = "medium", srcs = ["group_by_reducer_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", + additional_deps = [ + ":dataset_serialization_test_base", + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python/data/experimental/ops:grouping", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - ":dataset_serialization_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/experimental/ops:grouping", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], ) -py_test( +tf_py_test( name = "group_by_window_serialization_test", size = "medium", srcs = ["group_by_window_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:grouping", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "ignore_errors_serialization_test", - size = "small", - srcs = ["ignore_errors_serialization_test.py"], - python_version = "PY3", - srcs_version = 
"PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "ignore_errors_serialization_test", + size = "small", + srcs = ["ignore_errors_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:error_ops", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "interleave_dataset_serialization_test", - size = "medium", - srcs = ["interleave_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "interleave_dataset_serialization_test", + size = "medium", + srcs = ["interleave_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "@absl_py//absl/testing:parameterized", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:sparse_ops", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - "@absl_py//absl/testing:parameterized", ], -) - -py_test( - name = "map_and_batch_dataset_serialization_test", - size = "medium", - srcs = ["map_and_batch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_and_batch_dataset_serialization_test", + size = "medium", + srcs = ["map_and_batch_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:math_ops", "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "map_dataset_serialization_test", - size = "medium", - srcs = ["map_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "map_dataset_serialization_test", + size = "medium", + srcs = ["map_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", @@ -414,122 +375,110 @@ py_test( "//tensorflow/python:sparse_tensor", "//tensorflow/python:variable_scope", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "matching_files_dataset_serialization_test", - size = "small", - srcs = ["matching_files_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_windows", - ], - deps = [ - ":dataset_serialization_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/experimental/ops:matching_files", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], -) - -py_test( - name = "optimize_dataset_serialization_test", - size = "small", - srcs = ["optimize_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "matching_files_dataset_serialization_test", + size = "small", + srcs = ["matching_files_dataset_serialization_test.py"], + additional_deps = [ + ":dataset_serialization_test_base", + "//third_party/py/numpy", + 
"//tensorflow/python:client_testlib", + "//tensorflow/python/data/experimental/ops:matching_files", + "//tensorflow/python/data/ops:dataset_ops", + ], + tags = [ + "no_windows", + ], +) + +tf_py_test( + name = "optimize_dataset_serialization_test", + size = "small", + srcs = ["optimize_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:optimization", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "rebatch_dataset_serialization_test", - size = "small", - srcs = ["rebatch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "rebatch_dataset_serialization_test", + size = "small", + srcs = ["rebatch_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:distribute", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "padded_batch_dataset_serialization_test", - size = "medium", - srcs = ["padded_batch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "padded_batch_dataset_serialization_test", + size = "medium", + srcs = ["padded_batch_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", "//tensorflow/python:string_ops", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "parallel_interleave_dataset_serialization_test", - size = "medium", - srcs = ["parallel_interleave_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "parallel_interleave_dataset_serialization_test", + size = "medium", + srcs = ["parallel_interleave_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:sparse_ops", "//tensorflow/python:sparse_tensor", "//tensorflow/python/data/experimental/ops:interleave_ops", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "parallel_map_dataset_serialization_test", - size = "medium", - srcs = ["parallel_map_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "parallel_map_dataset_serialization_test", + size = "medium", + srcs = ["parallel_map_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python:constant_op", "//tensorflow/python:dtypes", @@ -539,58 +488,51 @@ py_test( "//tensorflow/python:random_ops", "//tensorflow/python:variable_scope", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "parse_example_dataset_serialization_test", - size = "medium", - srcs = ["parse_example_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + 
+tf_py_test( + name = "parse_example_dataset_serialization_test", + size = "medium", + srcs = ["parse_example_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/kernel_tests:reader_dataset_ops_test_base", ], -) - -py_test( - name = "prefetch_dataset_serialization_test", - size = "small", - srcs = ["prefetch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "prefetch_dataset_serialization_test", + size = "small", + srcs = ["prefetch_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "range_dataset_serialization_test", - size = "small", - srcs = ["range_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "range_dataset_serialization_test", + size = "small", + srcs = ["range_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:dataset_ops_gen", @@ -602,77 +544,69 @@ py_test( "//tensorflow/python:variables", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "sample_from_datasets_serialization_test", - size = "medium", - srcs = ["sample_from_datasets_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "sample_from_datasets_serialization_test", + size = "medium", + srcs = ["sample_from_datasets_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:interleave_ops", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "scan_dataset_serialization_test", - size = "small", - srcs = ["scan_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "scan_dataset_serialization_test", + size = "small", + srcs = ["scan_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:scan_ops", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "sequence_dataset_serialization_test", size = "medium", srcs = ["sequence_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", + additional_deps = [ + ":dataset_serialization_test_base", + "//third_party/py/numpy", + "//tensorflow/python:client_testlib", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - ":dataset_serialization_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", - ], ) -py_test( +tf_py_test( name = "serialization_integration_test", size = "small", srcs = ["serialization_integration_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ 
"//tensorflow/python:client_testlib", "//tensorflow/python:framework_combinations", "//tensorflow/python:framework_ops", @@ -681,58 +615,52 @@ py_test( "//tensorflow/python/data/kernel_tests:test_base", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "shard_dataset_serialization_test", size = "medium", srcs = ["shard_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", + additional_deps = [ + ":dataset_serialization_test_base", + "@absl_py//absl/testing:parameterized", + "//tensorflow/python:client_testlib", + "//tensorflow/python/data/ops:dataset_ops", + ], tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ - ":dataset_serialization_test_base", - "//tensorflow/python:client_testlib", - "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", - ], ) -py_test( +tf_py_test( name = "shuffle_and_repeat_dataset_serialization_test", size = "medium", srcs = ["shuffle_and_repeat_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:shuffle_ops", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "shuffle_dataset_serialization_test", - size = "medium", - srcs = ["shuffle_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "shuffle_dataset_serialization_test", + size = "medium", + srcs = ["shuffle_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python:framework_ops", @@ -740,20 +668,18 @@ py_test( "//tensorflow/python/data/experimental/ops:iterator_ops", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "sql_dataset_serialization_test", - size = "small", - srcs = ["sql_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "sql_dataset_serialization_test", + size = "small", + srcs = ["sql_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", @@ -761,20 +687,18 @@ py_test( "//tensorflow/python/data/experimental/kernel_tests:sql_dataset_test_base", "//tensorflow/python/data/experimental/ops:readers", ], -) - -py_test( - name = "stats_dataset_serialization_test", - size = "medium", - srcs = ["stats_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "stats_dataset_serialization_test", + size = "medium", + srcs = ["stats_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:array_ops", "//tensorflow/python:client_testlib", @@ -783,122 +707,115 @@ py_test( "//tensorflow/python/data/experimental/ops:stats_ops", "//tensorflow/python/data/ops:dataset_ops", ], + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "take_while_dataset_serialization_test", size = "medium", srcs = ["take_while_dataset_serialization_test.py"], - python_version = 
"PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", + "@absl_py//absl/testing:parameterized", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:take_while_ops", "//tensorflow/python/data/ops:dataset_ops", - "@absl_py//absl/testing:parameterized", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) -py_test( +tf_py_test( name = "textline_dataset_serialization_test", size = "medium", srcs = ["textline_dataset_serialization_test.py"], - python_version = "PY3", - shard_count = 4, - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/kernel_tests:reader_dataset_ops_test_base", "//tensorflow/python/data/ops:readers", ], + shard_count = 4, + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "tf_record_dataset_serialization_test", size = "medium", srcs = ["tf_record_dataset_serialization_test.py"], - python_version = "PY3", - shard_count = 4, - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/kernel_tests:reader_dataset_ops_test_base", "//tensorflow/python/data/ops:readers", ], + shard_count = 4, + tags = [ + "no_oss", + "no_pip", + "no_windows", + ], ) -py_test( +tf_py_test( name = "unbatch_dataset_serialization_test", size = "medium", srcs = ["unbatch_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", - tags = [ - "no_oss", - "no_pip", - "no_windows", - ], - deps = [ + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:batching", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", ], -) - -py_test( - name = "unique_dataset_serialization_test", - size = "small", - srcs = ["unique_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "unique_dataset_serialization_test", + size = "small", + srcs = ["unique_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", "//tensorflow/python:client_testlib", "//tensorflow/python/data/experimental/ops:unique", "//tensorflow/python/data/ops:dataset_ops", ], -) - -py_test( - name = "zip_dataset_serialization_test", - size = "small", - srcs = ["zip_dataset_serialization_test.py"], - python_version = "PY3", - srcs_version = "PY2AND3", tags = [ "no_oss", "no_pip", "no_windows", ], - deps = [ +) + +tf_py_test( + name = "zip_dataset_serialization_test", + size = "small", + srcs = ["zip_dataset_serialization_test.py"], + additional_deps = [ ":dataset_serialization_test_base", + "//third_party/py/numpy", "//tensorflow/python:client_testlib", "//tensorflow/python/data/ops:dataset_ops", - "//third_party/py/numpy", + ], + tags = [ + "no_oss", + "no_pip", + "no_windows", ], ) diff --git a/tensorflow/python/data/experimental/kernel_tests/sleep_test.py b/tensorflow/python/data/experimental/kernel_tests/sleep_test.py index c6be0909a88..a24e37b55ca 100644 --- a/tensorflow/python/data/experimental/kernel_tests/sleep_test.py +++ 
b/tensorflow/python/data/experimental/kernel_tests/sleep_test.py @@ -21,7 +21,7 @@ import time from absl.testing import parameterized -from tensorflow.python.data.experimental.ops import sleep +from tensorflow.python.data.experimental.ops import testing from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import combinations @@ -36,7 +36,7 @@ class SleepTest(test_base.DatasetTestBase, parameterized.TestCase): self.skipTest("b/123597912") sleep_microseconds = 100 dataset = dataset_ops.Dataset.range(10).apply( - sleep.sleep(sleep_microseconds)) + testing.sleep(sleep_microseconds)) next_element = self.getNext(dataset) start_time = time.time() for i in range(10): @@ -50,7 +50,7 @@ class SleepTest(test_base.DatasetTestBase, parameterized.TestCase): def testSleepCancellation(self): sleep_microseconds = int(1e6) * 1000 ds = dataset_ops.Dataset.range(1) - ds = ds.apply(sleep.sleep(sleep_microseconds)) + ds = ds.apply(testing.sleep(sleep_microseconds)) ds = ds.prefetch(1) get_next = self.getNext(ds, requires_initialization=True) @@ -67,7 +67,7 @@ class SleepTest(test_base.DatasetTestBase, parameterized.TestCase): sleep_microseconds = int(1e6) * 1000 ds_sleep = dataset_ops.Dataset.range(1) - ds_sleep = ds.apply(sleep.sleep(sleep_microseconds)) + ds_sleep = ds.apply(testing.sleep(sleep_microseconds)) ds = ds.concatenate(ds_sleep) ds = ds.prefetch(1) diff --git a/tensorflow/python/data/experimental/ops/BUILD b/tensorflow/python/data/experimental/ops/BUILD index f133e4aa595..b2c41cc3a71 100644 --- a/tensorflow/python/data/experimental/ops/BUILD +++ b/tensorflow/python/data/experimental/ops/BUILD @@ -332,16 +332,6 @@ py_library( ], ) -py_library( - name = "sleep", - srcs = ["sleep.py"], - srcs_version = "PY2AND3", - deps = [ - "//tensorflow/python:experimental_dataset_ops_gen", - "//tensorflow/python/data/ops:dataset_ops", - ], -) - py_library( name = "snapshot", srcs = [ @@ -405,6 +395,18 @@ py_library( ], ) +py_library( + name = "testing", + testonly = 1, + srcs = ["testing.py"], + srcs_version = "PY2AND3", + deps = [ + "//tensorflow/python:experimental_dataset_ops_gen", + "//tensorflow/python:framework_ops", + "//tensorflow/python/data/ops:dataset_ops", + ], +) + py_library( name = "threading_options", srcs = ["threading_options.py"], @@ -475,7 +477,6 @@ py_library( ":resampling", ":scan_ops", ":shuffle_ops", - ":sleep", ":snapshot", ":stats_ops", ":take_while_ops", diff --git a/tensorflow/python/data/experimental/ops/optimization.py b/tensorflow/python/data/experimental/ops/optimization.py index a5f71d376c1..4581a612ed6 100644 --- a/tensorflow/python/data/experimental/ops/optimization.py +++ b/tensorflow/python/data/experimental/ops/optimization.py @@ -18,32 +18,9 @@ from __future__ import division from __future__ import print_function from tensorflow.python.data.ops import dataset_ops -from tensorflow.python.framework import dtypes -from tensorflow.python.framework import ops from tensorflow.python.ops import gen_experimental_dataset_ops -# TODO(jsimsa): Support RE matching for both individual transformation (e.g. to -# account for indexing) and transformation sequence. -def assert_next(transformations): - """A transformation that asserts which transformations happen next. - - Args: - transformations: A `tf.string` vector `tf.Tensor` identifying the - transformations that are expected to happen next. - - Returns: - A `Dataset` transformation function, which can be passed to - `tf.data.Dataset.apply`. 
- """ - - def _apply_fn(dataset): - """Function from `Dataset` to `Dataset` that applies the transformation.""" - return _AssertNextDataset(dataset, transformations) - - return _apply_fn - - def model(): """A transformation that models performance. @@ -59,21 +36,6 @@ def model(): return _apply_fn -def non_serializable(): - """A non-serializable identity transformation. - - Returns: - A `Dataset` transformation function, which can be passed to - `tf.data.Dataset.apply`. - """ - - def _apply_fn(dataset): - """Function from `Dataset` to `Dataset` that applies the transformation.""" - return _NonSerializableDataset(dataset) - - return _apply_fn - - def optimize(optimizations=None): """A transformation that applies optimizations. @@ -94,37 +56,6 @@ def optimize(optimizations=None): return _apply_fn -class _AssertNextDataset(dataset_ops.UnaryUnchangedStructureDataset): - """A `Dataset` that asserts which transformations happen next.""" - - def __init__(self, input_dataset, transformations): - """See `assert_next()` for details.""" - self._input_dataset = input_dataset - if transformations is None: - raise ValueError("At least one transformation should be specified") - self._transformations = ops.convert_to_tensor( - transformations, dtype=dtypes.string, name="transformations") - variant_tensor = ( - gen_experimental_dataset_ops.assert_next_dataset( - self._input_dataset._variant_tensor, # pylint: disable=protected-access - self._transformations, - **self._flat_structure)) - super(_AssertNextDataset, self).__init__(input_dataset, variant_tensor) - - -class _NonSerializableDataset(dataset_ops.UnaryUnchangedStructureDataset): - """A `Dataset` that performs non-serializable identity transformation.""" - - def __init__(self, input_dataset): - """See `non_serializable()` for details.""" - self._input_dataset = input_dataset - variant_tensor = ( - gen_experimental_dataset_ops.non_serializable_dataset( - self._input_dataset._variant_tensor, # pylint: disable=protected-access - **self._flat_structure)) - super(_NonSerializableDataset, self).__init__(input_dataset, variant_tensor) - - class _ChooseFastestDataset(dataset_ops.DatasetV2): """A `Dataset` that merges two input datasets.""" diff --git a/tensorflow/python/data/experimental/ops/testing.py b/tensorflow/python/data/experimental/ops/testing.py new file mode 100644 index 00000000000..ec5b4810b23 --- /dev/null +++ b/tensorflow/python/data/experimental/ops/testing.py @@ -0,0 +1,123 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/tensorflow/python/data/experimental/ops/testing.py b/tensorflow/python/data/experimental/ops/testing.py
new file mode 100644
index 00000000000..ec5b4810b23
--- /dev/null
+++ b/tensorflow/python/data/experimental/ops/testing.py
@@ -0,0 +1,123 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Experimental API for testing of tf.data."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.data.ops import dataset_ops
+from tensorflow.python.framework import dtypes
+from tensorflow.python.framework import ops
+from tensorflow.python.ops import gen_experimental_dataset_ops
+
+
+# TODO(jsimsa): Support RE matching for both individual transformation (e.g. to
+# account for indexing) and transformation sequence.
+def assert_next(transformations):
+  """A transformation that asserts which transformations happen next.
+
+  Args:
+    transformations: A `tf.string` vector `tf.Tensor` identifying the
+      transformations that are expected to happen next.
+
+  Returns:
+    A `Dataset` transformation function, which can be passed to
+    `tf.data.Dataset.apply`.
+  """
+
+  def _apply_fn(dataset):
+    """Function from `Dataset` to `Dataset` that applies the transformation."""
+    return _AssertNextDataset(dataset, transformations)
+
+  return _apply_fn
+
+
+def non_serializable():
+  """A non-serializable identity transformation.
+
+  Returns:
+    A `Dataset` transformation function, which can be passed to
+    `tf.data.Dataset.apply`.
+  """
+
+  def _apply_fn(dataset):
+    """Function from `Dataset` to `Dataset` that applies the transformation."""
+    return _NonSerializableDataset(dataset)
+
+  return _apply_fn
+
+
+def sleep(sleep_microseconds):
+  """Sleeps for `sleep_microseconds` before producing each input element.
+
+  Args:
+    sleep_microseconds: The number of microseconds to sleep before producing an
+      input element.
+
+  Returns:
+    A `Dataset` transformation function, which can be passed to
+    `tf.data.Dataset.apply`.
+  """
+
+  def _apply_fn(dataset):
+    return _SleepDataset(dataset, sleep_microseconds)
+
+  return _apply_fn
+
+
+class _AssertNextDataset(dataset_ops.UnaryUnchangedStructureDataset):
+  """A `Dataset` that asserts which transformations happen next."""
+
+  def __init__(self, input_dataset, transformations):
+    """See `assert_next()` for details."""
+    self._input_dataset = input_dataset
+    if transformations is None:
+      raise ValueError("At least one transformation should be specified")
+    self._transformations = ops.convert_to_tensor(
+        transformations, dtype=dtypes.string, name="transformations")
+    variant_tensor = (
+        gen_experimental_dataset_ops.experimental_assert_next_dataset(
+            self._input_dataset._variant_tensor,  # pylint: disable=protected-access
+            self._transformations,
+            **self._flat_structure))
+    super(_AssertNextDataset, self).__init__(input_dataset, variant_tensor)
+
+
+class _NonSerializableDataset(dataset_ops.UnaryUnchangedStructureDataset):
+  """A `Dataset` that performs non-serializable identity transformation."""
+
+  def __init__(self, input_dataset):
+    """See `non_serializable()` for details."""
+    self._input_dataset = input_dataset
+    variant_tensor = (
+        gen_experimental_dataset_ops.experimental_non_serializable_dataset(
+            self._input_dataset._variant_tensor,  # pylint: disable=protected-access
+            **self._flat_structure))
+    super(_NonSerializableDataset, self).__init__(input_dataset, variant_tensor)
+
+
+class _SleepDataset(dataset_ops.UnaryUnchangedStructureDataset):
+  """A `Dataset` that sleeps before producing each upstream element."""
+
+  def __init__(self, input_dataset, sleep_microseconds):
+    self._input_dataset = input_dataset
+    self._sleep_microseconds = sleep_microseconds
+    variant_tensor = gen_experimental_dataset_ops.sleep_dataset(
+        self._input_dataset._variant_tensor,  # pylint: disable=protected-access
+        self._sleep_microseconds,
+        **self._flat_structure)
+    super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
+
+
diff --git a/tensorflow/python/data/kernel_tests/BUILD b/tensorflow/python/data/kernel_tests/BUILD
index f4639b72ef2..148fafb0926 100644
--- a/tensorflow/python/data/kernel_tests/BUILD
+++ b/tensorflow/python/data/kernel_tests/BUILD
@@ -413,7 +413,7 @@ cuda_py_test(
         "//tensorflow/python/data/ops:dataset_ops",
         "//tensorflow/python/data/ops:multi_device_iterator_ops",
         "//tensorflow/python/data/ops:iterator_ops",
-        "//tensorflow/python/data/experimental/ops:optimization",
+        "//tensorflow/python/data/experimental/ops:testing",
         "//tensorflow/python/data/experimental/ops:optimization_options",
         "//tensorflow/python:array_ops",
         "//tensorflow/python:client_testlib",
@@ -555,7 +555,7 @@ cuda_py_test(
         "//tensorflow/python:client_testlib",
         "//tensorflow/python:dtypes",
         "//tensorflow/python:math_ops",
-        "//tensorflow/python/data/experimental/ops:optimization",
+        "//tensorflow/python/data/experimental/ops:testing",
         "//tensorflow/python:sparse_tensor",
         "//tensorflow/python/data/ops:dataset_ops",
     ],
diff --git a/tensorflow/python/data/kernel_tests/multi_device_iterator_test.py b/tensorflow/python/data/kernel_tests/multi_device_iterator_test.py
index 61f4ae580b6..5535173a92d 100644
--- a/tensorflow/python/data/kernel_tests/multi_device_iterator_test.py
+++ b/tensorflow/python/data/kernel_tests/multi_device_iterator_test.py
@@ -23,7 +23,7 @@ import numpy as np
 
 from tensorflow.core.protobuf import config_pb2
 from tensorflow.python.client import session
-from tensorflow.python.data.experimental.ops import optimization
+from tensorflow.python.data.experimental.ops import testing
 from tensorflow.python.data.kernel_tests import test_base
 from tensorflow.python.data.ops import dataset_ops
 from tensorflow.python.data.ops import multi_device_iterator_ops
@@ -323,7 +323,7 @@ class MultiDeviceIteratorTest(test_base.DatasetTestBase,
   @combinations.generate(skip_v2_test_combinations())
   def testOptimization(self):
     dataset = dataset_ops.Dataset.range(10)
-    dataset = dataset.apply(optimization.assert_next(["MemoryCacheImpl"]))
+    dataset = dataset.apply(testing.assert_next(["MemoryCacheImpl"]))
     dataset = dataset.skip(0)  # this should be optimized away
     dataset = dataset.cache()
 
diff --git a/tensorflow/python/data/kernel_tests/reduce_test.py b/tensorflow/python/data/kernel_tests/reduce_test.py
index 9d9b1fa7822..35c07fd42c1 100644
--- a/tensorflow/python/data/kernel_tests/reduce_test.py
+++ b/tensorflow/python/data/kernel_tests/reduce_test.py
@@ -22,7 +22,7 @@ import time
 from absl.testing import parameterized
 import numpy as np
 
-from tensorflow.python.data.experimental.ops import optimization
+from tensorflow.python.data.experimental.ops import testing
 from tensorflow.python.data.kernel_tests import test_base
 from tensorflow.python.data.ops import dataset_ops
 from tensorflow.python.eager import function
@@ -239,7 +239,7 @@ class ReduceTest(test_base.DatasetTestBase, parameterized.TestCase):
   @combinations.generate(test_base.default_test_combinations())
   def testOptions(self):
     dataset = dataset_ops.Dataset.range(5)
-    dataset = dataset.apply(optimization.assert_next(["MapAndBatch"]))
+    dataset = dataset.apply(testing.assert_next(["MapAndBatch"]))
     dataset = dataset.map(lambda x: x).batch(5)
     self.evaluate(dataset.reduce(0, lambda state, value: state))
 
diff --git a/tensorflow/tools/pip_package/pip_smoke_test.py b/tensorflow/tools/pip_package/pip_smoke_test.py
index 7e3643f65b7..0237df95ebc 100644
--- a/tensorflow/tools/pip_package/pip_smoke_test.py
+++ b/tensorflow/tools/pip_package/pip_smoke_test.py
@@ -82,6 +82,7 @@ DEPENDENCY_BLACKLIST = [
     "//tensorflow/core:image_testdata",
     "//tensorflow/core:lmdb_testdata",
     "//tensorflow/core/kernels/cloud:bigquery_reader_ops",
+    "//tensorflow/python/data/experimental/ops:testing",
     "//tensorflow/python/debug:grpc_tensorflow_server.par",
     "//tensorflow/python/feature_column:vocabulary_testdata",
     "//tensorflow/python:framework/test_file_system.so",
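Illustrative sketch (assumed behavior, not part of the patch): when the transformation that actually follows `assert_next` does not match the expectation, iteration fails at runtime; the exact error type and message below are an assumption based on how other tf.data assertions surface, not something this patch specifies.

# Sketch, not part of the patch: assert_next failing when the expected op is absent.
from tensorflow.python.data.experimental.ops import testing
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import errors

ds = dataset_ops.Dataset.range(10)
ds = ds.apply(testing.assert_next(["Batch"]))  # but a map comes next, not a batch
ds = ds.map(lambda x: x + 1)
try:
  for _ in ds:  # the assertion is checked when elements are produced
    pass
except errors.InvalidArgumentError as e:  # assumed error type
  print("assert_next failed as expected:", e)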