From ed5a72cb2aa942eda4879ca9e814c13fb2cff50d Mon Sep 17 00:00:00 2001 From: Jiri Simsa Date: Thu, 30 Apr 2020 11:35:12 -0700 Subject: [PATCH] [tf.data] Moving `cardinality` to core API. PiperOrigin-RevId: 309266739 Change-Id: I9f9364a13e01d4cab32ddea06a1ec26f2f5ff307 --- .../api_def_DatasetCardinalityV2.pbtxt | 4 + tensorflow/core/kernels/data/dataset_ops.cc | 18 ++ tensorflow/core/kernels/data/dataset_ops.h | 7 + .../core/ops/experimental_dataset_ops.cc | 5 + .../data/experimental/ops/cardinality.py | 2 + tensorflow/python/data/kernel_tests/BUILD | 11 ++ .../data/kernel_tests/cardinality_test.py | 171 ++++++++++++++++++ tensorflow/python/data/ops/dataset_ops.py | 30 +++ .../golden/v1/tensorflow.data.-dataset.pbtxt | 4 + ...ow.data.-fixed-length-record-dataset.pbtxt | 4 + .../tensorflow.data.-t-f-record-dataset.pbtxt | 4 + .../tensorflow.data.-text-line-dataset.pbtxt | 4 + ...rflow.data.experimental.-csv-dataset.pbtxt | 4 + ...ow.data.experimental.-random-dataset.pbtxt | 4 + ...rflow.data.experimental.-sql-dataset.pbtxt | 4 + .../api/golden/v1/tensorflow.raw_ops.pbtxt | 4 + .../golden/v2/tensorflow.data.-dataset.pbtxt | 4 + ...ow.data.-fixed-length-record-dataset.pbtxt | 4 + .../tensorflow.data.-t-f-record-dataset.pbtxt | 4 + .../tensorflow.data.-text-line-dataset.pbtxt | 4 + ...rflow.data.experimental.-csv-dataset.pbtxt | 4 + ...ow.data.experimental.-random-dataset.pbtxt | 4 + ...rflow.data.experimental.-sql-dataset.pbtxt | 4 + .../api/golden/v2/tensorflow.raw_ops.pbtxt | 4 + 24 files changed, 312 insertions(+) create mode 100644 tensorflow/core/api_def/base_api/api_def_DatasetCardinalityV2.pbtxt create mode 100644 tensorflow/python/data/kernel_tests/cardinality_test.py diff --git a/tensorflow/core/api_def/base_api/api_def_DatasetCardinalityV2.pbtxt b/tensorflow/core/api_def/base_api/api_def_DatasetCardinalityV2.pbtxt new file mode 100644 index 00000000000..a548b0fd05f --- /dev/null +++ b/tensorflow/core/api_def/base_api/api_def_DatasetCardinalityV2.pbtxt @@ -0,0 
+1,4 @@ +op { + graph_op_name: "DatasetCardinalityV2" + visibility: HIDDEN +} diff --git a/tensorflow/core/kernels/data/dataset_ops.cc b/tensorflow/core/kernels/data/dataset_ops.cc index ddfaa69b14b..dbb125e9c9e 100644 --- a/tensorflow/core/kernels/data/dataset_ops.cc +++ b/tensorflow/core/kernels/data/dataset_ops.cc @@ -115,6 +115,21 @@ void DatasetCardinalityOp::Compute(OpKernelContext* ctx) { result->scalar<int64>()() = dataset->Cardinality(); } +void DatasetCardinalityV2Op::Compute(OpKernelContext* ctx) { + DatasetBase* dataset; + OP_REQUIRES_OK(ctx, GetDatasetFromVariantTensor(ctx->input(0), &dataset)); + Tensor* result; + OP_REQUIRES_OK(ctx, ctx->allocate_output(0, TensorShape({}), &result)); + int64 cardinality = dataset->Cardinality(); + if (cardinality == data::kUnknownCardinality) { + result->scalar<double>()() = std::nan(""); + } else if (cardinality == data::kInfiniteCardinality) { + result->scalar<double>()() = std::numeric_limits<double>::infinity(); + } else { + result->scalar<double>()() = cardinality; + } +} + void DatasetFromGraphOp::Compute(OpKernelContext* ctx) { tstring graph_def_string; OP_REQUIRES_OK(ctx, @@ -163,6 +178,9 @@ REGISTER_KERNEL_BUILDER( Name("ExperimentalDatasetCardinality").Device(DEVICE_CPU), DatasetCardinalityOp); +REGISTER_KERNEL_BUILDER(Name("DatasetCardinalityV2").Device(DEVICE_CPU), + DatasetCardinalityV2Op); + REGISTER_KERNEL_BUILDER(Name("DatasetFromGraph").Device(DEVICE_CPU), DatasetFromGraphOp); diff --git a/tensorflow/core/kernels/data/dataset_ops.h b/tensorflow/core/kernels/data/dataset_ops.h index 9895585f3de..b48da4ea571 100644 --- a/tensorflow/core/kernels/data/dataset_ops.h +++ b/tensorflow/core/kernels/data/dataset_ops.h @@ -49,6 +49,13 @@ class DatasetCardinalityOp : public OpKernel { void Compute(OpKernelContext* ctx) override; }; +class DatasetCardinalityV2Op : public OpKernel { + public: + explicit DatasetCardinalityV2Op(OpKernelConstruction* ctx) : OpKernel(ctx) {} + + void Compute(OpKernelContext* ctx) override; +}; + class DatasetFromGraphOp :
public OpKernel { public: static constexpr const char* const kGraphDef = "graph_def"; diff --git a/tensorflow/core/ops/experimental_dataset_ops.cc b/tensorflow/core/ops/experimental_dataset_ops.cc index fcde36afe55..480626a2465 100644 --- a/tensorflow/core/ops/experimental_dataset_ops.cc +++ b/tensorflow/core/ops/experimental_dataset_ops.cc @@ -221,6 +221,11 @@ REGISTER_OP("DatasetCardinality") .Output("cardinality: int64") .SetShapeFn(shape_inference::ScalarShape); +REGISTER_OP("DatasetCardinalityV2") + .Input("input_dataset: variant") + .Output("cardinality: float64") + .SetShapeFn(shape_inference::ScalarShape); + REGISTER_OP("ExperimentalDatasetCardinality") .Input("input_dataset: variant") .Output("cardinality: int64") diff --git a/tensorflow/python/data/experimental/ops/cardinality.py b/tensorflow/python/data/experimental/ops/cardinality.py index 54d30e1fba8..c59e68f6745 100644 --- a/tensorflow/python/data/experimental/ops/cardinality.py +++ b/tensorflow/python/data/experimental/ops/cardinality.py @@ -21,6 +21,7 @@ from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops +from tensorflow.python.util import deprecation from tensorflow.python.util.tf_export import tf_export @@ -32,6 +33,7 @@ tf_export("data.experimental.UNKNOWN_CARDINALITY").export_constant( __name__, "UNKNOWN") +@deprecation.deprecated(None, "Use `tf.data.Dataset.cardinality()`") @tf_export("data.experimental.cardinality") def cardinality(dataset): """Returns the cardinality of `dataset`, if known.
diff --git a/tensorflow/python/data/kernel_tests/BUILD b/tensorflow/python/data/kernel_tests/BUILD index 2e01021cfd2..25b930cec01 100644 --- a/tensorflow/python/data/kernel_tests/BUILD +++ b/tensorflow/python/data/kernel_tests/BUILD @@ -46,6 +46,17 @@ tf_py_test( ], ) +tf_py_test( + name = "cardinality_test", + srcs = ["cardinality_test.py"], + deps = [ + "//tensorflow/python/data/experimental/ops:cardinality", + "//tensorflow/python/data/kernel_tests:test_base", + "//tensorflow/python/data/ops:dataset_ops", + "@absl_py//absl/testing:parameterized", + ], +) + tf_py_test( name = "checkpoint_test", size = "medium", diff --git a/tensorflow/python/data/kernel_tests/cardinality_test.py b/tensorflow/python/data/kernel_tests/cardinality_test.py new file mode 100644 index 00000000000..b18f992d2e9 --- /dev/null +++ b/tensorflow/python/data/kernel_tests/cardinality_test.py @@ -0,0 +1,171 @@ +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for `tf.data.Dataset.cardinality()`.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools + +from absl.testing import parameterized + +import numpy as np + +from tensorflow.python.data.kernel_tests import test_base +from tensorflow.python.data.ops import dataset_ops +from tensorflow.python.framework import combinations +from tensorflow.python.platform import test + + +def _test_combinations(): + # pylint: disable=g-long-lambda + cases = [ + ("Batch1", + lambda: dataset_ops.Dataset.range(5).batch(2, drop_remainder=True), 2), + ("Batch2", + lambda: dataset_ops.Dataset.range(5).batch(2, drop_remainder=False), 3), + ("Batch3", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).batch(2), + np.nan), + ("Batch4", lambda: dataset_ops.Dataset.range(5).repeat().batch(2), + np.inf), + ("Cache1", lambda: dataset_ops.Dataset.range(5).cache(), 5), + ("Cache2", lambda: dataset_ops.Dataset.range(5).cache("foo"), 5), + ("Concatenate1", lambda: dataset_ops.Dataset.range(5).concatenate( + dataset_ops.Dataset.range(5)), 10), + ("Concatenate2", lambda: dataset_ops.Dataset.range(5).filter( + lambda _: True).concatenate(dataset_ops.Dataset.range(5)), np.nan), + ("Concatenate3", lambda: dataset_ops.Dataset.range(5).repeat(). 
+ concatenate(dataset_ops.Dataset.range(5)), np.inf), + ("Concatenate4", lambda: dataset_ops.Dataset.range(5).concatenate( + dataset_ops.Dataset.range(5).filter(lambda _: True)), np.nan), + ("Concatenate5", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).concatenate( + dataset_ops.Dataset.range(5).filter(lambda _: True)), np.nan), + ("Concatenate6", + lambda: dataset_ops.Dataset.range(5).repeat().concatenate( + dataset_ops.Dataset.range(5).filter(lambda _: True)), np.inf), + ("Concatenate7", lambda: dataset_ops.Dataset.range(5).concatenate( + dataset_ops.Dataset.range(5).repeat()), np.inf), + ("Concatenate8", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).concatenate( + dataset_ops.Dataset.range(5).repeat()), np.inf), + ("Concatenate9", lambda: dataset_ops.Dataset.range(5).repeat(). + concatenate(dataset_ops.Dataset.range(5).repeat()), np.inf), + ("FlatMap", lambda: dataset_ops.Dataset.range(5).flat_map( + lambda _: dataset_ops.Dataset.from_tensors(0)), np.nan), + ("Filter", lambda: dataset_ops.Dataset.range(5).filter(lambda _: True), + np.nan), + ("FromTensors1", lambda: dataset_ops.Dataset.from_tensors(0), 1), + ("FromTensors2", lambda: dataset_ops.Dataset.from_tensors((0, 1)), 1), + ("FromTensorSlices1", + lambda: dataset_ops.Dataset.from_tensor_slices([0, 0, 0]), 3), + ("FromTensorSlices2", lambda: dataset_ops.Dataset.from_tensor_slices( + ([0, 0, 0], [1, 1, 1])), 3), + ("Interleave1", lambda: dataset_ops.Dataset.range(5).interleave( + lambda _: dataset_ops.Dataset.from_tensors(0), cycle_length=1), + np.nan), + ("Interleave2", lambda: dataset_ops.Dataset.range(5).interleave( + lambda _: dataset_ops.Dataset.from_tensors(0), + cycle_length=1, + num_parallel_calls=1), np.nan), + ("Map1", lambda: dataset_ops.Dataset.range(5).map(lambda x: x), 5), + ("Map2", lambda: dataset_ops.Dataset.range(5).map( + lambda x: x, num_parallel_calls=1), 5), + ("PaddedBatch1", lambda: dataset_ops.Dataset.range(5).padded_batch( + 2, [], 
drop_remainder=True), 2), + ("PaddedBatch2", lambda: dataset_ops.Dataset.range(5).padded_batch( + 2, [], drop_remainder=False), 3), + ("PaddedBatch3", lambda: dataset_ops.Dataset.range(5).filter( + lambda _: True).padded_batch(2, []), np.nan), + ("PaddedBatch4", + lambda: dataset_ops.Dataset.range(5).repeat().padded_batch(2, []), + np.inf), + ("Prefetch", lambda: dataset_ops.Dataset.range(5).prefetch(buffer_size=1), + 5), + ("Range1", lambda: dataset_ops.Dataset.range(0), 0), + ("Range2", lambda: dataset_ops.Dataset.range(5), 5), + ("Range3", lambda: dataset_ops.Dataset.range(5, 10), 5), + ("Range4", lambda: dataset_ops.Dataset.range(10, 5), 0), + ("Range5", lambda: dataset_ops.Dataset.range(5, 10, 2), 3), + ("Range6", lambda: dataset_ops.Dataset.range(10, 5, -2), 3), + ("Repeat1", lambda: dataset_ops.Dataset.range(0).repeat(0), 0), + ("Repeat2", lambda: dataset_ops.Dataset.range(1).repeat(0), 0), + ("Repeat3", lambda: dataset_ops.Dataset.range(0).repeat(5), 0), + ("Repeat4", lambda: dataset_ops.Dataset.range(1).repeat(5), 5), + ("Repeat5", lambda: dataset_ops.Dataset.range(0).repeat(), 0), + ("Repeat6", lambda: dataset_ops.Dataset.range(1).repeat(), np.inf), + ("Shuffle", lambda: dataset_ops.Dataset.range(5).shuffle(buffer_size=1), + 5), + ("Shard1", lambda: dataset_ops.Dataset.range(5).shard(2, 0), 3), + ("Shard2", lambda: dataset_ops.Dataset.range(5).shard(8, 7), 0), + ("Shard3", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).shard(2, 0), + np.nan), + ("Shard4", lambda: dataset_ops.Dataset.range(5).repeat().shard(2, 0), + np.inf), + ("Skip1", lambda: dataset_ops.Dataset.range(5).skip(2), 3), + ("Skip2", lambda: dataset_ops.Dataset.range(5).skip(8), 0), + ("Skip3", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).skip(2), + np.nan), + ("Skip4", lambda: dataset_ops.Dataset.range(5).repeat().skip(2), np.inf), + ("Take1", lambda: dataset_ops.Dataset.range(5).take(2), 2), + ("Take2", lambda: dataset_ops.Dataset.range(5).take(8), 5), + 
("Take3", + lambda: dataset_ops.Dataset.range(5).filter(lambda _: True).take(2), + np.nan), + ("Take4", lambda: dataset_ops.Dataset.range(5).repeat().take(2), 2), + ("Window1", lambda: dataset_ops.Dataset.range(5).window( + size=2, shift=2, drop_remainder=True), 2), + ("Window2", lambda: dataset_ops.Dataset.range(5).window( + size=2, shift=2, drop_remainder=False), 3), + ("Zip1", lambda: dataset_ops.Dataset.zip(dataset_ops.Dataset.range(5)), + 5), + ("Zip2", lambda: dataset_ops.Dataset.zip( + (dataset_ops.Dataset.range(5), dataset_ops.Dataset.range(3))), 3), + ("Zip3", lambda: dataset_ops.Dataset.zip((dataset_ops.Dataset.range( + 5), dataset_ops.Dataset.range(3).repeat())), 5), + ("Zip4", lambda: dataset_ops.Dataset.zip((dataset_ops.Dataset.range( + 5).repeat(), dataset_ops.Dataset.range(3).repeat())), np.inf), + ("Zip5", lambda: dataset_ops.Dataset.zip((dataset_ops.Dataset.range( + 5), dataset_ops.Dataset.range(3).filter(lambda _: True))), np.nan), + ] + + def reduce_fn(x, y): + name, dataset_fn, expected_result = y + return x + combinations.combine( + dataset_fn=combinations.NamedObject(name, dataset_fn), + expected_result=expected_result) + + return functools.reduce(reduce_fn, cases, []) + + +class CardinalityTest(test_base.DatasetTestBase, parameterized.TestCase): + """Tests for `tf.data.Dataset.cardinality()`.""" + + @combinations.generate( + combinations.times(test_base.default_test_combinations(), + _test_combinations())) + def testCardinality(self, dataset_fn, expected_result): + dataset = dataset_fn() + if np.isnan(expected_result): + self.assertTrue(np.isnan(self.evaluate(dataset.cardinality()))) + else: + self.assertEqual(self.evaluate(dataset.cardinality()), expected_result) + + +if __name__ == "__main__": + test.main() diff --git a/tensorflow/python/data/ops/dataset_ops.py b/tensorflow/python/data/ops/dataset_ops.py index d41aeb22d87..9ae560f71db 100644 --- a/tensorflow/python/data/ops/dataset_ops.py +++ b/tensorflow/python/data/ops/dataset_ops.py @@ 
-2080,6 +2080,36 @@ name=None)) """ return _OptionsDataset(self, options) + def cardinality(self): + """Returns the (statically known) cardinality of the dataset. + + The returned cardinality may be infinite or unknown. The latter will be + returned if static analysis fails to determine the number of elements in + `dataset` (e.g. when the dataset source is a file). + + Note: To provide an idiomatic representation for infinite and unknown + cardinality, this method returns a 64-bit floating point number. As a + consequence, the returned cardinality will be approximate for datasets + whose integer cardinality cannot be accurately represented by a 64-bit + floating point number (i.e. cardinalities greater than 2^53). + + >>> dataset = tf.data.Dataset.range(42) + >>> print(dataset.cardinality().numpy()) + 42.0 + >>> dataset = dataset.repeat() + >>> print(dataset.cardinality().numpy() == np.inf) + True + >>> dataset = dataset.filter(lambda x: True) + >>> print(np.isnan(dataset.cardinality().numpy())) + True + + Returns: + A scalar `tf.float64` `Tensor` representing the cardinality of the + dataset. If the cardinality is infinite or unknown, the operation returns + IEEE 754 representation of infinity and NaN respectively.
+ """ + return ged_ops.dataset_cardinality_v2(self._variant_tensor) + @tf_export(v1=["data.Dataset"]) class DatasetV1(DatasetV2): diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.-dataset.pbtxt index 6bf7e809604..df5f7761b07 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.-dataset.pbtxt @@ -41,6 +41,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.-fixed-length-record-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.-fixed-length-record-dataset.pbtxt index 7af13486d3d..3488398a955 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.-fixed-length-record-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.-fixed-length-record-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.-t-f-record-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.-t-f-record-dataset.pbtxt index 3e586a48947..ba554656ba1 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.-t-f-record-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.-t-f-record-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: 
"args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.-text-line-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.-text-line-dataset.pbtxt index b838fe0f336..061ccb70f6d 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.-text-line-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.-text-line-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-csv-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-csv-dataset.pbtxt index 9c1adbad96c..f1a99bf2b21 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-csv-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-csv-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-random-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-random-dataset.pbtxt index 
905e0d6f8fe..140c1355285 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-random-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-random-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-sql-dataset.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-sql-dataset.pbtxt index 8eee750860e..a41f7cdfedf 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-sql-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.data.experimental.-sql-dataset.pbtxt @@ -43,6 +43,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v1/tensorflow.raw_ops.pbtxt b/tensorflow/tools/api/golden/v1/tensorflow.raw_ops.pbtxt index cf6b807502c..68de2d3b478 100644 --- a/tensorflow/tools/api/golden/v1/tensorflow.raw_ops.pbtxt +++ b/tensorflow/tools/api/golden/v1/tensorflow.raw_ops.pbtxt @@ -944,6 +944,10 @@ tf_module { name: "DatasetCardinality" argspec: "args=[\'input_dataset\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], " } + member_method { + name: "DatasetCardinalityV2" + argspec: "args=[\'input_dataset\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], " + } member_method { name: "DatasetFromGraph" argspec: 
"args=[\'graph_def\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], " diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.-dataset.pbtxt index acc6334055f..c52d26ec6ff 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.-dataset.pbtxt @@ -28,6 +28,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.-fixed-length-record-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.-fixed-length-record-dataset.pbtxt index 1c5ab59020e..aa27517a73f 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.-fixed-length-record-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.-fixed-length-record-dataset.pbtxt @@ -30,6 +30,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.-t-f-record-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.-t-f-record-dataset.pbtxt index 18c77bf4289..7864c08540c 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.-t-f-record-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.-t-f-record-dataset.pbtxt @@ -29,6 +29,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, 
keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.-text-line-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.-text-line-dataset.pbtxt index 6ebe24206ab..1aeaac23b4a 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.-text-line-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.-text-line-dataset.pbtxt @@ -30,6 +30,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-csv-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-csv-dataset.pbtxt index 41865c9700f..087eb1a3860 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-csv-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-csv-dataset.pbtxt @@ -30,6 +30,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-random-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-random-dataset.pbtxt index ae905aa1fea..ac00eaf018a 100644 --- 
a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-random-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-random-dataset.pbtxt @@ -30,6 +30,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-sql-dataset.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-sql-dataset.pbtxt index 3f274660402..8fb4318379e 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-sql-dataset.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.data.experimental.-sql-dataset.pbtxt @@ -30,6 +30,10 @@ tf_class { name: "cache" argspec: "args=[\'self\', \'filename\'], varargs=None, keywords=None, defaults=[\'\'], " } + member_method { + name: "cardinality" + argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None" + } member_method { name: "concatenate" argspec: "args=[\'self\', \'dataset\'], varargs=None, keywords=None, defaults=None" diff --git a/tensorflow/tools/api/golden/v2/tensorflow.raw_ops.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.raw_ops.pbtxt index cf6b807502c..68de2d3b478 100644 --- a/tensorflow/tools/api/golden/v2/tensorflow.raw_ops.pbtxt +++ b/tensorflow/tools/api/golden/v2/tensorflow.raw_ops.pbtxt @@ -944,6 +944,10 @@ tf_module { name: "DatasetCardinality" argspec: "args=[\'input_dataset\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], " } + member_method { + name: "DatasetCardinalityV2" + argspec: "args=[\'input_dataset\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], " + } member_method { name: "DatasetFromGraph" argspec: "args=[\'graph_def\', \'name\'], varargs=None, 
keywords=None, defaults=[\'None\'], "