From ea51dc00f0711f2ca5572ba75597996a215a9f72 Mon Sep 17 00:00:00 2001
From: Andrew Audibert
Date: Wed, 26 Aug 2020 09:46:59 -0700
Subject: [PATCH] [tf.data] Remove unused job_token parameter.

This was originally introduced when we explicitly managed data service job
tokens in Python. Job tokens are now managed in C++ instead, so this
parameter is no longer needed.

This CL also calls super(OwnedIterator, self).__init__() to prevent a lint
error.

PiperOrigin-RevId: 328549745
Change-Id: I1df491f646f39aaf7c6b99e0e7257cd6cd94cc73
---
 tensorflow/python/data/ops/iterator_ops.py | 18 +++---------------
 1 file changed, 3 insertions(+), 15 deletions(-)

diff --git a/tensorflow/python/data/ops/iterator_ops.py b/tensorflow/python/data/ops/iterator_ops.py
index f6f2da0939e..479c8d337a0 100644
--- a/tensorflow/python/data/ops/iterator_ops.py
+++ b/tensorflow/python/data/ops/iterator_ops.py
@@ -36,7 +36,6 @@ from tensorflow.python.framework import tensor_shape
 from tensorflow.python.framework import tensor_spec
 from tensorflow.python.framework import type_spec
 from tensorflow.python.ops import gen_dataset_ops
-from tensorflow.python.ops import gen_experimental_dataset_ops
 from tensorflow.python.training.saver import BaseSaverBuilder
 from tensorflow.python.training.tracking import base as trackable
 from tensorflow.python.util import deprecation
@@ -656,11 +655,7 @@ class OwnedIterator(IteratorBase):
   in eager mode and inside of tf.functions.
   """
 
-  def __init__(self,
-               dataset=None,
-               components=None,
-               element_spec=None,
-               job_token=None):
+  def __init__(self, dataset=None, components=None, element_spec=None):
     """Creates a new iterator from the given dataset.
 
     If `dataset` is not specified, the iterator will be created from the given
@@ -673,20 +668,17 @@ class OwnedIterator(IteratorBase):
       components: Tensor components to construct the iterator from.
       element_spec: A nested structure of `TypeSpec` objects that represents
        the type specification of elements of the iterator.
-      job_token: A token to use for reading from a tf.data service job. Data
-        will be partitioned among all iterators using the same token. If `None`,
-        the iterator will not read from the tf.data service.
 
     Raises:
       ValueError: If `dataset` is not provided and either `components` or
        `element_spec` is not provided. Or `dataset` is provided and either
        `components` and `element_spec` is provided.
     """
+    super(OwnedIterator, self).__init__()
     error_message = ("Either `dataset` or both `components` and "
                      "`element_spec` need to be provided.")
 
     self._device = context.context().device_name
-    self._job_token = job_token
 
     if dataset is None:
       if (components is None or element_spec is None):
@@ -729,11 +721,7 @@
           gen_dataset_ops.anonymous_iterator_v2(
               output_types=self._flat_output_types,
               output_shapes=self._flat_output_shapes))
-      if self._job_token is None:
-        gen_dataset_ops.make_iterator(ds_variant, self._iterator_resource)
-      else:
-        gen_experimental_dataset_ops.make_data_service_iterator(
-            ds_variant, self._job_token, self._iterator_resource)
+      gen_dataset_ops.make_iterator(ds_variant, self._iterator_resource)
       # Delete the resource when this object is deleted
       self._resource_deleter = IteratorResourceDeleter(
           handle=self._iterator_resource,
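
Note: a minimal usage sketch of the code path this change simplifies, assuming
TF 2.x eager execution (the sketch is illustrative and not part of the patch).
Iterating a tf.data.Dataset constructs an OwnedIterator, which after this CL
always wires the dataset to its iterator resource via
gen_dataset_ops.make_iterator; no job_token is threaded through Python.

    import tensorflow as tf

    dataset = tf.data.Dataset.range(5)
    # In eager mode, iter(dataset) builds OwnedIterator(dataset=dataset),
    # which creates the anonymous iterator resource and calls make_iterator.
    iterator = iter(dataset)
    print(next(iterator).numpy())  # prints 0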