Add a more comprehensive example to the TF v1 documentation for Dataset.make_initializable_iterator().

PiperOrigin-RevId: 305021425
Change-Id: Idf4d0af800957e91ce3a5bb983ec9e74db65075e
This commit is contained in:
A. Unique TensorFlower 2020-04-06 07:10:20 -07:00 committed by TensorFlower Gardener
parent cf88f3c5d2
commit 8c9bfce2ee

View File

@@ -2103,15 +2103,40 @@ class DatasetV1(DatasetV2):
     raise NotImplementedError("Dataset._as_variant_tensor")

   @deprecation.deprecated(
-      None, "Use `for ... in dataset:` to iterate over a dataset. If using "
-      "`tf.estimator`, return the `Dataset` object directly from your input "
-      "function. As a last resort, you can use "
-      "`tf.compat.v1.data.make_one_shot_iterator(dataset)`.")
+      None, "This is a deprecated API that should only be used in TF 1 graph "
+      "mode and legacy TF 2 graph mode available through `tf.compat.v1`. In "
+      "all other situations -- namely, eager mode and inside `tf.function` -- "
+      "you can consume dataset elements using `for elem in dataset: ...` or "
+      "by explicitly creating iterator via `iterator = iter(dataset)` and "
+      "fetching its elements via `values = next(iterator)`. Furthermore, "
+      "this API is not available in TF 2. During the transition from TF 1 "
+      "to TF 2 you can use `tf.compat.v1.data.make_one_shot_iterator(dataset)` "
+      "to create a TF 1 graph mode style iterator for a dataset created "
+      "through TF 2 APIs. Note that this should be a transient state of your "
+      "code base as there are in general no guarantees about the "
+      "interoperability of TF 1 and TF 2 code.")
   def make_one_shot_iterator(self):
     """Creates an `Iterator` for enumerating the elements of this dataset.

     Note: The returned iterator will be initialized automatically.
-    A "one-shot" iterator does not currently support re-initialization.
+    A "one-shot" iterator does not currently support re-initialization. For
+    that see `make_initializable_iterator`.
+
+    Example:
+
+    ```python
+    # Building graph ...
+    dataset = ...
+    next_value = dataset.make_one_shot_iterator().get_next()
+
+    # ... from within a session ...
+    try:
+      while True:
+        value = sess.run(next_value)
+        ...
+    except tf.errors.OutOfRangeError:
+        pass
+    ```

     Returns:
       An `Iterator` over the elements of this dataset.
@@ -2170,10 +2195,19 @@ class DatasetV1(DatasetV2):
                              get_legacy_output_classes(self))

   @deprecation.deprecated(
-      None, "Use `for ... in dataset:` to iterate over a dataset. If using "
-      "`tf.estimator`, return the `Dataset` object directly from your input "
-      "function. As a last resort, you can use "
-      "`tf.compat.v1.data.make_initializable_iterator(dataset)`.")
+      None, "This is a deprecated API that should only be used in TF 1 graph "
+      "mode and legacy TF 2 graph mode available through `tf.compat.v1`. "
+      "In all other situations -- namely, eager mode and inside `tf.function` "
+      "-- you can consume dataset elements using `for elem in dataset: ...` "
+      "or by explicitly creating iterator via `iterator = iter(dataset)` "
+      "and fetching its elements via `values = next(iterator)`. "
+      "Furthermore, this API is not available in TF 2. During the transition "
+      "from TF 1 to TF 2 you can use "
+      "`tf.compat.v1.data.make_initializable_iterator(dataset)` to create a TF "
+      "1 graph mode style iterator for a dataset created through TF 2 APIs. "
+      "Note that this should be a transient state of your code base as there "
+      "are in general no guarantees about the interoperability of TF 1 and TF "
+      "2 code.")
   def make_initializable_iterator(self, shared_name=None):
     """Creates an `Iterator` for enumerating the elements of this dataset.
@@ -2181,10 +2215,19 @@ class DatasetV1(DatasetV2):
     and you must run the `iterator.initializer` operation before using it:

     ```python
+    # Building graph ...
     dataset = ...
     iterator = dataset.make_initializable_iterator()
-    # ...
+    next_value = iterator.get_next()  # This is a Tensor.
+
+    # ... from within a session ...
     sess.run(iterator.initializer)
+    try:
+      while True:
+        value = sess.run(next_value)
+        ...
+    except tf.errors.OutOfRangeError:
+      pass
     ```

     Args:
@@ -2198,7 +2241,6 @@ class DatasetV1(DatasetV2):
     Raises:
      RuntimeError: If eager execution is enabled.
    """
-
    return self._make_initializable_iterator(shared_name)

  def _make_initializable_iterator(self, shared_name=None):  # pylint: disable=missing-docstring