Merge pull request #41355 from lgeiger/cherry-pick-abc-collections
[CherryPick:r2.3] Fix deprecated usage of collections ABC
commit 88b3e4b855

Changed directories under tensorflow/python:
  compiler/xla
  data
  debug/wrappers
  distribute
  framework
  keras
  ops
  tools
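For context: Python 3.3 moved the container ABCs (`Sequence`, `Mapping`, `Iterable`, `Iterator`, `Sized`, ...) from `collections` to `collections.abc`, and Python 3.10 removes the old aliases, so recent interpreters warn on the old spelling. A minimal sketch of the failure mode and the fix, using stock `collections.abc` to stand in for TensorFlow's `collections_abc` compat alias:

import collections.abc

# Old spelling (deprecated since Python 3.3, removed in Python 3.10):
#   isinstance([1, 2], collections.Sequence)  # DeprecationWarning, later AttributeError
# New spelling, as applied throughout this PR:
print(isinstance([1, 2], collections.abc.Sequence))    # True
print(isinstance({"a": 1}, collections.abc.Mapping))   # True
print(isinstance(iter([]), collections.abc.Iterator))  # True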
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections
 import contextlib
 
 from six.moves import xrange  # pylint: disable=redefined-builtin
@@ -37,6 +36,7 @@ from tensorflow.python.platform import tf_logging as logging
 from tensorflow.python.util import compat
 from tensorflow.python.util import nest
 from tensorflow.python.util import tf_inspect
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import tf_export
 
 _XLA_COMPILE_ATTR = '_xla_compile_id'
@@ -329,7 +329,7 @@ def _compile_internal(computation, inputs=None):
   if inputs is None:
     inputs = []
 
-  if not isinstance(inputs, collections.Sequence):
+  if not isinstance(inputs, collections_abc.Sequence):
     raise TypeError('inputs must be a list')
 
   # Flatten inputs.
@@ -428,15 +428,15 @@ def is_flat(outputs):
   """
   # If outputs is a list or tuple, check if it has any nested structure. If
   # there is, then outputs is non-flat.
-  if isinstance(outputs, collections.Sequence):
+  if isinstance(outputs, collections_abc.Sequence):
     for o in outputs:
-      if (isinstance(o, collections.Sequence) or
-          isinstance(o, collections.Mapping) or
+      if (isinstance(o, collections_abc.Sequence) or
+          isinstance(o, collections_abc.Mapping) or
           hasattr(o.__class__, '__attrs_attrs__')):
         return False
 
   # If outputs is a dict, it is non-flat.
-  if isinstance(outputs, collections.Mapping):
+  if isinstance(outputs, collections_abc.Mapping):
     return False
 
   # If outputs is from the attrs library, it is non-flat.
@@ -467,7 +467,7 @@ def _postprocess_flat_outputs(outputs):
   if outputs is None:
     outputs = tuple()
   # If the computation only returned one value, make it a tuple.
-  if not isinstance(outputs, collections.Sequence):
+  if not isinstance(outputs, collections_abc.Sequence):
     outputs = (outputs,)
 
   # Append `no_op` here so that return value of this function always contains
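The `is_flat` hunk is the densest of these rewrites. A self-contained sketch of the same logic, with plain `collections.abc` in place of TF's compat alias (note that strings count as sequences here, as in the original):

import collections.abc

def is_flat_sketch(outputs):
  # Mirrors the hunk above: a sequence is non-flat if any element is
  # itself a sequence, a mapping, or an attrs-decorated object.
  if isinstance(outputs, collections.abc.Sequence):
    for o in outputs:
      if (isinstance(o, (collections.abc.Sequence, collections.abc.Mapping)) or
          hasattr(o.__class__, '__attrs_attrs__')):
        return False
  # A mapping at the top level is non-flat as well.
  if isinstance(outputs, collections.abc.Mapping):
    return False
  return True

print(is_flat_sketch([1, 2, 3]))  # True
print(is_flat_sketch([[1], 2]))   # False
print(is_flat_sketch({'a': 1}))   # False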
@@ -18,7 +18,6 @@ from __future__ import division
 from __future__ import print_function
 
 import abc
-import collections
 import functools
 import sys
 import threading
@@ -72,6 +71,7 @@ from tensorflow.python.util import deprecation
 from tensorflow.python.util import function_utils
 from tensorflow.python.util import lazy_loader
 from tensorflow.python.util import nest as tf_nest
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import tf_export
 
 # Loaded lazily due to a circular dependency (roughly
@@ -103,7 +103,7 @@ tf_export("data.UNKNOWN_CARDINALITY").export_constant(__name__, "UNKNOWN")
 
 @tf_export("data.Dataset", v1=[])
 @six.add_metaclass(abc.ABCMeta)
-class DatasetV2(collections.Iterable, tracking_base.Trackable,
+class DatasetV2(collections_abc.Iterable, tracking_base.Trackable,
                 composite_tensor.CompositeTensor):
   """Represents a potentially large set of elements.
 
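`DatasetV2` now inherits from the `Iterable` ABC. Subclassing `collections.abc.Iterable` obliges the class to implement `__iter__`; a toy illustration (class and names are mine, not from the PR):

import collections.abc

class Squares(collections.abc.Iterable):
  """Minimal Iterable subclass: __iter__ is the one abstract method."""

  def __init__(self, n):
    self._n = n

  def __iter__(self):
    return iter(i * i for i in range(self._n))

print(list(Squares(4)))                                  # [0, 1, 4, 9]
print(isinstance(Squares(4), collections.abc.Iterable))  # True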
@@ -18,7 +18,6 @@ from __future__ import division
 from __future__ import print_function
 
 import abc
-import collections
 import threading
 import warnings
 
@@ -41,6 +40,7 @@ from tensorflow.python.ops import gen_experimental_dataset_ops
 from tensorflow.python.training.saver import BaseSaverBuilder
 from tensorflow.python.training.tracking import base as trackable
 from tensorflow.python.util import deprecation
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import tf_export
 
 
@@ -543,7 +543,7 @@ class IteratorResourceDeleter(object):
 
 @tf_export("data.Iterator", v1=[])
 @six.add_metaclass(abc.ABCMeta)
-class IteratorBase(collections.Iterator, trackable.Trackable,
+class IteratorBase(collections_abc.Iterator, trackable.Trackable,
                    composite_tensor.CompositeTensor):
   """Represents an iterator of a `tf.data.Dataset`.
 
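Likewise, `IteratorBase` now derives from the `Iterator` ABC, which requires `__next__` and supplies `__iter__` as a mixin. A toy sketch (my names, not TF's):

import collections.abc

class CountDown(collections.abc.Iterator):
  """Minimal Iterator subclass: define __next__, inherit __iter__."""

  def __init__(self, n):
    self._n = n

  def __next__(self):
    if self._n <= 0:
      raise StopIteration
    self._n -= 1
    return self._n

print(list(CountDown(3)))  # [2, 1, 0]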
@@ -440,7 +440,7 @@ def type_spec_from_value(element, use_fallback=True):
 
   if isinstance(element, tuple):
     if hasattr(element, "_fields") and isinstance(
-        element._fields, collections.Sequence) and all(
+        element._fields, collections_abc.Sequence) and all(
             isinstance(f, six.string_types) for f in element._fields):
       if isinstance(element, wrapt.ObjectProxy):
         element_type = type(element.__wrapped__)
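This hunk is a duck-typed namedtuple check: a tuple whose `_fields` attribute is a sequence of strings. A standalone version of the same test, with `str` standing in for the `six.string_types` the TF code uses:

import collections
import collections.abc

def looks_like_namedtuple(element):
  # A namedtuple is a tuple subclass whose _fields is a sequence of strings.
  return (isinstance(element, tuple) and
          hasattr(element, "_fields") and
          isinstance(element._fields, collections.abc.Sequence) and
          all(isinstance(f, str) for f in element._fields))

Point = collections.namedtuple("Point", ["x", "y"])
print(looks_like_namedtuple(Point(1, 2)))  # True
print(looks_like_namedtuple((1, 2)))       # False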
@@ -99,7 +99,6 @@ from __future__ import division
 from __future__ import print_function
 
 import abc
-import collections
 import re
 import threading
 
@@ -113,6 +112,7 @@ from tensorflow.python.framework import ops
 from tensorflow.python.platform import tf_logging
 from tensorflow.python.training import monitored_session
 from tensorflow.python.util import nest
+from tensorflow.python.util.compat import collections_abc
 
 
 # Helper function.
@@ -445,7 +445,7 @@ class BaseDebugWrapperSession(session.SessionInterface):
     """Check whether a possibly nested structure is empty."""
     if not nest.is_nested(x):
       return False
-    if isinstance(x, collections.Mapping):
+    if isinstance(x, collections_abc.Mapping):
       return is_empty(list(x.values()))
     for item in x:
      if not is_empty(item):
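The `is_empty` helper is truncated in this hunk; it recurses into mappings by their values and into other nested structures element-wise. A self-contained approximation, substituting a plain list/tuple/dict test for the `nest.is_nested` call the original uses:

import collections.abc

def is_empty(x):
  # Approximation of the wrapped helper: non-nested values are non-empty;
  # a mapping is empty iff all its values are; a sequence is empty iff
  # all its items are.
  if not isinstance(x, (list, tuple, dict)):
    return False
  if isinstance(x, collections.abc.Mapping):
    return is_empty(list(x.values()))
  for item in x:
    if not is_empty(item):
      return False
  return True

print(is_empty([]))          # True
print(is_empty({'a': []}))   # True
print(is_empty({'a': [1]}))  # False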
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections
 import functools
 import sys
 
@@ -53,6 +52,7 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops.ragged import ragged_tensor
 from tensorflow.python.types import distribute as distribute_types
 from tensorflow.python.util import nest
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.deprecation import deprecated
 from tensorflow.python.util.tf_export import tf_export
 from tensorflow.tools.docs import doc_controls
@@ -143,7 +143,7 @@ def get_distributed_datasets_from_function(dataset_fn,
 
 
 @tf_export("distribute.DistributedIterator", v1=[])
-class DistributedIteratorInterface(collections.Iterator,
+class DistributedIteratorInterface(collections_abc.Iterator,
                                    distribute_types.Iterator):
   """An iterator over `tf.distribute.DistributedDataset`.
 
@@ -272,7 +272,7 @@ class DistributedIteratorInterface(collections.Iterator,
 
 
 @tf_export("distribute.DistributedDataset", v1=[])
-class DistributedDatasetInterface(collections.Iterable,
+class DistributedDatasetInterface(collections_abc.Iterable,
                                   distribute_types.Iterable):
   # pylint: disable=line-too-long
   """Represents a dataset distributed among devices and machines.
@@ -32,6 +32,7 @@ from tensorflow.python.framework import tensor_conversion_registry
 from tensorflow.python.framework import tensor_shape
 from tensorflow.python.framework import type_spec
 from tensorflow.python.types import internal
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.lazy_loader import LazyLoader
 from tensorflow.python.util.tf_export import tf_export
 
@@ -344,7 +345,7 @@ def internal_convert_n_to_tensor_or_indexed_slices(values,
     RuntimeError: If a registered conversion function returns an invalid
       value.
   """
-  if not isinstance(values, collections.Iterable):
+  if not isinstance(values, collections_abc.Iterable):
     raise TypeError("values must be iterable.")
   ret = []
   for i, value in enumerate(values):
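These `isinstance(..., Iterable)` checks work without any registration because the ABC's subclass hook recognizes any class defining `__iter__`. A quick demonstration (class name is mine):

import collections.abc

class Stream:
  # No ABC inheritance needed: Iterable's __subclasshook__ accepts any
  # class that defines __iter__, which is what the check above relies on.
  def __iter__(self):
    return iter(())

print(isinstance(Stream(), collections.abc.Iterable))  # True
print(isinstance(42, collections.abc.Iterable))        # False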
@@ -19,7 +19,6 @@ from __future__ import print_function
 
 import abc
 import atexit
-import collections
 from collections import OrderedDict
 import functools
 import multiprocessing.pool
@@ -617,7 +616,7 @@ def standardize_sample_or_class_weights(x_weight, output_names, weight_type):
                        'You should provide one `' + weight_type + '`'
                        'array per model output.')
     return x_weight
-  if isinstance(x_weight, collections.Mapping):
+  if isinstance(x_weight, collections_abc.Mapping):
     generic_utils.check_for_unexpected_keys(weight_type, x_weight, output_names)
     x_weights = []
     for name in output_names:
@@ -864,7 +863,7 @@ def collect_per_output_metric_info(metrics,
           [metrics_module.clone_metric(m) for m in metrics])
     else:
       nested_metrics = [metrics]
-  elif isinstance(metrics, collections.Mapping):
+  elif isinstance(metrics, collections_abc.Mapping):
     generic_utils.check_for_unexpected_keys('metrics', metrics, output_names)
     nested_metrics = []
     for name in output_names:
@@ -1443,7 +1442,7 @@ def prepare_sample_weight_modes(training_endpoints, sample_weight_mode):
     ValueError: In case of invalid `sample_weight_mode` input.
   """
 
-  if isinstance(sample_weight_mode, collections.Mapping):
+  if isinstance(sample_weight_mode, collections_abc.Mapping):
     generic_utils.check_for_unexpected_keys(
         'sample_weight_mode', sample_weight_mode,
         [e.output_name for e in training_endpoints])
@@ -1536,7 +1535,7 @@ def prepare_loss_weights(training_endpoints, loss_weights=None):
   if loss_weights is None:
     for e in training_endpoints:
       e.loss_weight = 1.
-  elif isinstance(loss_weights, collections.Mapping):
+  elif isinstance(loss_weights, collections_abc.Mapping):
     generic_utils.check_for_unexpected_keys(
         'loss_weights', loss_weights,
         [e.output_name for e in training_endpoints])
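All four Keras hunks test for the `Mapping` ABC rather than `dict`, which keeps dict subclasses and other mapping types working. A quick illustration:

import collections
import collections.abc

# Mapping-based checks accept any mapping, not just plain dict:
print(isinstance({'output_1': 0.5}, collections.abc.Mapping))                      # True
print(isinstance(collections.OrderedDict(output_1=0.5), collections.abc.Mapping))  # True
print(isinstance(collections.defaultdict(float), collections.abc.Mapping))         # True
print(isinstance([('output_1', 0.5)], collections.abc.Mapping))                    # False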
@@ -18,11 +18,10 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections
-
 import numpy as np
 
 from tensorflow.python.platform import test
+from tensorflow.python.util.compat import collections_abc
 
 
 class PreprocessingLayerTest(test.TestCase):
@@ -38,7 +37,7 @@ class PreprocessingLayerTest(test.TestCase):
       self.assertEqual(len(a), len(b))
       for a_value, b_value in zip(a, b):
         self.assertAllCloseOrEqual(a_value, b_value, msg=msg)
-    elif isinstance(a, collections.Mapping):
+    elif isinstance(a, collections_abc.Mapping):
       self.assertEqual(len(a), len(b))
       for key, a_value in a.items():
         b_value = b[key]
@@ -44,14 +44,10 @@ from tensorflow.python.platform import tf_logging as logging
 from tensorflow.python.training.tracking import base as trackable
 from tensorflow.python.training.tracking import data_structures
 from tensorflow.python.util import nest
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import keras_export
 from tensorflow.tools.docs import doc_controls
 
-try:
-  from collections import abc as collections_abc  # pylint: disable=g-import-not-at-top
-except ImportError:  # For Python 2
-  import collections as collections_abc  # pylint: disable=g-import-not-at-top
-
 
 RECURRENT_DROPOUT_WARNING_MSG = (
     'RNN `implementation=2` is not supported when `recurrent_dropout` is set. '
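The removed `try`/`except` is a per-file Python 2/3 shim; the PR replaces it with the shared alias in `tensorflow.python.util.compat`. For reference, the shim pattern itself, reformatted standalone:

try:
  from collections import abc as collections_abc  # Python 3.3+
except ImportError:  # Python 2 has no collections.abc
  import collections as collections_abc

# Either way, collections_abc.Mapping / .Sequence / .Iterable resolve.
print(collections_abc.Mapping)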
@@ -70,8 +70,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections
-
 import numpy as np
 import six
 from six.moves import builtins
@@ -100,6 +98,7 @@ from tensorflow.python.util import compat
 from tensorflow.python.util import deprecation
 from tensorflow.python.util import dispatch
 from tensorflow.python.util import nest
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import tf_export
 
 # Aliases for some automatically-generated names.
@@ -3493,7 +3492,7 @@ def add_n(inputs, name=None):
     ValueError: If `inputs` don't all have same shape and dtype or the shape
       cannot be inferred.
   """
-  if not inputs or not isinstance(inputs, collections.Iterable):
+  if not inputs or not isinstance(inputs, collections_abc.Iterable):
     raise ValueError("inputs must be an iterable of at least one "
                      "Tensor/IndexedSlices with the same dtype and shape")
   inputs = ops.convert_n_to_tensor_or_indexed_slices(inputs)
@@ -3626,9 +3625,9 @@ def sigmoid(x, name=None):
 
   Returns:
     A Tensor with the same type as `x`.
 
   Usage Example:
 
   >>> x = tf.constant([-128.0, 0.0, 128.0], dtype=tf.float32)
   >>> tf.sigmoid(x)
   <tf.Tensor: shape=(3,), dtype=float32,
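The `add_n` guard above rejects empty or non-iterable inputs before doing any conversion work. A standalone sketch of that check (function name is mine):

import collections.abc

def check_add_n_inputs(inputs):
  # Same guard as the add_n hunk: reject falsy (e.g. empty) inputs and
  # anything that is not iterable, before any conversion happens.
  if not inputs or not isinstance(inputs, collections.abc.Iterable):
    raise ValueError("inputs must be an iterable of at least one "
                     "Tensor/IndexedSlices with the same dtype and shape")

check_add_n_inputs([1.0, 2.0])  # OK
# check_add_n_inputs([])        # raises ValueError
# check_add_n_inputs(3.0)       # raises ValueError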
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections
 import functools
 import numbers
 import os
@@ -3270,7 +3269,7 @@ def conv_transpose(input,  # pylint: disable=redefined-builtin
                        [input, filter, output_shape]) as name:
     if tensor_util.is_tensor(output_shape):
       n = output_shape.shape[0] - 2
-    elif isinstance(output_shape, collections.Sized):
+    elif isinstance(output_shape, collections_abc.Sized):
       n = len(output_shape) - 2
     else:
       raise ValueError("output_shape must be a tensor or sized collection.")
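`Sized` is the narrowest ABC used in this PR: it only promises that `len()` works, which is all `conv_transpose` needs to infer the spatial rank. For instance:

import collections.abc

# Lists and tuples have __len__, so they are Sized; generators are not.
print(isinstance([1, 28, 28, 3], collections.abc.Sized))               # True
print(isinstance((1, 28, 28, 3), collections.abc.Sized))               # True
print(isinstance((d for d in (1, 28, 28, 3)), collections.abc.Sized))  # False

output_shape = [1, 28, 28, 3]
n = len(output_shape) - 2  # spatial rank, as in the hunk above
print(n)  # 2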
@@ -18,7 +18,6 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-import collections as collections_lib
 import copy
 import enum  # pylint: disable=g-bad-import-order
 import functools
@@ -47,6 +46,7 @@ from tensorflow.python.util import deprecation
 from tensorflow.python.util import function_utils
 from tensorflow.python.util import tf_contextlib
 from tensorflow.python.util import tf_inspect
+from tensorflow.python.util.compat import collections_abc
 from tensorflow.python.util.tf_export import tf_export
 
 __all__ = [
@@ -77,13 +77,13 @@ class _PartitionInfo(object):
       ValueError: If `full_shape` or `var_offset` differ in length. If
         `var_offset` exceeds `full_shape` in any dimension.
     """
-    if not isinstance(full_shape, collections_lib.Sequence) or isinstance(
+    if not isinstance(full_shape, collections_abc.Sequence) or isinstance(
        full_shape, six.string_types):
      raise TypeError(
          "`full_shape` must be a sequence (like tuple or list) instead of " +
          type(full_shape).__name__)

-    if not isinstance(var_offset, collections_lib.Sequence) or isinstance(
+    if not isinstance(var_offset, collections_abc.Sequence) or isinstance(
        var_offset, six.string_types):
      raise TypeError(
          "`var_offset` must be a sequence (like tuple or list) instead of " +
@@ -151,7 +151,7 @@ class _PartitionInfo(object):
      ValueError: If `shape` is not the same length as `self.full_shape`. If
        the variable is partitioned in more than one dimension.
    """
-    if not isinstance(shape, collections_lib.Sequence) or isinstance(
+    if not isinstance(shape, collections_abc.Sequence) or isinstance(
        shape, six.string_types):
      raise TypeError(
          "`shape` must be a sequence (like tuple or list) instead of " +
@@ -451,7 +451,7 @@ class _VariableStore(object):
                synchronization=VariableSynchronization.AUTO,
                aggregation=VariableAggregation.NONE):
    is_scalar = (
-        shape is not None and isinstance(shape, collections_lib.Sequence) and
+        shape is not None and isinstance(shape, collections_abc.Sequence) and
        not shape)
    # Partitioned variable case
    if partitioner is not None and not is_scalar:
@@ -2511,7 +2511,7 @@ def _call_partitioner(partitioner, shape, dtype):
                     "shape: %s" % shape)

  slicing = partitioner(shape=shape, dtype=dtype)
-  if not isinstance(slicing, collections_lib.Sequence):
+  if not isinstance(slicing, collections_abc.Sequence):
    raise ValueError("Partitioner must return a sequence, but saw: %s" %
                     slicing)
  if len(slicing) != shape.ndims:
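The `_PartitionInfo` checks pair the `Sequence` test with a string exclusion, because `str` itself satisfies the `Sequence` ABC. A standalone version of that validation pattern, with `str` standing in for `six.string_types`:

import collections.abc

def check_sequence_arg(name, value):
  # Accept list/tuple-like sequences but reject strings, which also
  # satisfy the Sequence ABC.
  if not isinstance(value, collections.abc.Sequence) or isinstance(value, str):
    raise TypeError("`%s` must be a sequence (like tuple or list) instead of "
                    "%s" % (name, type(value).__name__))

check_sequence_arg("full_shape", [2, 3])  # OK
print(isinstance("2,3", collections.abc.Sequence))  # True, hence the exclusion
# check_sequence_arg("full_shape", "2,3")  # would raise TypeError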
@@ -24,7 +24,6 @@ from __future__ import division
 from __future__ import print_function
 
 import argparse
-import collections
 import os
 import re
 import sys
@@ -51,6 +50,7 @@ from tensorflow.python.saved_model import signature_constants
 from tensorflow.python.tools import saved_model_aot_compile
 from tensorflow.python.tools import saved_model_utils
 from tensorflow.python.tpu import tpu
+from tensorflow.python.util.compat import collections_abc
 
 
 _XLA_DEBUG_OPTIONS_URL = (
@@ -241,7 +241,7 @@ def _print_args(arguments, argument_type='Argument', indent=0):
       in_print('  %s' % element)
     elif isinstance(element, tensor_spec.TensorSpec):
       print((indent + 1) * '  ' + '%s: %s' % (element.name, repr(element)))
-    elif (isinstance(element, collections.Iterable) and
+    elif (isinstance(element, collections_abc.Iterable) and
           not isinstance(element, dict)):
       in_print('  DType: %s' % type(element).__name__)
       in_print('  Value: [', end='')
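One last gotcha this hunk illustrates: nearly every container is `Iterable`, including strings and dicts, which is why `_print_args` pairs the `Iterable` check with an explicit `not isinstance(element, dict)` (strings appear to be handled by an earlier branch). For example:

import collections.abc

print(isinstance([1, 2], collections.abc.Iterable))    # True
print(isinstance("abc", collections.abc.Iterable))     # True
print(isinstance({"a": 1}, collections.abc.Iterable))  # True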