Remove references to private name_scope endpoint.

PiperOrigin-RevId: 322623654
Change-Id: Id82f764b7768fdf40db43bea1e32404cc220a850
Author: Francois Chollet, 2020-07-22 11:59:20 -07:00 (committed by TensorFlower Gardener)
parent 70edbdb6c7
commit b7fb9affc8
12 changed files with 26 additions and 26 deletions
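
The pattern applied in every file below is the same: the private v1 endpoint
`ops.name_scope(name, default_name, values)` becomes a public single-argument
form, either `backend.name_scope(...)` / `K.name_scope(...)` (the Keras backend)
or `ops.name_scope_v2(...)`. A minimal sketch of the migration, assuming TF 2.x;
the helper names `make_op_v1`/`make_op_v2` are illustrative and not part of
this commit:

import tensorflow as tf

def make_op_v1(name=None):
  # Before: private v1 endpoint, with a default_name fallback and a `values`
  # list used only for graph bookkeeping.
  with tf.compat.v1.name_scope(name, 'my_default', values=[]):
    return tf.constant(0.0)

def make_op_v2(name=None):
  # After: the public endpoint takes exactly one name, so the fallback moves
  # into the caller as `name or 'my_default'` and `values` is dropped.
  with tf.keras.backend.name_scope(name or 'my_default'):
    return tf.constant(0.0)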

View File

@@ -169,7 +169,7 @@ class BackendUtilsTest(test.TestCase):
       sess.run(y, feed_dict={x: np.random.random((2, 3))})

   def test_learning_phase_name(self):
-    with ops.name_scope('test_scope'):
+    with backend.name_scope('test_scope'):
       # Test that outer name scopes do not affect the learning phase's name.
       lp = backend.symbolic_learning_phase()
       self.assertEqual(lp.name, 'keras_learning_phase:0')

View File

@@ -21,7 +21,6 @@ from __future__ import print_function
 import json

 from tensorflow.python.feature_column import feature_column_v2 as fc
-from tensorflow.python.framework import ops
 from tensorflow.python.keras import backend
 from tensorflow.python.keras.feature_column import base_feature_layer as kfc
 from tensorflow.python.keras.saving.saved_model import json_utils
@@ -161,7 +160,7 @@ class DenseFeatures(kfc._BaseFeaturesLayer):  # pylint: disable=protected-access
     transformation_cache = fc.FeatureTransformationCache(features)
     output_tensors = []
     for column in self._feature_columns:
-      with ops.name_scope(column.name):
+      with backend.name_scope(column.name):
         try:
           tensor = column.get_dense_tensor(
               transformation_cache, self._state_manager, training=training)

View File

@@ -89,7 +89,7 @@ class DenseFeatures(dense_features.DenseFeatures):

   def build(self, _):
     for column in self._feature_columns:
-      with ops.name_scope(column.name):
+      with ops.name_scope_v2(column.name):
         column.create_state(self._state_manager)
     # We would like to call Layer.build and not _DenseFeaturesHelper.build.
     # pylint: disable=protected-access

View File

@@ -143,7 +143,7 @@ class SequenceFeatures(kfc._BaseFeaturesLayer):
     sequence_lengths = []

     for column in self._feature_columns:
-      with ops.name_scope(column.name):
+      with backend.name_scope(column.name):
         try:
           dense_tensor, sequence_length = column.get_sequence_dense_tensor(
               transformation_cache, self._state_manager, training=training)
@@ -164,7 +164,7 @@ class SequenceFeatures(kfc._BaseFeaturesLayer):

 def _assert_all_equal_and_return(tensors, name=None):
   """Asserts that all tensors are equal and returns the first one."""
-  with ops.name_scope(name, 'assert_all_equal', values=tensors):
+  with backend.name_scope(name or 'assert_all_equal'):
     if len(tensors) == 1:
       return tensors[0]
     assert_equal_ops = []
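
Note the `name or 'assert_all_equal'` rewrite above: v1's `default_name`
fallback becomes an explicit expression at the call site. A hedged graph-mode
check of the equivalence, assuming TF 2.x:

import tensorflow as tf

g = tf.Graph()
with g.as_default():
  # Old style: default_name is used when name is None.
  with tf.compat.v1.name_scope(None, 'assert_all_equal'):
    a = tf.constant(1.0)
  # New style: the caller supplies the fallback itself.
  with tf.keras.backend.name_scope('assert_all_equal'):
    b = tf.constant(1.0)
# Both land under an 'assert_all_equal*' prefix; the second scope is
# uniquified to 'assert_all_equal_1' within the same graph.
print(a.name, b.name)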

View File

@@ -33,6 +33,7 @@ from tensorflow.python.framework import ops
 from tensorflow.python.framework import tensor_shape
 from tensorflow.python.framework import tensor_util
 from tensorflow.python.keras import activations
+from tensorflow.python.keras import backend
 from tensorflow.python.keras import initializers
 from tensorflow.python.keras.engine import base_layer_utils
 from tensorflow.python.keras.engine import input_spec
@@ -334,7 +335,7 @@ class RNNCell(base_layer.Layer):
       if (last_batch_size == batch_size and last_dtype == dtype and
           last_state_size == state_size):
         return last_output
-    with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
+    with backend.name_scope(type(self).__name__ + "ZeroState"):
       output = _zero_state_tensors(state_size, batch_size, dtype)
     if is_eager:
       self._last_zero_state = (state_size, batch_size, dtype, output)
@@ -1269,7 +1270,7 @@ class MultiRNNCell(RNNCell):
     return self._cells[-1].output_size

   def zero_state(self, batch_size, dtype):
-    with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
+    with backend.name_scope(type(self).__name__ + "ZeroState"):
       if self._state_is_tuple:
         return tuple(cell.zero_state(batch_size, dtype) for cell in self._cells)
       else:

View File

@@ -113,7 +113,7 @@ class DropoutWrapperBase(object):
       raise TypeError("dropout_state_filter_visitor must be callable")
     self._dropout_state_filter = (
         dropout_state_filter_visitor or _default_dropout_state_filter_visitor)
-    with ops.name_scope("DropoutWrapperInit"):
+    with ops.name_scope_v2("DropoutWrapperInit"):

       def tensor_and_const_value(v):
         tensor_value = ops.convert_to_tensor(v)
@@ -199,7 +199,7 @@ class DropoutWrapperBase(object):
     self.built = True

   def zero_state(self, batch_size, dtype):
-    with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
+    with ops.name_scope_v2(type(self).__name__ + "ZeroState"):
       return self.cell.zero_state(batch_size, dtype)

   def _variational_recurrent_dropout_value(
@@ -346,7 +346,7 @@ class ResidualWrapperBase(object):
     return self.cell.output_size

   def zero_state(self, batch_size, dtype):
-    with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
+    with ops.name_scope_v2(type(self).__name__ + "ZeroState"):
       return self.cell.zero_state(batch_size, dtype)

   def _call_wrapped_cell(self, inputs, state, cell_call_fn, **kwargs):
@@ -433,7 +433,7 @@ class DeviceWrapperBase(object):
     return self.cell.output_size

   def zero_state(self, batch_size, dtype):
-    with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
+    with ops.name_scope_v2(type(self).__name__ + "ZeroState"):
       with ops.device(self._device):
         return self.cell.zero_state(batch_size, dtype)
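
These wrappers call `ops.name_scope_v2` directly; at the time of this commit
that is the same class the Keras backend re-exports as `backend.name_scope`
and TensorFlow exports publicly as `tf.name_scope`, so the spellings are
interchangeable. A small sketch of the ZeroState-style scoping
(`zero_state_like` is an illustrative stand-in, not the commit's code):

import tensorflow as tf

@tf.function
def zero_state_like(batch_size):
  # tf.name_scope is the public export of ops.name_scope_v2; it takes no
  # `values=` argument, which is why the calls above drop it.
  with tf.name_scope("MyCellZeroState"):
    return tf.zeros([batch_size, 8])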

View File

@@ -21,7 +21,7 @@ from __future__ import print_function
 from tensorflow.python.distribute import distribution_strategy_context as ds
 from tensorflow.python.distribute import reduce_util
 from tensorflow.python.framework import dtypes
-from tensorflow.python.framework import ops
+from tensorflow.python.keras import backend
 from tensorflow.python.keras.layers import normalization
 from tensorflow.python.ops import array_ops
 from tensorflow.python.ops import math_ops
@@ -158,7 +158,7 @@ class SyncBatchNormalization(normalization.BatchNormalizationBase):

   def _calculate_mean_and_var(self, x, axes, keep_dims):
-    with ops.name_scope('moments', values=[x, axes]):
+    with backend.name_scope('moments'):
       # The dynamic range of fp16 is too limited to support the collection of
       # sufficient statistics. As a workaround we simply perform the operations
       # on 32-bit floats before converting the mean and variance back to fp16
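
The comment above describes computing the moments in float32 when the input
is fp16. A minimal sketch of that idea using `tf.nn.moments`, not the
distributed statistics the real layer collects:

import tensorflow as tf

def moments_fp16_safe(x, axes):
  # Accumulate mean/variance in float32 to avoid fp16 overflow, then cast
  # the results back to the input dtype.
  if x.dtype == tf.float16:
    y = tf.cast(x, tf.float32)
    mean, var = tf.nn.moments(y, axes)
    return tf.cast(mean, tf.float16), tf.cast(var, tf.float16)
  return tf.nn.moments(x, axes)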

View File

@@ -585,7 +585,7 @@ def get_translation_matrix(translations, name=None):
     A tensor of shape (num_images, 8) projective transforms which can be given
       to `transform`.
   """
-  with ops.name_scope(name, 'translation_matrix'):
+  with K.name_scope(name or 'translation_matrix'):
     num_translations = array_ops.shape(translations)[0]
     # The translation matrix looks like:
     #     [[1 0 -dx]
@@ -665,7 +665,7 @@ def transform(images,
     TypeError: If `image` is an invalid type.
     ValueError: If output shape is not 1-D int32 Tensor.
   """
-  with ops.name_scope(name, 'transform'):
+  with K.name_scope(name or 'transform'):
     if output_shape is None:
       output_shape = array_ops.shape(images)[1:3]
     if not context.executing_eagerly():
@@ -708,7 +708,7 @@ def get_rotation_matrix(angles, image_height, image_width, name=None):
     `(x', y') = ((a0 x + a1 y + a2) / k, (b0 x + b1 y + b2) / k)`,
     where `k = c0 x + c1 y + 1`.
   """
-  with ops.name_scope(name, 'rotation_matrix'):
+  with K.name_scope(name or 'rotation_matrix'):
     x_offset = ((image_width - 1) - (math_ops.cos(angles) *
                                      (image_width - 1) - math_ops.sin(angles) *
                                      (image_height - 1))) / 2.0
@@ -1015,7 +1015,7 @@ def get_zoom_matrix(zooms, image_height, image_width, name=None):
     `(x', y') = ((a0 x + a1 y + a2) / k, (b0 x + b1 y + b2) / k)`,
     where `k = c0 x + c1 y + 1`.
   """
-  with ops.name_scope(name, 'zoom_matrix'):
+  with K.name_scope(name or 'zoom_matrix'):
     num_zooms = array_ops.shape(zooms)[0]
     # The zoom matrix looks like:
     #     [[zx 0 0]
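
For reference, these helpers flatten each 3x3 matrix to the 8-parameter form
[a0, a1, a2, b0, b1, b2, c0, c1] described in the docstrings, mapping an
output pixel (x, y) back to an input pixel. A hedged sketch of a pure
translation in that encoding (`translation_transform` is illustrative, not
one of the commit's helpers):

import tensorflow as tf

def translation_transform(dx, dy):
  # Identity 2x2 block plus offsets; the projective row (c0, c1) is zero.
  # Offsets are negated because the transform maps output coords to input.
  return tf.constant([[1.0, 0.0, -dx, 0.0, 1.0, -dy, 0.0, 0.0]])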

View File

@@ -440,7 +440,7 @@ class Layer(base_layer.Layer):
       with vs.variable_scope(
           self._scope, reuse=reuse, auxiliary_name_scope=False) as scope:
         self._current_scope = scope
-        with ops.name_scope(self._name_scope(), skip_on_eager=False):
+        with backend.name_scope(self._name_scope()):
           use_resource = (use_resource or
                           self._use_resource_variables or
                           scope.use_resource)

View File

@@ -28,6 +28,7 @@ from tensorflow.python.eager import def_function
 from tensorflow.python.framework import constant_op
 from tensorflow.python.framework import dtypes
 from tensorflow.python.framework import ops
+from tensorflow.python.keras import backend
 from tensorflow.python.keras import combinations
 from tensorflow.python.keras.engine import base_layer as keras_base_layer
 from tensorflow.python.keras.engine import input_spec
@@ -68,12 +69,12 @@ class BaseLayerTest(test.TestCase, parameterized.TestCase):
   @combinations.generate(combinations.combine(mode=['graph', 'eager']))
   def testKerasStyleAddWeight(self):
     keras_layer = keras_base_layer.Layer(name='keras_layer')
-    with ops.name_scope('foo', skip_on_eager=False):
+    with backend.name_scope('foo'):
       keras_variable = keras_layer.add_variable(
           'my_var', [2, 2], initializer=init_ops.zeros_initializer())
     self.assertEqual(keras_variable.name, 'foo/my_var:0')

-    with ops.name_scope('baz', skip_on_eager=False):
+    with backend.name_scope('baz'):
       old_style_layer = base_layers.Layer(name='my_layer')
       # Test basic variable creation.
       variable = old_style_layer.add_variable(
@@ -83,7 +84,7 @@ class BaseLayerTest(test.TestCase, parameterized.TestCase):
     with base_layers.keras_style_scope():
       layer = base_layers.Layer(name='my_layer')
       # Test basic variable creation.
-      with ops.name_scope('bar', skip_on_eager=False):
+      with backend.name_scope('bar'):
         variable = layer.add_variable(
             'my_var', [2, 2], initializer=init_ops.zeros_initializer())
         self.assertEqual(variable.name, 'bar/my_var:0')
@@ -241,7 +242,7 @@ class BaseLayerTest(test.TestCase, parameterized.TestCase):
       my_layer2.apply(inputs)
       self.assertEqual(my_layer2._scope.name, 'my_layer_2')
       # Name scope shouldn't affect names.
-      with ops.name_scope('some_name_scope'):
+      with backend.name_scope('some_name_scope'):
         default_layer2 = PrivateLayer()
         default_layer2.apply(inputs)
         self.assertEqual(default_layer2._scope.name, 'private_layer_2')
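
The `skip_on_eager=False` argument disappears in these tests because v2 name
scopes never skip eager execution, which is what keeps names like
'foo/my_var:0' stable across both test modes. A quick check, with the printed
name being my assumption about TF 2.x eager behavior:

import tensorflow as tf

with tf.name_scope('foo'):
  v = tf.Variable(0.0, name='my_var')
print(v.name)  # expected: 'foo/my_var:0' (eager variables pick up the scope)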

View File

@@ -465,7 +465,7 @@ class CheckpointingTests(keras_parameterized.TestCase):
           root, name=name, shape=[1, 2], dtype=dtypes.float64)
       (named_variable,), _, _ = trackable_utils._serialize_object_graph(
           root, saveables_cache=None)
-      with ops.name_scope("root/" + named_variable.name):
+      with ops.name_scope_v2("root/" + named_variable.name):
        pass  # Make sure we can use this as an op name if we prefix it.
       return named_variable.name

View File

@@ -61,8 +61,7 @@ def remove_squeezable_dimensions(
   Returns:
     Tuple of `labels` and `predictions`, possibly with last dim squeezed.
   """
-  with ops.name_scope(name, 'remove_squeezable_dimensions',
-                      [labels, predictions]):
+  with K.name_scope(name or 'remove_squeezable_dimensions'):
     predictions = ops.convert_to_tensor_v2_with_dispatch(predictions)
     labels = ops.convert_to_tensor_v2_with_dispatch(labels)
     predictions_shape = predictions.get_shape()