Checkpointable->AutoCheckpointable

CheckpointableBase->Checkpointable

In preparation for adding public symbols.

PiperOrigin-RevId: 229459684
Allen Lavoie authored 2019-01-15 16:08:22 -08:00, committed by TensorFlower Gardener
parent c6f456c4eb
commit f9b9cf52eb
357 changed files with 535 additions and 536 deletions
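For orientation, a minimal sketch of how call sites change under this rename, using the internal module aliases (base, tracking) that appear in the hunks below; the snippet is illustrative only and is not part of the commit:

from tensorflow.python.training.checkpointable import base
from tensorflow.python.training.checkpointable import tracking

# CheckpointableBase -> Checkpointable: explicit dependency tracking, no __setattr__ override.
explicit = base.Checkpointable()
# Checkpointable -> AutoCheckpointable: dependencies added automatically on attribute assignment.
automatic = tracking.AutoCheckpointable()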

View File

@ -51,11 +51,11 @@ from tensorflow.contrib.checkpoint.python.split_dependency import split_dependen
from tensorflow.contrib.checkpoint.python.visualize import dot_graph_from_checkpoint
from tensorflow.core.protobuf.checkpointable_object_graph_pb2 import CheckpointableObjectGraph
from tensorflow.python.training.checkpoint_management import CheckpointManager
from tensorflow.python.training.checkpointable.base import CheckpointableBase
from tensorflow.python.training.checkpointable.base import Checkpointable as CheckpointableBase
from tensorflow.python.training.checkpointable.data_structures import List
from tensorflow.python.training.checkpointable.data_structures import Mapping
from tensorflow.python.training.checkpointable.data_structures import NoDependency
from tensorflow.python.training.checkpointable.tracking import Checkpointable
from tensorflow.python.training.checkpointable.tracking import AutoCheckpointable as Checkpointable
from tensorflow.python.training.checkpointable.util import capture_dependencies
from tensorflow.python.training.checkpointable.util import list_objects
from tensorflow.python.training.checkpointable.util import object_metadata

View File

@ -63,7 +63,7 @@ class UniqueNameTracker(data_structures.CheckpointableDataStructure):
ValueError: If `checkpointable` is not a checkpointable object.
"""
if not isinstance(checkpointable, checkpointable_lib.CheckpointableBase):
if not isinstance(checkpointable, checkpointable_lib.Checkpointable):
raise ValueError(
("Expected a checkpointable value, got %s which does not inherit "
"from CheckpointableBase.") % (checkpointable,))

View File

@ -52,7 +52,7 @@ class UniqueNameTrackerTests(test.TestCase):
save_root = util.Checkpoint(slots=slots)
save_path = save_root.save(checkpoint_prefix)
restore_slots = tracking.Checkpointable()
restore_slots = tracking.AutoCheckpointable()
restore_root = util.Checkpoint(
slots=restore_slots)
status = restore_root.restore(save_path)
@ -68,7 +68,7 @@ class UniqueNameTrackerTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testExample(self):
class SlotManager(tracking.Checkpointable):
class SlotManager(tracking.AutoCheckpointable):
def __init__(self):
self.slotdeps = containers.UniqueNameTracker()

View File

@ -34,7 +34,7 @@ except ImportError:
# pylint: enable=g-import-not-at-top
class NumpyState(base.CheckpointableBase):
class NumpyState(base.Checkpointable):
"""A checkpointable object whose NumPy array attributes are saved/restored.
Example usage:
@ -130,7 +130,7 @@ class NumpyState(base.CheckpointableBase):
@six.add_metaclass(abc.ABCMeta)
class PythonStateWrapper(base.CheckpointableBase):
class PythonStateWrapper(base.Checkpointable):
"""Wraps a Python object for storage in an object-based checkpoint."""
@abc.abstractmethod

View File

@ -43,7 +43,7 @@ class _CallbackSaveable(saver_lib.BaseSaverBuilder.SaveableObject):
return self._restore_callback(tensor)
class _SplitDependency(checkpointable.CheckpointableBase):
class _SplitDependency(checkpointable.Checkpointable):
"""Looks like a regular variable while synchronizing save/restores."""
def __init__(self, save_buffer, restore_buffer, name, dtype, num_components,

View File

@ -44,7 +44,7 @@ def _combine_variable_closure(variable):
return _consume_restore_buffer_fn
class SaveTensorSlicesAsDeps(base.CheckpointableBase):
class SaveTensorSlicesAsDeps(base.Checkpointable):
def __init__(self):
self.combined = resource_variable_ops.ResourceVariable([0., 0., 0., 0.])
@ -59,14 +59,14 @@ class SaveTensorSlicesAsDeps(base.CheckpointableBase):
self._track_checkpointable(dep, name=name)
class HasRegularDeps(tracking.Checkpointable):
class HasRegularDeps(tracking.AutoCheckpointable):
def __init__(self):
self.first_half = resource_variable_ops.ResourceVariable([0., 0.])
self.second_half = resource_variable_ops.ResourceVariable([0., 0.])
class OnlyOneDep(tracking.Checkpointable):
class OnlyOneDep(tracking.AutoCheckpointable):
def __init__(self):
self.first_half = resource_variable_ops.ResourceVariable([0., 0.])

View File

@ -837,7 +837,7 @@ class CudnnLSTMSaveable(CudnnOpaqueParamsSaveable):
checkpointable._track_checkpointable(bias, name="bias") # pylint: disable=protected-access
assert len(biases) == len(weights)
for cell_index, (bias, kernel) in enumerate(zip(biases, weights)):
cell = checkpointable_lib.Checkpointable()
cell = checkpointable_lib.AutoCheckpointable()
checkpointable._track_checkpointable(cell, name="cell-%d" % cell_index) # pylint: disable=protected-access
cell.bias = bias
cell.kernel = kernel

View File

@ -34,7 +34,7 @@ from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
class NonLayerCheckpointable(tracking.Checkpointable):
class NonLayerCheckpointable(tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()

View File

@ -37,7 +37,7 @@ from tensorflow.python.training.checkpointable import base as checkpointable
_to_replace = re.compile("[^A-Za-z0-9.]")
class Metric(checkpointable.CheckpointableBase):
class Metric(checkpointable.Checkpointable):
"""A metric holds state for aggregating statistics over an evaluation run.
Example use with eager execution:

View File

@ -138,7 +138,7 @@ from tensorflow.python.ops.resource_variable_ops import ResourceVariable as Vari
from tensorflow.python.ops.variable_scope import EagerVariableStore
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import template
from tensorflow.python.training.checkpointable.tracking import Checkpointable
from tensorflow.python.training.checkpointable.tracking import AutoCheckpointable as Checkpointable
from tensorflow.python.training.checkpointable.util import CheckpointableSaver
from tensorflow.python.training.checkpointable.util import Checkpoint
from tensorflow.python.util.all_util import remove_undocumented

View File

@ -48,7 +48,7 @@ from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util
class NonLayerCheckpointable(tracking.Checkpointable):
class NonLayerCheckpointable(tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
@ -440,7 +440,7 @@ class CheckpointingTests(test.TestCase):
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.var = util.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
@ -463,7 +463,7 @@ class CheckpointingTests(test.TestCase):
14.))
slots_path = util.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
new_root = tracking.Checkpointable()
new_root = tracking.AutoCheckpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = util.CheckpointableSaver(
@ -508,7 +508,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
@ -526,7 +526,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())

View File

@ -68,7 +68,7 @@ def _device_stack_is_empty():
@tf_export(v1=["data.Iterator"])
class Iterator(checkpointable.CheckpointableBase):
class Iterator(checkpointable.Checkpointable):
"""Represents the state of iterating through a `Dataset`."""
def __init__(self, iterator_resource, initializer, output_types,
@ -491,7 +491,7 @@ def _generate_shared_name(prefix):
return "{}{}".format(prefix, uid)
class EagerIterator(checkpointable.CheckpointableBase):
class EagerIterator(checkpointable.Checkpointable):
"""An iterator producing tf.Tensor objects from a tf.data.Dataset."""
def __init__(self, dataset):

View File

@ -626,7 +626,7 @@ class _MirroredSaveable(saver.BaseSaverBuilder.ResourceVariableSaveable):
class MirroredVariable(DistributedVariable, Mirrored,
checkpointable.CheckpointableBase):
checkpointable.Checkpointable):
"""Holds a map from device to variables whose values are kept in sync."""
def __init__(
@ -748,7 +748,7 @@ def _enclosing_tpu_context():
# tpu.replicate() because it assumes that you're in a device context where you
# can operate on a single version of the variable, but a tpu.replicate()
# operates on all variables and is replicated during a rewrite pass.
class TPUMirroredVariable(checkpointable.CheckpointableBase):
class TPUMirroredVariable(checkpointable.Checkpointable):
"""Holds a map from device to TPU variables whose values are kept in sync."""
def __init__(
@ -1201,7 +1201,7 @@ def _assert_replica_context(strategy):
class ReplicaLocalVariable(DistributedVariable, PerReplica,
checkpointable.CheckpointableBase):
checkpointable.Checkpointable):
"""Holds a map from device to variables whose values are reduced on save."""
def __init__(
@ -1432,7 +1432,7 @@ def value_container(val):
# TODO(josh11b): Descend from Variable.
class AggregatingVariable(checkpointable.CheckpointableBase):
class AggregatingVariable(checkpointable.Checkpointable):
"""A wrapper around a variable that aggregates updates across replicas."""
def __init__(self, strategy, v, aggregation):

View File

@ -3175,7 +3175,7 @@ def _raise_shared_embedding_column_error():
'`DenseFeatures` or `LinearModel` instead.')
class SharedEmbeddingColumnCreator(tracking.Checkpointable):
class SharedEmbeddingColumnCreator(tracking.AutoCheckpointable):
def __init__(self,
dimension,

View File

@ -57,7 +57,7 @@ from tensorflow.tools.docs import doc_controls
@keras_export('keras.layers.Layer')
class Layer(checkpointable.CheckpointableBase):
class Layer(checkpointable.Checkpointable):
"""Base layer class.
This is the class from which all layers inherit.

View File

@ -1434,7 +1434,7 @@ class Network(base_layer.Layer):
session = backend.get_session()
optimizer = getattr(self, 'optimizer', None)
if (optimizer
and not isinstance(optimizer, checkpointable.CheckpointableBase)):
and not isinstance(optimizer, checkpointable.Checkpointable)):
logging.warning(
('This model was compiled with a Keras optimizer (%s) but is being '
'saved in TensorFlow format with `save_weights`. The model\'s '

View File

@ -261,7 +261,7 @@ class Model(Network):
self.optimizer = optimizer
# We've disabled automatic dependency tracking for this method, but do want
# to add a checkpoint dependency on the optimizer if it's checkpointable.
if isinstance(self.optimizer, checkpointable.CheckpointableBase):
if isinstance(self.optimizer, checkpointable.Checkpointable):
self._track_checkpointable(
self.optimizer, name='optimizer', overwrite=True)
self.loss = loss

View File

@ -468,7 +468,7 @@ class RNN(Layer):
self.zero_output_for_mask = kwargs.pop('zero_output_for_mask', False)
super(RNN, self).__init__(**kwargs)
self.cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
if isinstance(cell, checkpointable.Checkpointable):
self._track_checkpointable(self.cell, name='cell')
self.return_sequences = return_sequences
self.return_state = return_state

View File

@ -70,7 +70,7 @@ def _deduplicate_indexed_slices(values, indices):
@six.add_metaclass(abc.ABCMeta)
@keras_export("keras.optimizers.Optimizer")
class OptimizerV2(checkpointable.CheckpointableBase):
class OptimizerV2(checkpointable.Checkpointable):
"""Updated base class for optimizers.
This class defines the API to add Ops to train a model. You never use this

View File

@ -710,7 +710,7 @@ class Nadam(Optimizer):
return dict(list(base_config.items()) + list(config.items()))
class TFOptimizer(Optimizer, checkpointable.CheckpointableBase):
class TFOptimizer(Optimizer, checkpointable.Checkpointable):
"""Wrapper class for native TensorFlow optimizers.
"""

View File

@ -554,7 +554,7 @@ class Layer(base_layer.Layer):
def __setattr__(self, value, name):
# By-pass the automatic dependency tracking performed by the parent Layer.
super(checkpointable.CheckpointableBase, self).__setattr__(value, name)
super(checkpointable.Checkpointable, self).__setattr__(value, name)
def _add_elements_to_collection(elements, collection_list):

View File

@ -164,7 +164,7 @@ class InitializableLookupTableBase(LookupInterface):
self._default_value = ops.convert_to_tensor(
default_value, dtype=self._value_dtype)
self._default_value.get_shape().merge_with(tensor_shape.scalar())
if isinstance(initializer, checkpointable_base.CheckpointableBase):
if isinstance(initializer, checkpointable_base.Checkpointable):
self._initializer = self._track_checkpointable(
initializer, "_initializer")
self._resource_handle = self.create_resource()
@ -315,7 +315,7 @@ class HashTable(InitializableLookupTableBase):
return exported_keys, exported_values
class TableInitializerBase(checkpointable_base.CheckpointableBase):
class TableInitializerBase(checkpointable_base.Checkpointable):
"""Base class for lookup table initializers."""
def __init__(self, key_dtype, value_dtype):

View File

@ -1183,7 +1183,7 @@ class DropoutWrapper(RNNCell):
# Set cell, variational_recurrent, seed before running the code below
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
if isinstance(cell, checkpointable.Checkpointable):
self._track_checkpointable(self._cell, name="cell")
self._variational_recurrent = variational_recurrent
self._seed = seed
@ -1424,7 +1424,7 @@ class ResidualWrapper(RNNCell):
"""
super(ResidualWrapper, self).__init__()
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
if isinstance(cell, checkpointable.Checkpointable):
self._track_checkpointable(self._cell, name="cell")
self._residual_fn = residual_fn
@ -1482,7 +1482,7 @@ class DeviceWrapper(RNNCell):
"""
super(DeviceWrapper, self).__init__()
self._cell = cell
if isinstance(cell, checkpointable.CheckpointableBase):
if isinstance(cell, checkpointable.Checkpointable):
self._track_checkpointable(self._cell, name="cell")
self._device = device
@ -1551,7 +1551,7 @@ class MultiRNNCell(RNNCell):
for cell_number, cell in enumerate(self._cells):
# Add Checkpointable dependencies on these cells so their variables get
# saved with this object when using object-based saving.
if isinstance(cell, checkpointable.CheckpointableBase):
if isinstance(cell, checkpointable.Checkpointable):
# TODO(allenl): Track down non-Checkpointable callers.
self._track_checkpointable(cell, name="cell-%d" % (cell_number,))
self._state_is_tuple = state_is_tuple

View File

@ -151,7 +151,7 @@ def _shape_tensor(shape):
@tf_export("random.experimental.Generator")
class Generator(tracking.Checkpointable):
class Generator(tracking.AutoCheckpointable):
"""Random-number generator.
It uses Variable to manage its internal state.

View File

@ -232,7 +232,7 @@ def _skip_common_stack_elements(stacktrace, base_case):
return stacktrace[-1:]
class Template(checkpointable.CheckpointableBase):
class Template(checkpointable.Checkpointable):
"""Wrap a function to aid in variable sharing.
Templates are functions that create variables the first time they are called

View File

@ -204,7 +204,7 @@ class VariableMetaclass(type):
@tf_export("Variable", v1=[])
class Variable(six.with_metaclass(VariableMetaclass,
checkpointable.CheckpointableBase)):
checkpointable.Checkpointable)):
"""See the [Variables Guide](https://tensorflow.org/guide/variables).
A variable maintains state in the graph across calls to `run()`. You add a

View File

@ -150,7 +150,7 @@ class _Loader(object):
# individually callable by adding a `__call__` method to the classes of
# the objects instances that have a `__call__` property.
class _UserObject(tracking.Checkpointable):
class _UserObject(tracking.AutoCheckpointable):
pass
return _UserObject(), setattr
@ -200,4 +200,3 @@ def load(export_dir):
"Currently only SavedModels exported with `tf.saved_model.save` may be "
"imported. Other SavedModels may eventually be supported via load().")
return root

View File

@ -43,17 +43,17 @@ class LoadTest(test.TestCase):
return load.load(path)
def test_structure_import(self):
root = tracking.Checkpointable()
root.dep_one = tracking.Checkpointable()
root.dep_two = tracking.Checkpointable()
root.dep_two.dep = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.dep_one = tracking.AutoCheckpointable()
root.dep_two = tracking.AutoCheckpointable()
root.dep_two.dep = tracking.AutoCheckpointable()
root.dep_three = root.dep_two.dep
imported = self.cycle(root)
self.assertIs(imported.dep_three, imported.dep_two.dep)
self.assertIsNot(imported.dep_one, imported.dep_two)
def test_variables(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.v1 = variables.Variable(1., trainable=True)
root.v2 = variables.Variable(2., trainable=False)
imported = self.cycle(root)
@ -63,7 +63,7 @@ class LoadTest(test.TestCase):
self.assertFalse(imported.v2.trainable)
def test_capture_variables(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.weights = variables.Variable(2.)
root.f = def_function.function(
lambda x: root.weights * x,
@ -83,7 +83,7 @@ class LoadTest(test.TestCase):
file1 = self._make_asset("contents 1")
file2 = self._make_asset("contents 2")
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.asset1 = tracking.TrackableAsset(file1)
root.asset2 = tracking.TrackableAsset(file2)
@ -102,7 +102,7 @@ class LoadTest(test.TestCase):
self.assertEquals("contents 2", f.read())
def test_capture_assets(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
root.f = def_function.function(
lambda: root.vocab.asset_path,
@ -116,7 +116,7 @@ class LoadTest(test.TestCase):
def test_dedup_assets(self):
vocab = self._make_asset("contents")
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.asset1 = tracking.TrackableAsset(vocab)
root.asset2 = tracking.TrackableAsset(vocab)
imported = self.cycle(root)
@ -128,7 +128,7 @@ class LoadTest(test.TestCase):
def func(x):
return 2 * x
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = func
# Add two traces.
@ -146,7 +146,7 @@ class LoadTest(test.TestCase):
def func(x):
return 2 * x
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = func
imported = self.cycle(root)
@ -157,7 +157,7 @@ class LoadTest(test.TestCase):
def func(x):
return 2 * x
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = func
imported = self.cycle(
@ -173,7 +173,7 @@ class LoadTest(test.TestCase):
lambda x: f(x) + 1.0,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.g = g
imported = self.cycle(root)
imported.g(constant_op.constant([1.0]))
@ -186,7 +186,7 @@ class LoadTest(test.TestCase):
else:
return 7
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
@ -208,7 +208,7 @@ class LoadTest(test.TestCase):
else:
return 7
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(func)
x = constant_op.constant(10)
@ -240,7 +240,7 @@ class LoadTest(test.TestCase):
named_tuple = named_tuple_type(a=input1 + input2, b=input1 * input2)
return [named_tuple, input2, {"x": 0.5}]
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(func)
result = root.f(constant_op.constant(2), constant_op.constant(3))
@ -270,7 +270,7 @@ class LoadTest(test.TestCase):
else:
return 7
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(func)
self.assertEqual(20, root.f(constant_op.constant(10), True).numpy())
@ -285,7 +285,7 @@ class LoadTest(test.TestCase):
self.assertEqual(6, imported.f(constant_op.constant(1), defg=7.0).numpy())
def test_member_function(self):
class CheckpointableWithMember(tracking.Checkpointable):
class CheckpointableWithMember(tracking.AutoCheckpointable):
def __init__(self):
super(CheckpointableWithMember, self).__init__()
@ -310,7 +310,7 @@ class LoadTest(test.TestCase):
self.assertEqual(27, imported.f(constant_op.constant(2)).numpy())
def test_side_effect_listing(self):
class M(tracking.Checkpointable):
class M(tracking.AutoCheckpointable):
def __init__(self):
super(M, self).__init__()
@ -334,7 +334,7 @@ class LoadTest(test.TestCase):
lambda x: x*weight + bias,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.weight = weight
root.bias = bias
root.g = g
@ -346,16 +346,16 @@ class LoadTest(test.TestCase):
self.assertAllClose(grad, [3.5, 1.0])
def test_callable(self):
class M1(tracking.Checkpointable):
class M1(tracking.AutoCheckpointable):
@def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
def __call__(self, x):
return x
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.m1 = M1()
root.m2 = tracking.Checkpointable()
root.m2 = tracking.AutoCheckpointable()
root.m2.__call__ = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
@ -378,9 +378,9 @@ class LoadTest(test.TestCase):
func = def_function.function(
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])(
lambda x: x*3.0)
root = tracking.Checkpointable()
root.__call__ = tracking.Checkpointable()
root.__call__.__call__ = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.__call__ = tracking.AutoCheckpointable()
root.__call__.__call__ = tracking.AutoCheckpointable()
root.__call__.__call__.__call__ = func
imported = self.cycle(root)
@ -395,7 +395,7 @@ class LoadTest(test.TestCase):
def func(x):
return 2 * x
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = func
self.assertAllEqual([2], root.f(constant_op.constant([1])).numpy())
@ -419,7 +419,7 @@ class LoadTest(test.TestCase):
imported.f(constant_op.constant([1, 2, 3])).numpy())
def test_dict(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.variables = dict(a=variables.Variable(1.))
root.variables["b"] = variables.Variable(2.)
root.variables["c"] = 1
@ -429,7 +429,7 @@ class LoadTest(test.TestCase):
self.assertEqual(set(["a", "b"]), set(imported.variables.keys()))
def test_list(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.variables = [variables.Variable(1.)]
root.variables.append(1)
root.variables.append(variables.Variable(3.))

View File

@ -25,7 +25,7 @@ from tensorflow.python.saved_model import saved_object_graph_pb2
from tensorflow.python.training.checkpointable import tracking
class CustomTestClass(tracking.Checkpointable):
class CustomTestClass(tracking.AutoCheckpointable):
def __init__(self, version):
self.version = version
@ -56,7 +56,7 @@ revived_types.register_revived_type(
class RegistrationMatchingTest(test.TestCase):
def test_save_typecheck(self):
self.assertIs(revived_types.serialize(tracking.Checkpointable()), None)
self.assertIs(revived_types.serialize(tracking.AutoCheckpointable()), None)
def test_load_identifier_not_found(self):
nothing_matches = revived_types.deserialize(

View File

@ -796,7 +796,7 @@ def save(obj, export_dir, signatures=None):
"tf.enable_eager_execution() must run first when calling it from "
"TensorFlow 1.x.")
# pylint: enable=line-too-long
if not isinstance(obj, base.CheckpointableBase):
if not isinstance(obj, base.Checkpointable):
raise ValueError(
"Expected a Checkpointable object for export, got {}.".format(obj))

View File

@ -87,7 +87,7 @@ def _import_and_infer(
class SaveTest(test.TestCase):
def test_method_save_signature(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(
lambda x: 2. * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
@ -99,7 +99,7 @@ class SaveTest(test.TestCase):
_import_and_infer(save_dir, {"x": 1.}))
def test_method_save_concrete(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(
lambda z: {"out": 2. * z})
root.f(constant_op.constant(1.))
@ -115,7 +115,7 @@ class SaveTest(test.TestCase):
save_dir, {"z": 1.}, signature_key="non_default_key"))
def test_non_concrete_error(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(lambda x: 2. * x)
root.f(constant_op.constant(1.))
save_dir = os.path.join(self.get_temp_dir(), "saved_model")
@ -124,7 +124,7 @@ class SaveTest(test.TestCase):
save.save(root, save_dir, root.f)
def test_nested_inputs(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(
lambda x: 2. * x[0],
input_signature=([tensor_spec.TensorSpec(None, dtypes.float32),
@ -137,7 +137,7 @@ class SaveTest(test.TestCase):
root.f.get_concrete_function()
def test_nested_outputs(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(lambda x: (2. * x, (3. * x, 4. * x)))
root.f(constant_op.constant(1.))
to_save = root.f.get_concrete_function(constant_op.constant(1.))
@ -158,7 +158,7 @@ class SaveTest(test.TestCase):
save.save(root, save_dir, to_save)
def test_variable(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.v1 = variables.Variable(3.)
root.v2 = variables.Variable(2.)
root.f = def_function.function(
@ -186,7 +186,7 @@ class SaveTest(test.TestCase):
def test_trivial_save_exception(self):
save_dir = os.path.join(self.get_temp_dir(), "saved_model")
with self.assertRaisesRegexp(ValueError, "signature"):
save.save(tracking.Checkpointable(), save_dir)
save.save(tracking.AutoCheckpointable(), save_dir)
def test_single_method_default_signature(self):
model = _ModelWithOptimizer()
@ -200,7 +200,7 @@ class SaveTest(test.TestCase):
{"x": [[3., 4.]], "y": [2.]}))
def test_single_function_default_signature(self):
model = tracking.Checkpointable()
model = tracking.AutoCheckpointable()
model.f = def_function.function(lambda: 3., input_signature=())
model.f()
save_dir = os.path.join(self.get_temp_dir(), "saved_model")
@ -369,7 +369,7 @@ class AssetTests(test.TestCase):
_import_and_infer(second_dir, {"keys": ["gamma", "beta"]}))
def test_unused_asset(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.f = def_function.function(
lambda x: 2. * x,
input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])

View File

@ -460,16 +460,16 @@ def no_automatic_dependency_tracking(method):
target=method, decorator_func=_method_wrapper)
class CheckpointableBase(object):
class Checkpointable(object):
"""Base class for `Checkpointable` objects without automatic dependencies.
This class has no __setattr__ override for performance reasons. Dependencies
must be added explicitly. Unless attribute assignment is performance-critical,
use `Checkpointable` instead. Use `CheckpointableBase` for `isinstance`
use `AutoCheckpointable` instead. Use `Checkpointable` for `isinstance`
checks.
"""
# CheckpointableBase does not do automatic dependency tracking, but uses the
# Checkpointable does not do automatic dependency tracking, but uses the
# no_automatic_dependency_tracking decorator so it can avoid adding
# dependencies if a subclass is Checkpointable / inherits from Model (both of
# which have __setattr__ overrides).
@ -623,7 +623,7 @@ class CheckpointableBase(object):
# assign again. It will add this variable to our dependencies, and if there
# is a non-trivial restoration queued, it will handle that. This also
# handles slot variables.
if not overwrite or isinstance(new_variable, CheckpointableBase):
if not overwrite or isinstance(new_variable, Checkpointable):
return self._track_checkpointable(new_variable, name=name,
overwrite=overwrite)
else:
@ -695,7 +695,7 @@ class CheckpointableBase(object):
ValueError: If another object is already tracked by this name.
"""
self._maybe_initialize_checkpointable()
if not isinstance(checkpointable, CheckpointableBase):
if not isinstance(checkpointable, Checkpointable):
raise TypeError(
("Checkpointable._track_checkpointable() passed type %s, not a "
"Checkpointable.") % (type(checkpointable),))
@ -742,7 +742,7 @@ class CheckpointableBase(object):
name: The name of the dependency within this object (`self`), used to
match `checkpointable` with values saved in a checkpoint.
checkpointable: The Checkpointable object to restore (inheriting from
`CheckpointableBase`).
`Checkpointable`).
"""
self._maybe_initialize_checkpointable()
checkpointable._maybe_initialize_checkpointable() # pylint: disable=protected-access
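To make the contrast in the docstring above concrete, a minimal sketch under the new names (assumes the internal base and tracking modules at this revision; the object and dependency names are illustrative):

from tensorflow.python.training.checkpointable import base
from tensorflow.python.training.checkpointable import tracking

# base.Checkpointable (formerly CheckpointableBase) has no __setattr__ override,
# so dependencies are registered explicitly:
root = base.Checkpointable()
leaf = base.Checkpointable()
root._track_checkpointable(leaf, name="leaf")

# tracking.AutoCheckpointable (formerly Checkpointable) registers the dependency
# automatically when a checkpointable value is assigned to an attribute:
auto_root = tracking.AutoCheckpointable()
auto_root.leaf = tracking.AutoCheckpointable()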

View File

@ -29,13 +29,13 @@ from tensorflow.python.training.checkpointable import util
class InterfaceTests(test.TestCase):
def testOverwrite(self):
root = base.CheckpointableBase()
leaf = base.CheckpointableBase()
root = base.Checkpointable()
leaf = base.Checkpointable()
root._track_checkpointable(leaf, name="leaf")
(current_name, current_dependency), = root._checkpoint_dependencies
self.assertIs(leaf, current_dependency)
self.assertEqual("leaf", current_name)
duplicate_name_dep = base.CheckpointableBase()
duplicate_name_dep = base.Checkpointable()
with self.assertRaises(ValueError):
root._track_checkpointable(duplicate_name_dep, name="leaf")
root._track_checkpointable(duplicate_name_dep, name="leaf", overwrite=True)
@ -44,7 +44,7 @@ class InterfaceTests(test.TestCase):
self.assertEqual("leaf", current_name)
def testAddVariableOverwrite(self):
root = base.CheckpointableBase()
root = base.Checkpointable()
a = root._add_variable_with_custom_getter(
name="v", shape=[], getter=variable_scope.get_variable)
self.assertEqual([root, a], util.list_objects(root))
@ -61,15 +61,15 @@ class InterfaceTests(test.TestCase):
getter=variable_scope.get_variable)
def testAssertConsumedWithUnusedPythonState(self):
has_config = base.CheckpointableBase()
has_config = base.Checkpointable()
has_config.get_config = lambda: {}
saved = util.Checkpoint(obj=has_config)
save_path = saved.save(os.path.join(self.get_temp_dir(), "ckpt"))
restored = util.Checkpoint(obj=base.CheckpointableBase())
restored = util.Checkpoint(obj=base.Checkpointable())
restored.restore(save_path).assert_consumed()
def testAssertConsumedFailsWithUsedPythonState(self):
has_config = base.CheckpointableBase()
has_config = base.Checkpointable()
attributes = {
"foo_attr": functools.partial(
base.PythonStringStateSaveable,
@ -78,7 +78,7 @@ class InterfaceTests(test.TestCase):
has_config._gather_saveables_for_checkpoint = lambda: attributes
saved = util.Checkpoint(obj=has_config)
save_path = saved.save(os.path.join(self.get_temp_dir(), "ckpt"))
restored = util.Checkpoint(obj=base.CheckpointableBase())
restored = util.Checkpoint(obj=base.Checkpointable())
status = restored.restore(save_path)
with self.assertRaisesRegexp(AssertionError, "foo_attr"):
status.assert_consumed()

View File

@ -58,7 +58,7 @@ def _wrap_or_unwrap(value):
"""Wraps basic data structures, unwraps NoDependency objects."""
if isinstance(value, NoDependency):
return value.value
if isinstance(value, base.CheckpointableBase):
if isinstance(value, base.Checkpointable):
return value # Skip conversion for already checkpointable objects.
elif isinstance(value, dict):
return _DictWrapper(value)
@ -99,7 +99,7 @@ def sticky_attribute_assignment(checkpointable, name, value):
value = _wrap_or_unwrap(value)
if not add_dependency:
return value
if isinstance(value, base.CheckpointableBase):
if isinstance(value, base.Checkpointable):
checkpointable._track_checkpointable( # pylint: disable=protected-access
value, name=name,
# Allow the user to switch the Checkpointable which is tracked by this
@ -109,7 +109,7 @@ def sticky_attribute_assignment(checkpointable, name, value):
return value
class CheckpointableDataStructure(base.CheckpointableBase):
class CheckpointableDataStructure(base.Checkpointable):
"""Base class for data structures which contain checkpointable objects."""
def __init__(self):
@ -122,11 +122,11 @@ class CheckpointableDataStructure(base.CheckpointableBase):
checkpointable=self, value=value, name=name)
if isinstance(value, variables.Variable):
self._extra_variables.append(value)
if not isinstance(value, base.CheckpointableBase):
if not isinstance(value, base.Checkpointable):
raise ValueError(
("Only checkpointable objects (such as Layers or Optimizers) may be "
"stored in a List object. Got %s, which does not inherit from "
"CheckpointableBase.") % (value,))
"Checkpointable.") % (value,))
if hasattr(value, "_use_resource_variables"):
# In subclassed models, legacy layers (tf.layers) must always use
# resource variables.
@ -410,7 +410,7 @@ class _ListWrapper(List, collections.MutableSequence,
def __setitem__(self, key, value):
self._check_external_modification()
if isinstance(self._storage[key], base.CheckpointableBase):
if isinstance(self._storage[key], base.Checkpointable):
self._non_append_mutation = True
self._storage[key] = self._track_value(value, self._name_element(key))
self._update_snapshot()
@ -693,14 +693,14 @@ class _DictWrapper(Mapping, collections.MutableMapping):
else:
value = _wrap_or_unwrap(value)
existing_dependency = None
if not no_dep and isinstance(value, base.CheckpointableBase):
if not no_dep and isinstance(value, base.Checkpointable):
# Non-string keys are OK as long as we have no reason to add a
# dependency on the value (either because the value is not
# checkpointable, or because it was wrapped in a NoDependency object).
self._non_string_key = True
current_value = self._storage.setdefault(key, value)
if current_value is not value:
if ((not no_dep and isinstance(value, base.CheckpointableBase))
if ((not no_dep and isinstance(value, base.Checkpointable))
# We don't want to just check that the existing object is
# checkpointable, since it may have been wrapped in a NoDependency
# object.
@ -716,7 +716,7 @@ class _DictWrapper(Mapping, collections.MutableMapping):
def __delitem__(self, key):
self._check_external_modification()
existing_value = self[key]
if isinstance(existing_value, base.CheckpointableBase):
if isinstance(existing_value, base.Checkpointable):
# Deleting tracked checkpointable values means restoring is problematic,
# so we'll throw an exception on save.
self._non_append_mutation = True
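As the wrappers above suggest, containers assigned to an AutoCheckpointable are converted and tracked, while NoDependency opts a value out; a minimal sketch under the new names, mirroring the tests that follow (the attribute names and the util.list_objects call are illustrative):

from tensorflow.python.training.checkpointable import data_structures
from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util

obj = tracking.AutoCheckpointable()
obj.layers = [tracking.AutoCheckpointable()]   # list is wrapped; its element becomes a dependency
obj.cache = data_structures.NoDependency({})   # unwrapped and stored, but never tracked
deps = util.list_objects(obj)                  # obj, the wrapped list, and its element; not the cache dict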

View File

@ -210,8 +210,8 @@ class ListTests(test.TestCase):
def testListWrapperBasic(self):
# _ListWrapper, unlike List, compares like the built-in list type (since it
# is used to automatically replace lists).
a = tracking.Checkpointable()
b = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
b = tracking.AutoCheckpointable()
self.assertEqual([a, a],
[a, a])
self.assertEqual(data_structures._ListWrapper([a, a]),
@ -343,7 +343,7 @@ class MappingTests(test.TestCase):
def testLayerCollectionWithExternalMutation(self):
d = {}
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.wrapper = d
self.assertEqual([], root.wrapper.layers)
self.assertEqual([], root.wrapper.trainable_weights)
@ -361,7 +361,7 @@ class MappingTests(test.TestCase):
self.assertEqual(2, len(has_mappings))
self.assertNotIn(data_structures.Mapping(), has_mappings)
# In contrast to Mapping, dict wrappers are not hashable
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.d = {}
self.assertEqual({}, a.d)
self.assertFalse({} != a.d) # pylint: disable=g-explicit-bool-comparison
@ -370,7 +370,7 @@ class MappingTests(test.TestCase):
set([a.d])
def testDictWrapperBadKeys(self):
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.d = {}
a.d[1] = data_structures.List()
model = training.Model()
@ -380,7 +380,7 @@ class MappingTests(test.TestCase):
model.save_weights(save_path)
def testDictWrapperNoDependency(self):
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.d = data_structures.NoDependency({})
a.d[1] = [3]
self.assertEqual([a], util.list_objects(a))
@ -391,7 +391,7 @@ class MappingTests(test.TestCase):
model.load_weights(save_path)
def testNonStringKeyNotCheckpointableValue(self):
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.d = {}
a.d["a"] = [3]
a.d[1] = data_structures.NoDependency([3])
@ -405,15 +405,15 @@ class MappingTests(test.TestCase):
def testNonAppendNotCheckpointable(self):
# Non-append mutations (deleting or overwriting values) are OK when the
# values aren't tracked.
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.d = {}
a.d["a"] = [3]
a.d[1] = 3
a.d[1] = 2
self.assertEqual(2, a.d[1])
del a.d[1]
a.d[2] = data_structures.NoDependency(tracking.Checkpointable())
second = tracking.Checkpointable()
a.d[2] = data_structures.NoDependency(tracking.AutoCheckpointable())
second = tracking.AutoCheckpointable()
a.d[2] = data_structures.NoDependency(second)
self.assertIs(second, a.d[2])
self.assertEqual([a, a.d, a.d["a"]], util.list_objects(a))
@ -475,7 +475,7 @@ class MappingTests(test.TestCase):
self.assertEqual({1: 3}, new_dict)
def testListShallowCopy(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
orig_list = [[1.]]
root.a = orig_list
copied = copy.copy(root.a)
@ -492,7 +492,7 @@ class MappingTests(test.TestCase):
util.list_objects(copy.copy(root.a))
def testListDeepCopy(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
orig_list = [[1.]]
root.a = orig_list
copied = copy.deepcopy(root.a)
@ -509,7 +509,7 @@ class MappingTests(test.TestCase):
util.list_objects(copy.deepcopy(root.a))
def testDictShallowCopy(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
orig_dict = {"a": [1.]}
root.a = orig_dict
copied = copy.copy(root.a)
@ -526,7 +526,7 @@ class MappingTests(test.TestCase):
util.list_objects(copy.copy(root.a))
def testDictDeepCopy(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
orig_dict = {"a": [1.]}
root.a = orig_dict
copied = copy.deepcopy(root.a)
@ -543,8 +543,8 @@ class MappingTests(test.TestCase):
util.list_objects(copy.deepcopy(root.a))
def testShallowCopyCheckpointable(self):
original = tracking.Checkpointable()
original_sub = tracking.Checkpointable()
original = tracking.AutoCheckpointable()
original_sub = tracking.AutoCheckpointable()
original.a = [[1.]]
original.b = {"a": original_sub}
shallow_copied = copy.copy(original)
@ -557,15 +557,15 @@ class MappingTests(test.TestCase):
self.assertIn(shallow_copied.b["a"], shallow_deps)
def testDeepCopyCheckpointable(self):
original = tracking.Checkpointable()
original_sub = tracking.Checkpointable()
original = tracking.AutoCheckpointable()
original_sub = tracking.AutoCheckpointable()
original.a = [[1.]]
original.b = {"a": original_sub}
deep_copied = copy.deepcopy(original)
self.assertIsNot(original, deep_copied)
self.assertIsNot(original_sub, deep_copied.b["a"])
self.assertEqual([[1.]], deep_copied.a)
self.assertIsInstance(deep_copied.b["a"], tracking.Checkpointable)
self.assertIsInstance(deep_copied.b["a"], tracking.AutoCheckpointable)
deps = util.list_objects(deep_copied)
self.assertIn(deep_copied.a, deps)
self.assertIn(deep_copied.b, deps)

View File

@ -41,7 +41,7 @@ class NotCheckpointable(object):
pass
class Checkpointable(base.CheckpointableBase):
class AutoCheckpointable(base.Checkpointable):
"""Manages dependencies on other objects.
`Checkpointable` objects may have dependencies: other `Checkpointable` objects
@ -74,7 +74,7 @@ class Checkpointable(base.CheckpointableBase):
if getattr(self, "_setattr_tracking", True):
value = data_structures.sticky_attribute_assignment(
checkpointable=self, value=value, name=name)
super(Checkpointable, self).__setattr__(name, value)
super(AutoCheckpointable, self).__setattr__(name, value)
def _no_dependency(self, value):
"""Override to allow CheckpointableBase to disable dependency tracking."""
@ -124,7 +124,7 @@ def resource_tracker_scope(resource_tracker):
_RESOURCE_TRACKER_STACK = old
class TrackableResource(base.CheckpointableBase):
class TrackableResource(base.Checkpointable):
"""Base class for all resources that need to be tracked."""
def __init__(self):
@ -151,7 +151,7 @@ class TrackableResource(base.CheckpointableBase):
return self._resource_handle
class TrackableAsset(base.CheckpointableBase):
class TrackableAsset(base.Checkpointable):
"""Base class for asset files which need to be tracked."""
def __init__(self, path):

View File

@ -35,10 +35,10 @@ from tensorflow.python.util import nest
class InterfaceTests(test.TestCase):
def testMultipleAssignment(self):
root = tracking.Checkpointable()
root.leaf = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.leaf = tracking.AutoCheckpointable()
root.leaf = root.leaf
duplicate_name_dep = tracking.Checkpointable()
duplicate_name_dep = tracking.AutoCheckpointable()
with self.assertRaisesRegexp(ValueError, "already declared"):
root._track_checkpointable(duplicate_name_dep, name="leaf")
# No error; we're overriding __setattr__, so we can't really stop people
@ -50,10 +50,10 @@ class InterfaceTests(test.TestCase):
self.assertIs(duplicate_name_dep, dep_object)
def testNoDependency(self):
root = tracking.Checkpointable()
hasdep = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
hasdep = tracking.AutoCheckpointable()
root.hasdep = hasdep
nodep = tracking.Checkpointable()
nodep = tracking.AutoCheckpointable()
root.nodep = data_structures.NoDependency(nodep)
self.assertEqual(1, len(root._checkpoint_dependencies))
self.assertIs(root._checkpoint_dependencies[0].ref, root.hasdep)
@ -66,16 +66,16 @@ class InterfaceTests(test.TestCase):
def __init__(self):
super(NoDependencyModel, self).__init__()
self.a = []
self.b = tracking.Checkpointable()
self.b = tracking.AutoCheckpointable()
nodeps = NoDependencyModel()
self.assertEqual([nodeps], util.list_objects(nodeps))
def testListBasic(self):
a = tracking.Checkpointable()
b = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
b = tracking.AutoCheckpointable()
a.l = [b]
c = tracking.Checkpointable()
c = tracking.AutoCheckpointable()
a.l.append(c)
a_deps = util.list_objects(a)
self.assertIn(b, a_deps)
@ -87,10 +87,10 @@ class InterfaceTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testMutationDirtiesList(self):
a = tracking.Checkpointable()
b = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
b = tracking.AutoCheckpointable()
a.l = [b]
c = tracking.Checkpointable()
c = tracking.AutoCheckpointable()
a.l.insert(0, c)
checkpoint = util.Checkpoint(a=a)
with self.assertRaisesRegexp(ValueError, "A list element was replaced"):
@ -98,11 +98,11 @@ class InterfaceTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testOutOfBandEditDirtiesList(self):
a = tracking.Checkpointable()
b = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
b = tracking.AutoCheckpointable()
held_reference = [b]
a.l = held_reference
c = tracking.Checkpointable()
c = tracking.AutoCheckpointable()
held_reference.append(c)
checkpoint = util.Checkpoint(a=a)
with self.assertRaisesRegexp(ValueError, "The wrapped list was modified"):
@ -110,25 +110,25 @@ class InterfaceTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testNestedLists(self):
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.l = []
b = tracking.Checkpointable()
b = tracking.AutoCheckpointable()
a.l.append([b])
c = tracking.Checkpointable()
c = tracking.AutoCheckpointable()
a.l[0].append(c)
a_deps = util.list_objects(a)
self.assertIn(b, a_deps)
self.assertIn(c, a_deps)
a.l[0].append(1)
d = tracking.Checkpointable()
d = tracking.AutoCheckpointable()
a.l[0].append(d)
a_deps = util.list_objects(a)
self.assertIn(d, a_deps)
self.assertIn(b, a_deps)
self.assertIn(c, a_deps)
self.assertNotIn(1, a_deps)
e = tracking.Checkpointable()
f = tracking.Checkpointable()
e = tracking.AutoCheckpointable()
f = tracking.AutoCheckpointable()
a.l1 = [[], [e]]
a.l1[0].append(f)
a_deps = util.list_objects(a)
@ -183,7 +183,7 @@ class InterfaceTests(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testAssertions(self):
a = tracking.Checkpointable()
a = tracking.AutoCheckpointable()
a.l = {"k": [numpy.zeros([2, 2])]}
self.assertAllEqual(nest.flatten({"k": [numpy.zeros([2, 2])]}),
nest.flatten(a.l))

View File

@ -1655,7 +1655,7 @@ def frozen_saver(root_checkpointable):
@tf_export("train.Checkpoint")
class Checkpoint(tracking.Checkpointable):
class Checkpoint(tracking.AutoCheckpointable):
"""Groups checkpointable objects, saving and restoring them.
`Checkpoint`'s constructor accepts keyword arguments whose values are types
@ -1757,7 +1757,7 @@ class Checkpoint(tracking.Checkpointable):
"""
super(Checkpoint, self).__init__()
for k, v in sorted(kwargs.items(), key=lambda item: item[0]):
if not isinstance(v, (base.CheckpointableBase,
if not isinstance(v, (base.Checkpointable,
def_function.PolymorphicFunction)):
raise ValueError(
("`Checkpoint` was expecting a checkpointable object (an object "

View File

@ -51,7 +51,7 @@ from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
class NonLayerCheckpointable(tracking.Checkpointable):
class NonLayerCheckpointable(tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
@ -139,7 +139,7 @@ class InterfaceTests(test.TestCase):
def testInitNotCalled(self):
class NoInit(tracking.Checkpointable):
class NoInit(tracking.AutoCheckpointable):
def __init__(self):
pass
@ -148,7 +148,7 @@ class InterfaceTests(test.TestCase):
checkpointable_utils.add_variable(NoInit(), "var", shape=[])
def testShapeDtype(self):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
v1 = checkpointable_utils.add_variable(
root, name="v1", initializer=3., dtype=dtypes.float64)
self.assertEqual(dtypes.float64, v1.dtype)
@ -180,7 +180,7 @@ class InterfaceTests(test.TestCase):
def testNotCheckpointable(self):
class CallsFunctionalStuff(
tracking.NotCheckpointable, tracking.Checkpointable):
tracking.NotCheckpointable, tracking.AutoCheckpointable):
pass
test_dir = self.get_temp_dir()
@ -190,7 +190,7 @@ class InterfaceTests(test.TestCase):
checkpoint.save(prefix)
class CallsFunctionalStuffOtherMRO(
tracking.Checkpointable, tracking.NotCheckpointable):
tracking.AutoCheckpointable, tracking.NotCheckpointable):
pass
checkpoint_reversed = checkpointable_utils.Checkpoint(
@ -220,7 +220,7 @@ class _MirroringSaveable(saver_lib.BaseSaverBuilder.SaveableObject):
self._mirrored_variable.assign(tensor))
class _OwnsMirroredVariables(base.CheckpointableBase):
class _OwnsMirroredVariables(base.Checkpointable):
"""A Checkpointable object which returns a more complex SaveableObject."""
def __init__(self):
@ -653,7 +653,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
# pylint: enable=cell-var-from-loop
def _get_checkpoint_name(self, name):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
checkpointable_utils.add_variable(
root, name=name, shape=[1, 2], dtype=dtypes.float64)
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
@ -674,8 +674,8 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNumberedPath(self):
root = tracking.Checkpointable()
leaf = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
leaf = tracking.AutoCheckpointable()
root.leaf = leaf
checkpointable_utils.add_variable(leaf, name="v", shape=[])
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
@ -684,8 +684,8 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testLocalNameValidation(self):
root = tracking.Checkpointable()
leaf = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
leaf = tracking.AutoCheckpointable()
# Dots are escaped, which avoids conflicts with reserved names.
root._track_checkpointable(leaf, name=".ATTRIBUTES")
checkpointable_utils.add_variable(checkpointable=leaf, name="a", shape=[])
@ -726,13 +726,13 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testLateDependencyTracking(self):
class Dependency(tracking.Checkpointable):
class Dependency(tracking.AutoCheckpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
class LateDependencies(tracking.Checkpointable):
class LateDependencies(tracking.AutoCheckpointable):
def add_dep(self):
self.dep = Dependency()
@ -759,13 +759,13 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testDepAfterVar(self):
class Dependency(tracking.Checkpointable):
class Dependency(tracking.AutoCheckpointable):
def build(self):
self.var = checkpointable_utils.add_variable(
self, "var", initializer=0.)
class DepAfterVar(tracking.Checkpointable):
class DepAfterVar(tracking.AutoCheckpointable):
def add_dep(self):
dep = Dependency()
@ -792,7 +792,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.var = checkpointable_utils.add_variable(
root, name="var", initializer=0.)
optimizer = adam.Adam(0.1)
@ -815,7 +815,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
14.))
slots_path = checkpointable_utils.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
new_root = tracking.Checkpointable()
new_root = tracking.AutoCheckpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = checkpointable_utils.CheckpointableSaver(
@ -861,8 +861,8 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testOverlappingRestores(self):
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep = tracking.Checkpointable()
save_root = tracking.AutoCheckpointable()
save_root.dep = tracking.AutoCheckpointable()
save_root.dep.var = checkpointable_utils.add_variable(
save_root.dep, name="var", initializer=0.)
self.evaluate(state_ops.assign(save_root.dep.var, 12.))
@ -871,13 +871,13 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
self.evaluate(state_ops.assign(save_root.dep.var, 13.))
second_path = saver.save(os.path.join(checkpoint_directory, "second"))
first_root = tracking.Checkpointable()
second_root = tracking.Checkpointable()
first_root = tracking.AutoCheckpointable()
second_root = tracking.AutoCheckpointable()
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
load_dep = tracking.Checkpointable()
load_dep = tracking.AutoCheckpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
@ -891,13 +891,13 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
# Try again with the order of the restore() reversed. The last restore
# determines the final value.
first_root = tracking.Checkpointable()
second_root = tracking.Checkpointable()
first_root = tracking.AutoCheckpointable()
second_root = tracking.AutoCheckpointable()
second_status = checkpointable_utils.CheckpointableSaver(
second_root).restore(second_path)
first_status = checkpointable_utils.CheckpointableSaver(
first_root).restore(first_path)
load_dep = tracking.Checkpointable()
load_dep = tracking.AutoCheckpointable()
load_dep.var = checkpointable_utils.add_variable(
load_dep, name="var", shape=[])
first_root.dep = load_dep
@ -913,23 +913,23 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testAmbiguousLoad(self):
# Not OK to split one checkpoint object into two
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep_one = tracking.Checkpointable()
save_root.dep_two = tracking.Checkpointable()
dep_three = tracking.Checkpointable()
save_root = tracking.AutoCheckpointable()
save_root.dep_one = tracking.AutoCheckpointable()
save_root.dep_two = tracking.AutoCheckpointable()
dep_three = tracking.AutoCheckpointable()
save_root.dep_one.dep_three = dep_three
save_root.dep_two.dep_three = dep_three
checkpointable_utils.add_variable(dep_three, name="var", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
load_root = tracking.Checkpointable()
load_root = tracking.AutoCheckpointable()
status = checkpointable_utils.CheckpointableSaver(load_root).restore(
save_path)
load_root.dep_one = tracking.Checkpointable()
load_root.dep_two = tracking.Checkpointable()
load_root.dep_one.dep_three = tracking.Checkpointable()
load_root.dep_two.dep_three = tracking.Checkpointable()
load_root.dep_one = tracking.AutoCheckpointable()
load_root.dep_two = tracking.AutoCheckpointable()
load_root.dep_one.dep_three = tracking.AutoCheckpointable()
load_root.dep_two.dep_three = tracking.AutoCheckpointable()
checkpointable_utils.add_variable(
load_root.dep_one.dep_three, name="var", initializer=0.)
with self.assertRaises(AssertionError):
@ -941,9 +941,9 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testObjectsCombined(self):
# Currently fine to load two checkpoint objects into one Python object
checkpoint_directory = self.get_temp_dir()
save_root = tracking.Checkpointable()
save_root.dep_one = tracking.Checkpointable()
save_root.dep_two = tracking.Checkpointable()
save_root = tracking.AutoCheckpointable()
save_root.dep_one = tracking.AutoCheckpointable()
save_root.dep_two = tracking.AutoCheckpointable()
checkpointable_utils.add_variable(
save_root.dep_one, name="var1", initializer=32., dtype=dtypes.float64)
checkpointable_utils.add_variable(
@ -951,8 +951,8 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
self.evaluate(checkpointable_utils.gather_initializers(save_root))
save_path = checkpointable_utils.CheckpointableSaver(save_root).save(
os.path.join(checkpoint_directory, "ckpt"))
load_root = tracking.Checkpointable()
load_root.dep_one = tracking.Checkpointable()
load_root = tracking.AutoCheckpointable()
load_root.dep_one = tracking.AutoCheckpointable()
load_root.dep_two = load_root.dep_one
v1 = checkpointable_utils.add_variable(
load_root.dep_one, name="var1", shape=[], dtype=dtypes.float64)
@ -968,8 +968,8 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testDependencyLoop(self):
# Note: this test creates garbage during eager execution because it
# purposefully creates a reference cycle.
first = tracking.Checkpointable()
second = tracking.Checkpointable()
first = tracking.AutoCheckpointable()
second = tracking.AutoCheckpointable()
first.second = second
second.first = first
first.v = checkpointable_utils.add_variable(
@ -982,10 +982,10 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
os.path.join(checkpoint_directory, "ckpt"))
# Test deferred loading
first_load = tracking.Checkpointable()
first_load = tracking.AutoCheckpointable()
status = checkpointable_utils.CheckpointableSaver(
first_load).restore(save_path)
second_load = tracking.Checkpointable()
second_load = tracking.AutoCheckpointable()
first_load.second = second_load
second_load.first = first_load
with self.assertRaises(AssertionError):
@ -1014,7 +1014,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testRestoreOnAssign(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
first = tracking.Checkpointable()
first = tracking.AutoCheckpointable()
first.var1 = variables_lib.Variable(0., name="outside_var")
first.var2 = variables_lib.Variable(0., name="blah")
self.evaluate(first.var1.assign(4.))
@ -1022,7 +1022,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
save_path = checkpointable_utils.CheckpointableSaver(first).save(
checkpoint_prefix)
second = tracking.Checkpointable()
second = tracking.AutoCheckpointable()
second.var2 = variables_lib.Variable(0., name="blah")
status = checkpointable_utils.CheckpointableSaver(
second).restore(save_path)
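
For reference, the restore-on-assign behavior exercised by this test is also visible through the public `tf.train.Checkpoint` API. The sketch below is illustrative only and not part of this change; it assumes eager execution and a writable temporary directory:

import tensorflow as tf

tf.enable_eager_execution()

# Save a single variable, then restore it into a fresh Checkpoint whose
# dependency is attached only after restore() has been called.
root = tf.train.Checkpoint(var=tf.Variable(4.))
save_path = root.save("/tmp/restore_on_assign_demo/ckpt")

fresh = tf.train.Checkpoint()
status = fresh.restore(save_path)   # nothing attached yet; restore is deferred
fresh.var = tf.Variable(0.)         # the saved value (4.0) is applied on assignment
status.assert_consumed()
assert fresh.var.numpy() == 4.0
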
@ -1042,7 +1042,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variables_lib.Variable(0., name="v")
obj.opt = adam.Adam(0.1)
variables = [obj.var]
@ -1059,7 +1059,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
# No checkpoints are deleted by default
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
saver = checkpointable_utils.Checkpoint(obj=obj)
@ -1079,7 +1079,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
def testCheckpointStateChangingVarList(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
self.evaluate(checkpointable_utils.gather_initializers(obj))
checkpoint = checkpointable_utils.Checkpoint(obj=obj)
@ -1132,7 +1132,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variables_lib.Variable(0., name="v")
obj.opt = adam.Adam(0.1)
variables = [obj.var]
@ -1286,7 +1286,7 @@ class CheckpointingTests(parameterized.TestCase, test.TestCase):
load_status.assert_existing_objects_matched().run_restore_ops()
class _ManualScope(tracking.Checkpointable):
class _ManualScope(tracking.AutoCheckpointable):
def __call__(self):
with variable_scope.variable_scope("ManualScope") as vs:


@ -47,7 +47,7 @@ from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
class NonLayerCheckpointable(tracking.Checkpointable):
class NonLayerCheckpointable(tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()
@ -461,7 +461,7 @@ class CheckpointingTests(test.TestCase):
# pylint: enable=cell-var-from-loop
def _get_checkpoint_name(self, name):
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
checkpointable_utils.add_variable(
root, name=name, shape=[1, 2], dtype=dtypes.float64)
(named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
@ -503,7 +503,7 @@ class CheckpointingTests(test.TestCase):
def testDeferredSlotRestoration(self):
checkpoint_directory = self.get_temp_dir()
root = tracking.Checkpointable()
root = tracking.AutoCheckpointable()
root.var = checkpointable_utils.add_variable(
root, name="var", initializer=0.)
optimizer = adam.AdamOptimizer(0.1)
@ -526,7 +526,7 @@ class CheckpointingTests(test.TestCase):
14.))
slots_path = checkpointable_utils.CheckpointableSaver(root).save(
os.path.join(checkpoint_directory, "with_slots"))
new_root = tracking.Checkpointable()
new_root = tracking.AutoCheckpointable()
# Load the slot-containing checkpoint (deferred), then immediately overwrite
# the non-slot variable (also deferred).
slot_status = checkpointable_utils.CheckpointableSaver(
@ -572,7 +572,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
@ -590,7 +590,7 @@ class CheckpointingTests(test.TestCase):
with graph.as_default(), self.session(graph):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
obj = tracking.Checkpointable()
obj = tracking.AutoCheckpointable()
obj.var = variable_scope.get_variable(name="v", initializer=0.)
obj.opt = adam.AdamOptimizer(0.1)
obj.opt.minimize(obj.var.read_value())
@ -739,7 +739,7 @@ class CheckpointingTests(test.TestCase):
self.assertEqual(42., self.evaluate(optimizer.variables()[0]))
class _ManualScope(tracking.Checkpointable):
class _ManualScope(tracking.AutoCheckpointable):
def __call__(self):
with variable_scope.variable_scope("ManualScope") as vs:


@ -29,7 +29,7 @@ from tensorflow.python.training.checkpointable import tracking
from tensorflow.python.training.checkpointable import util as checkpointable_utils
class NonLayerCheckpointable(tracking.Checkpointable):
class NonLayerCheckpointable(tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()


@ -218,7 +218,7 @@ class Optimizer(
# Optimizers inherit from CheckpointableBase rather than Checkpointable
# since they do most of their dependency management themselves (slot
# variables are special-cased, and non-slot variables are keyed to graphs).
checkpointable.CheckpointableBase):
checkpointable.Checkpointable):
"""Base class for optimizers.
This class defines the API to add Ops to train a model. You never use this

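The distinction the comment above draws carries over to the new names: `base.Checkpointable` (previously `CheckpointableBase`) leaves dependency management to the subclass, while `tracking.AutoCheckpointable` (previously `Checkpointable`) tracks attribute assignments automatically. A minimal sketch of the difference, assuming the internal `_track_checkpointable` helper keeps its current signature:

from tensorflow.python.training.checkpointable import base
from tensorflow.python.training.checkpointable import tracking


class ManualDependencies(base.Checkpointable):
  """Registers checkpoint dependencies explicitly (the old CheckpointableBase)."""

  def __init__(self, child):
    # Attribute assignment alone is not tracked here; register the dependency.
    self._child = child
    self._track_checkpointable(child, name="child")


class AutomaticDependencies(tracking.AutoCheckpointable):
  """Attribute assignment creates the dependency automatically (the old Checkpointable)."""

  def __init__(self, child):
    self.child = child  # tracked without any extra bookkeeping
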

@ -2775,7 +2775,7 @@ class ScopedGraphTest(test.TestCase):
self.assertEqual(2.0, self.evaluate(var_dict2["variable2:0"]))
class _OwnsAVariableSimple(checkpointable_base.CheckpointableBase):
class _OwnsAVariableSimple(checkpointable_base.Checkpointable):
"""A Checkpointable object which can be saved using a tf.train.Saver."""
def __init__(self):
@ -2808,7 +2808,7 @@ class _MirroringSaveable(
self._mirrored_variable.assign(tensor))
class _OwnsMirroredVariables(checkpointable_base.CheckpointableBase):
class _OwnsMirroredVariables(checkpointable_base.Checkpointable):
"""A Checkpointable object which returns a more complex SaveableObject."""
def __init__(self):
@ -2831,7 +2831,7 @@ class _OwnsMirroredVariables(checkpointable_base.CheckpointableBase):
return self.non_dep_variable.name
class NonLayerCheckpointable(checkpointable_tracking.Checkpointable):
class NonLayerCheckpointable(checkpointable_tracking.AutoCheckpointable):
def __init__(self):
super(NonLayerCheckpointable, self).__init__()


@ -165,7 +165,7 @@ def saveable_objects_for_op(op, name):
yield ResourceVariableSaveable(
variable, variable._save_slice_info.spec, name)
# pylint: enable=protected-access
elif isinstance(op, checkpointable.CheckpointableBase) and not isinstance(
elif isinstance(op, checkpointable.Checkpointable) and not isinstance(
op, variables.Variable):
# pylint: disable=protected-access
for attr, factory in op._gather_saveables_for_checkpoint().items():
@ -250,7 +250,7 @@ def op_list_to_dict(op_list, convert_variable_to_tensor=True):
names_to_saveables[name].append(var)
else:
names_to_saveables[name] = [var]
elif (isinstance(var, checkpointable.CheckpointableBase)
elif (isinstance(var, checkpointable.Checkpointable)
and not isinstance(var, variables.Variable)):
checkpointable_saveables = [
(factory() if callable(factory) else factory)


@ -2,7 +2,7 @@ path: "tensorflow.Variable"
tf_class {
is_instance: "<class \'tensorflow.python.ops.variables.VariableV1\'>"
is_instance: "<class \'tensorflow.python.ops.variables.Variable\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "SaveSliceInfo"


@ -1,7 +1,7 @@
path: "tensorflow.data.Iterator"
tf_class {
is_instance: "<class \'tensorflow.python.data.ops.iterator_ops.Iterator\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "initializer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.engine.training.Model\'>"
is_instance: "<class \'tensorflow.python.keras.engine.network.Network\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.engine.training.Model\'>"
is_instance: "<class \'tensorflow.python.keras.engine.network.Network\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.PeepholeLSTMCell\'>"
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.LSTMCell\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Activation"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.core.Activation\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.ActivityRegularization"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.core.ActivityRegularization\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.merge.Add\'>"
is_instance: "<class \'tensorflow.python.keras.layers.merge._Merge\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.AlphaDropout"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.noise.AlphaDropout\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.merge.Average\'>"
is_instance: "<class \'tensorflow.python.keras.layers.merge._Merge\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.AveragePooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.Pooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.normalization.BatchNormalizationV1\'>"
is_instance: "<class \'tensorflow.python.keras.layers.normalization.BatchNormalizationV2\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.wrappers.Bidirectional\'>"
is_instance: "<class \'tensorflow.python.keras.layers.wrappers.Wrapper\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.merge.Concatenate\'>"
is_instance: "<class \'tensorflow.python.keras.layers.merge._Merge\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional_recurrent.ConvRNN2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.RNN\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Cropping1D"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Cropping1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Cropping2D"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Cropping2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Cropping3D"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Cropping3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.cudnn_recurrent._CuDNNRNN\'>"
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.RNN\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.cudnn_recurrent._CuDNNRNN\'>"
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.RNN\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Dense"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.core.Dense\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -4,7 +4,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.convolutional.Conv\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.merge.Dot\'>"
is_instance: "<class \'tensorflow.python.keras.layers.merge._Merge\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Dropout"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.core.Dropout\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.ELU"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.advanced_activations.ELU\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Embedding"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.embeddings.Embedding\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.Flatten"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.core.Flatten\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.GRUCell"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.GRUCell\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.GRU\'>"
is_instance: "<class \'tensorflow.python.keras.layers.recurrent.RNN\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.GaussianDropout"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.noise.GaussianDropout\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -2,7 +2,7 @@ path: "tensorflow.keras.layers.GaussianNoise"
tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.noise.GaussianNoise\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalAveragePooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalMaxPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalMaxPooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling2D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalMaxPooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling3D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"


@ -3,7 +3,7 @@ tf_class {
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalMaxPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.layers.pooling.GlobalPooling1D\'>"
is_instance: "<class \'tensorflow.python.keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.CheckpointableBase\'>"
is_instance: "<class \'tensorflow.python.training.checkpointable.base.Checkpointable\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"

Some files were not shown because too many files have changed in this diff.