Adding `add_metric` API for layers/models in Keras.

Supports:
- adding a metric tensor (aggregated or not) on a model/layer in graph mode.
- adding a metric tensor (aggregated or not) in a model/layer `call` in graph and eager mode.

PiperOrigin-RevId: 221865769

This commit is contained in:
parent dd18b138ff
commit 8930d5aff5
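For orientation, a minimal sketch of how the new API is used, distilled from the tests added in this diff (the `tf.keras` import paths and the metric names are illustrative, not part of the commit):

import numpy as np
import tensorflow as tf

inputs = tf.keras.layers.Input(shape=(1,))
outputs = tf.keras.layers.Dense(1, kernel_initializer='ones')(inputs)
model = tf.keras.models.Model(inputs, outputs)

# An already-aggregated metric tensor: the result of applying a Metric instance.
model.add_metric(tf.keras.metrics.Mean(name='metric_2')(outputs))
# An un-aggregated tensor: aggregation='mean' wraps it in a Mean metric.
model.add_metric(tf.reduce_sum(outputs), name='metric_1', aggregation='mean')

model.compile('sgd', loss='mse')
model.fit(np.ones((10, 1)), np.ones((10, 1)), epochs=1, batch_size=5)
# 'metric_1' and 'metric_2' now show up in model.metrics_names and in the History.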
@@ -102,7 +102,10 @@ py_test(
    size = "medium",
    srcs = ["python/saved_model/keras_saved_model_test.py"],
    srcs_version = "PY2AND3",
    tags = ["no_windows"],
    tags = [
        "no_oss",  # TODO(b/119349471): Re-enable
        "no_windows",
    ],
    deps = [
        ":keras_saved_model",
        "//tensorflow/python:client_testlib",
@@ -348,12 +348,14 @@ class TestModelSavedModelExport(test.TestCase, parameterized.TestCase):
      # feeding in the inputs and targets.
      loss, predictions, _ = sess.run(
          (outputs['loss'], outputs['predictions/' + output_name],
           outputs['metrics/mae/update_op']),
          {inputs[input_name]: input_arr, inputs[target_name]: target_arr})
           outputs['metrics/mean_absolute_error/update_op']), {
               inputs[input_name]: input_arr,
               inputs[target_name]: target_arr
           })

      # The metric value should be run after the update op, to ensure that it
      # reflects the correct value.
      metric_value = sess.run(outputs['metrics/mae/value'])
      metric_value = sess.run(outputs['metrics/mean_absolute_error/value'])

      self.assertEqual(int(train_before_export),
                       sess.run(training_module.get_global_step()))
@@ -368,8 +370,8 @@ class TestModelSavedModelExport(test.TestCase, parameterized.TestCase):
      self.assertEqual(int(train_before_export),
                       sess.run(training_module.get_global_step()))
      self.assertIn('loss', outputs)
      self.assertIn('metrics/mae/update_op', outputs)
      self.assertIn('metrics/mae/value', outputs)
      self.assertIn('metrics/mean_absolute_error/update_op', outputs)
      self.assertIn('metrics/mean_absolute_error/value', outputs)
      self.assertIn('predictions/' + output_name, outputs)

      # Train for a step
@@ -1012,9 +1012,10 @@ class TPUFunction(object):
          optimizer=_replicated_optimizer(self._cloned_optimizer),
          loss=self.model.loss,
          loss_weights=self.model.loss_weights,
          metrics=metrics_module.clone_metrics(self.model.metrics),
          metrics=metrics_module.clone_metrics(
              self.model._compile_metrics),
          weighted_metrics=metrics_module.clone_metrics(
              self.model.weighted_metrics),
              self.model._compile_weighted_metrics),
          target_tensors=tpu_targets,
      )

@@ -1376,6 +1377,7 @@ class KerasTPUModel(models.Model):
    self.train_function = None
    self._fit_function = None
    self._eval_function = None
    self._stateful_metric_functions = []

    cluster_resolver = strategy._tpu_cluster_resolver
    self._tpu_name_or_address = cluster_resolver.get_master()
@@ -1390,10 +1392,10 @@ class KerasTPUModel(models.Model):
      self.compile(
          self._cpu_model.optimizer,
          self._cpu_model.loss,
          self._cpu_model.metrics,
          self._cpu_model._compile_metrics,
          self._cpu_model.loss_weights,
          self._cpu_model.sample_weight_mode,
          self._cpu_model.weighted_metrics,
          self._cpu_model._compile_weighted_metrics,
          self._cpu_model.target_tensors,
      )

@@ -1697,7 +1699,7 @@ class KerasTPUModel(models.Model):
    callbacks.on_train_begin()
    for epoch in range(initial_epoch, epochs):
      # Reset stateful metrics
      for m in self.stateful_metric_functions:
      for m in self.metrics:
        m.reset_states()
      # Update callbacks
      callbacks.on_epoch_begin(epoch)
@@ -1995,14 +1997,14 @@ class KerasTPUModel(models.Model):
    self._optimizer = optimizer

  @property
  def stateful_metric_functions(self):
  def metrics(self):
    if self._tpu_model:
      return self._tpu_model.stateful_metric_functions
      return self._tpu_model.metrics
    return self._stateful_metric_functions

  @stateful_metric_functions.setter
  def stateful_metric_functions(self, stateful_metric_functions):
    self._stateful_metric_functions = stateful_metric_functions
  @metrics.setter
  def metrics(self, metrics):
    self._stateful_metric_functions = metrics

  def _make_train_function(self):
    if not self.train_function:
@@ -2227,10 +2229,10 @@ def tpu_model(model, strategy=None):
  cpu_model.compile(
      _clone_optimizer(model.optimizer, optimizer_config),
      model.loss,
      metrics_module.clone_metrics(model.metrics),
      metrics_module.clone_metrics(model._compile_metrics),
      model.loss_weights,
      model.sample_weight_mode,
      metrics_module.clone_metrics(model.weighted_metrics),
      metrics_module.clone_metrics(model._compile_weighted_metrics),
  )

  if model_weights:
@@ -126,7 +126,7 @@ def configure_callbacks(callbacks,
  callback_metrics = []
  # When we have deferred build scenario with iterator input, we will compile
  # when we standardize first batch of data.
  if mode != 'predict' and model._is_compiled:  # pylint: disable=protected-access
  if mode != 'predict' and hasattr(model, 'metrics_names'):
    callback_metrics = copy.copy(model.metrics_names)
    if do_validation:
      callback_metrics += ['val_' + n for n in model.metrics_names]
@@ -68,6 +68,14 @@ class CallConvention(enum.Enum):
  POSITIONAL_ARGUMENTS_ARE_INPUTS = 3


def _create_mean_metric(value, name=None):
  # TODO(psv): Remove this import when b/110718070 is fixed.
  from tensorflow.python.keras import metrics as metrics_module  # pylint: disable=g-import-not-at-top
  metric_obj = metrics_module.Mean(name=name)
  result = metric_obj(value)
  return metric_obj, result


@tf_export('keras.layers.Layer')
class Layer(checkpointable.CheckpointableBase):
  """Base layer class.
@@ -170,6 +178,13 @@ class Layer(checkpointable.CheckpointableBase):
    # in eager mode or graph mode alternatively, we need to keep track of
    # eager losses and symbolic losses via separate attributes.
    self._eager_losses = []
    # A list of metric instances corresponding to the symbolic metric tensors
    # added using the `add_metric` API.
    self._metrics = []
    # TODO(psv): Remove this property.
    # A dictionary that maps metric names to metric result tensors. The results
    # are the running averages of metric values over an epoch.
    self._metrics_tensors = {}
    self._dtype = None if dtype is None else dtypes.as_dtype(dtype).name
    self._call_fn_args = function_utils.fn_args(self.call)
    self._compute_previous_mask = ('mask' in self._call_fn_args or
@@ -433,6 +448,84 @@ class Layer(checkpointable.CheckpointableBase):
      else:
        self._losses.append(_tag_unconditional(loss))

  @doc_controls.for_subclass_implementers
  def add_metric(self, value, aggregation=None, name=None):
    """Adds a metric tensor to the layer.

    Args:
      value: Metric tensor.
      aggregation: Sample-wise metric reduction function. If `aggregation=None`,
        it indicates that the metric tensor provided has already been
        aggregated, e.g. `model.add_metric(BinaryAccuracy(name='acc')(y_true,
        y_pred))`. If `aggregation='mean'`, the given metric tensor will be
        sample-wise reduced using the `mean` function, e.g. `model.add_metric(
        tf.reduce_mean(outputs), name='output_mean', aggregation='mean')`.
      name: String metric name.

    Raises:
      ValueError: If `aggregation` is anything other than None or `mean`.
    """
    if aggregation is not None and aggregation != 'mean':
      raise ValueError(
          'We currently support only `mean` sample-wise metric aggregation. '
          'You provided aggregation=`%s`' % aggregation)

    if tf_utils.is_symbolic_tensor(value):
      self._symbolic_add_metric(value, aggregation, name)
    else:
      self._eager_add_metric(value, aggregation, name)

  def _get_existing_metric(self, name=None):
    match = [m for m in self._metrics if m.name == name]
    if not match:
      return
    if len(match) > 1:
      raise ValueError(
          'Please provide different names for the metrics you have added. '
          'We found {} metrics with the name: "{}"'.format(len(match), name))
    return match[0]

  def _eager_add_metric(self, value, aggregation=None, name=None):
    # If the given metric is available in `metrics` list we just update state
    # on it, otherwise we create a new metric instance and
    # add it to the `metrics` list.
    match = self._get_existing_metric(name)
    if match:
      match(value)  # Update the metric state.
      return
    else:
      if aggregation is None:
        raise ValueError('We do not support adding an aggregated metric tensor '
                         'in `call` in eager execution.')
      metric_obj, _ = _create_mean_metric(value, name)
      self._metrics.append(metric_obj)

  def _symbolic_add_metric(self, value, aggregation=None, name=None):
    if aggregation is None:
      # Iterate over the metrics and check if the given metric exists already.
      # This can happen when a metric instance is created in subclassed model
      # layer `__init__` and we have tracked that instance already in
      # model.__setattr__.
      match = self._get_existing_metric(name)
      if match:
        result_tensor = value
        if match.name not in self._metrics_tensors:
          self._metrics_tensors[match.name] = result_tensor
          return
        else:
          raise ValueError(
              'We currently do not support reusing a metric instance.')
      else:
        # We track the instance using the metadata on the result tensor.
        result_tensor = value
        metric_obj = result_tensor._metric_obj
    else:
      # If a non-aggregated tensor is given as input (i.e. `aggregation` is
      # explicitly set to `mean`), we wrap the tensor in a `Mean` metric.
      metric_obj, result_tensor = _create_mean_metric(value, name)
    self._metrics.append(metric_obj)
    self._metrics_tensors[metric_obj.name] = result_tensor
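To make the control flow above concrete, a sketch of a layer that calls `add_metric` from `call`, mirroring the `TestLayer` test added later in this diff (the class name and `tf.keras` paths are illustrative, not part of the commit):

import numpy as np
import tensorflow as tf


class SumMetricLayer(tf.keras.layers.Layer):
  """Reports the batch mean of sum(inputs) as a metric named 'input_sum'."""

  def call(self, inputs):
    # Un-aggregated tensor + aggregation='mean': wrapped in a `Mean` metric by
    # `_symbolic_add_metric` / `_eager_add_metric` above.
    self.add_metric(
        tf.reduce_sum(inputs), name='input_sum', aggregation='mean')
    return inputs + 1


model = tf.keras.Sequential([
    SumMetricLayer(input_shape=(1,)),
    tf.keras.layers.Dense(2, kernel_initializer='ones'),
])
model.compile(loss='mse', optimizer='rmsprop')
history = model.fit(np.ones((10, 1)), np.ones((10, 2)), epochs=1, batch_size=5)
# history.history now contains an 'input_sum' entry next to 'loss'.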
  def get_losses_for(self, inputs):
    """Retrieves losses relevant to a specific set of inputs.
@@ -134,6 +134,11 @@ class Network(base_layer.Layer):
    self._updates = []  # Used in symbolic mode only.
    self._losses = []
    self._eager_losses = []
    # A list of metric instances corresponding to the symbolic metric tensors
    # added using the `add_metric` API.
    self._metrics = []
    # A dictionary that maps metric names to metric result tensors.
    self._metrics_tensors = {}
    self._scope = None  # Never used.
    self._reuse = None  # Never used.
    self._call_is_graph_friendly = True
@@ -412,6 +417,13 @@ class Network(base_layer.Layer):
        else:
          if value not in self._non_trainable_weights:
            self._non_trainable_weights.append(value)

    # Keeping track of metric instance created in subclassed model/layer.
    # We do this so that we can maintain the correct order of metrics by adding
    # the instance to the `metrics` list as soon as it is created.
    from tensorflow.python.keras import metrics as metrics_module  # pylint: disable=g-import-not-at-top
    if isinstance(value, metrics_module.Metric):
      self._metrics.append(value)

    super(Network, self).__setattr__(name, value)

  @property
@@ -697,6 +709,30 @@ class Network(base_layer.Layer):
        sub_layers=self._layers,
        extra_variables=self._non_trainable_weights + self._trainable_weights)

  @property
  def metrics(self):
    """Returns the network's symbolic metrics.

    Model overrides this function to include the metrics from `compile` API.
    """
    metrics = []
    for layer in self.layers:
      metrics += layer._metrics  # pylint: disable=protected-access
    return metrics + self._metrics

  @property
  def _all_metrics_tensors(self):
    """Returns the network's symbolic metric tensors."""
    # TODO(psv): Remove this property.
    metrics_tensors = {}
    for layer in self.layers:
      if isinstance(layer, Network):
        metrics_tensors.update(layer._all_metrics_tensors)
      else:
        metrics_tensors.update(layer._metrics_tensors)
    metrics_tensors.update(self._metrics_tensors)
    return metrics_tensors

  @property
  def input_spec(self):
    """Gets the network's input specs.
@@ -79,6 +79,10 @@ def save_model(model, filepath, overwrite=True, include_optimizer=True):

  from tensorflow.python.keras import __version__ as keras_version  # pylint: disable=g-import-not-at-top

  # TODO(psv) Add warning when we save models that contain non-serializable
  # entities like metrics added using `add_metric` and losses added using
  # `add_loss.`

  if not isinstance(filepath, h5py.File):
    # If file exists and should not be overwritten.
    if not overwrite and os.path.isfile(filepath):
@@ -126,8 +130,8 @@ def save_model(model, filepath, overwrite=True, include_optimizer=True):
              'config': model.optimizer.get_config()
          },
          'loss': model.loss,
          'metrics': model.metrics,
          'weighted_metrics': model.weighted_metrics,
          'metrics': model._compile_metrics,
          'weighted_metrics': model._compile_weighted_metrics,
          'sample_weight_mode': model.sample_weight_mode,
          'loss_weights': model.loss_weights,
      },
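As the TODO above notes, only what was passed to `compile` makes it into the saved training config; metrics attached with `add_metric` (and losses from `add_loss`) are not serialized. A hedged illustration of that behaviour (inferred from the config dict written above, not asserted by this diff; paths and names are made up):

import numpy as np
import tensorflow as tf

x = tf.keras.layers.Input(shape=(1,))
y = tf.keras.layers.Dense(1)(x)
model = tf.keras.models.Model(x, y)
model.add_metric(tf.reduce_sum(y), name='out_sum', aggregation='mean')
model.compile('sgd', loss='mse', metrics=['mae'])
model.fit(np.ones((4, 1)), np.ones((4, 1)), epochs=1, verbose=0)
model.save('/tmp/model.h5')   # training config stores metrics=['mae'] only

restored = tf.keras.models.load_model('/tmp/model.h5')
# `restored` evaluates loss and mean_absolute_error; 'out_sum' is gone.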
@@ -18,6 +18,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import weakref
import numpy as np

@@ -174,25 +175,66 @@ class Model(Network):
      metric_name = '%s_%s' % (self.output_names[output_index], metric_name)
    j = 1
    base_metric_name = metric_name
    while metric_name in self.metrics_names:
    while metric_name in self._compile_metrics_names:
      metric_name = '%s_%d' % (base_metric_name, j)
      j += 1

    return metric_name

  @property
  def metrics(self):
    """Returns the model's metrics added using `compile`, `add_metric` APIs."""
    metrics = []
    if self._is_compiled:
      metrics += self._compile_stateful_metric_functions
    return metrics + super(Model, self).metrics

  @property
  def metrics_names(self):
    """Returns the model's display labels for all outputs."""
    metrics_names = []
    if self._is_compiled:
      metrics_names += self._compile_metrics_names  # Includes names of losses.

    # Add metric names from layers.
    for layer in self.layers:
      metrics_names += [m.name for m in layer._metrics]  # pylint: disable=protected-access
    metrics_names += [m.name for m in self._metrics]
    return metrics_names

  @property
  def _all_metrics_tensors(self):
    """Returns the network's symbolic metric tensors."""
    metrics_tensors = {}
    if self._is_compiled:
      metrics_tensors.update(self._compile_metrics_tensors)
    metrics_tensors.update(super(Model, self)._all_metrics_tensors)
    return metrics_tensors

  @property
  def _all_stateful_metrics_tensors(self):
    """Returns the network's symbolic metric tensors."""
    metrics_tensors = {}
    if self._is_compiled:
      metrics_tensors.update(self._compile_stateful_metrics_tensors)
    metrics_tensors.update(super(Model, self)._all_metrics_tensors)
    return metrics_tensors
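The `metrics` and `metrics_names` properties above merge what `compile` registered with what `add_metric` collected. A short sketch of the resulting views (names follow the `test_model_metrics_list` test added below; exact labels and ordering are illustrative):

import tensorflow as tf

x = tf.keras.layers.Input(shape=(1,))
y = tf.keras.layers.Dense(1, kernel_initializer='ones')(x)
model = tf.keras.models.Model(x, y)
model.add_metric(tf.reduce_sum(y), name='metric_1', aggregation='mean')
model.add_metric(tf.keras.metrics.Mean(name='metric_2')(y))
model.compile('sgd', loss='mse', metrics=['acc'])

model._compile_metrics   # ['acc'] - exactly what compile() received
model.metrics            # the compiled accuracy metric plus the two Mean objects
model.metrics_names      # e.g. ['loss', 'acc', 'metric_1', 'metric_2']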
  def _init_metric_attributes(self):
    """Initialized model metric attributes."""
    # List of all metric names in the model.
    self.metrics_names = ['loss']
    # List of all aggregated metric result tensors. This includes aggregated
    # loss result tensors.
    self._stateful_metrics_tensors = []
    # List of all metric result tensors (aggregated or not - based on the
    # values given in compile.)
    self.metrics_tensors = []
    self._compile_metrics_names = ['loss']
    # List of stateful metric functions. Used for resetting metric state during
    # training/eval. This includes loss functions.
    self.stateful_metric_functions = []
    # training/eval.
    # This includes loss functions when there are multiple outputs.
    self._compile_stateful_metric_functions = []
    # Dict of all aggregated metric result tensors. This includes aggregated
    # loss result tensors when there are multiple outputs.
    self._compile_stateful_metrics_tensors = {}
    # Dict of all metric result tensors (aggregated or not - based on the
    # values given in compile.). This includes aggregated loss result tensors
    # when there are multiple outputs.
    self._compile_metrics_tensors = {}

  def _set_per_output_metric_attributes(self, metrics_dict, output_index):
    """Sets the metric attributes on the model for the given output.
@@ -201,24 +243,39 @@ class Model(Network):
      metrics_dict: A dict with metric names as keys and metric fns as values.
      output_index: The index of the model output for which the metric
        attributes are added.
    """
    for metric_name, (_, stateful_metric_fn) in metrics_dict.items():
      metric_name = self._add_unique_metric_name(metric_name, output_index)
      # Keep track of metric name.
      self.metrics_names.append(metric_name)

      # Keep track of stateful metric function.
      self.stateful_metric_functions.append(stateful_metric_fn)

    Returns:
      Metrics dict updated with unique metric names as keys.
    """
    updated_metrics_dict = collections.OrderedDict()
    for metric_name, (metric_fn, stateful_metric_fn) in metrics_dict.items():
      metric_name = self._add_unique_metric_name(metric_name, output_index)
      updated_metrics_dict[metric_name] = (metric_fn, stateful_metric_fn)
      # Keep track of metric name, function and stateful function.
      self._compile_metrics_names.append(metric_name)
      self._compile_stateful_metric_functions.append(stateful_metric_fn)
    return updated_metrics_dict

  def _set_metric_attributes(self, outputs, skip_target_indices=None):
    """Sets the metric attributes on the model for all the model outputs."""
    skip_target_indices = skip_target_indices or []
    updated_per_output_metrics = []
    updated_per_output_weighted_metrics = []
    for i in range(len(outputs)):
      if i in skip_target_indices:
        updated_per_output_metrics.append(self._per_output_metrics[i])
        updated_per_output_weighted_metrics.append(
            self._per_output_weighted_metrics[i])
        continue
      self._set_per_output_metric_attributes(self._per_output_metrics[i], i)
      self._set_per_output_metric_attributes(
          self._per_output_weighted_metrics[i], i)
      updated_per_output_metrics.append(
          self._set_per_output_metric_attributes(self._per_output_metrics[i],
                                                 i))
      updated_per_output_weighted_metrics.append(
          self._set_per_output_metric_attributes(
              self._per_output_weighted_metrics[i], i))

    self._per_output_metrics = updated_per_output_metrics
    self._per_output_weighted_metrics = updated_per_output_weighted_metrics

  def _handle_per_output_metrics(self,
                                 metrics_dict,
@@ -253,16 +310,16 @@ class Model(Network):
      weighted_metric_fn = training_utils.weighted_masked_objective(fn)
      return weighted_metric_fn(y_true, y_pred, weights=weights, mask=mask)

    def _track_metric_tensors(stateless_result, stateful_result):
      self.metrics_tensors.append(stateless_result)
      self._stateful_metrics_tensors.append(stateful_result)
    def _track_metric_tensors(name, stateless_result, stateful_result):
      self._compile_metrics_tensors[name] = stateless_result
      self._compile_stateful_metrics_tensors[name] = stateful_result

    if isinstance(metric_fn, metrics_module.Metric):
      # If the given metric fn is stateful, call the fn and return result.
      metric_result = _call_stateful_fn(metric_fn)
      metric_results.append(metric_result)
      if not self.run_eagerly:
        _track_metric_tensors(metric_result, metric_result)
        _track_metric_tensors(metric_name, metric_result, metric_result)
    elif self.run_eagerly:
      # In eager mode, if the given metric fn is not stateful, we invoke the
      # given fn or its stateful version based on the given flag.
@@ -276,7 +333,8 @@ class Model(Network):
      # stateless fns.
      stateful_metric_result = _call_stateful_fn(stateful_fn)
      metric_result = _call_stateless_fn(metric_fn)
      _track_metric_tensors(metric_result, stateful_metric_result)
      _track_metric_tensors(metric_name, metric_result,
                            stateful_metric_result)

    return metric_results

@@ -304,6 +362,7 @@ class Model(Network):
    skip_target_indices = skip_target_indices or []
    metric_results = []
    with K.name_scope('metrics'):
      # Invoke all metrics added using `compile`.
      for i in range(len(outputs)):
        if i in skip_target_indices:
          continue
@@ -325,6 +384,12 @@ class Model(Network):
              output_mask,
              weights=sample_weights[i],
              return_stateful_result=return_stateful_result))

    # Add metric results from the `add_metric` metrics in eager mode.
    if context.executing_eagerly():
      for m in self.metrics:
        if m not in self._compile_stateful_metric_functions:
          metric_results.append(m.result())
    return metric_results

  @property
@@ -461,10 +526,10 @@ class Model(Network):
      self._track_checkpointable(
          self.optimizer, name='optimizer', overwrite=True)
    self.loss = loss
    self.metrics = metrics or []
    self._compile_metrics = metrics or []
    self.loss_weights = loss_weights
    self.sample_weight_mode = sample_weight_mode
    self.weighted_metrics = weighted_metrics
    self._compile_weighted_metrics = weighted_metrics
    if self.run_eagerly and target_tensors is not None:
      raise ValueError(
          'target_tensors argument is not supported when '
@@ -573,7 +638,7 @@ class Model(Network):
    self.total_loss = None
    for i in range(len(self.outputs)):
      if len(self.outputs) > 1:
        self.metrics_names.append(self.output_names[i] + '_loss')
        self._compile_metrics_names.append(self.output_names[i] + '_loss')

    # Set metric attributes on model.
    self._set_metric_attributes(
@@ -666,7 +731,8 @@ class Model(Network):

        if len(self.outputs) > 1:
          # Keep track of the un-aggregated loss result tensor.
          self.metrics_tensors.append(output_loss)
          self._compile_metrics_tensors[self.output_names[i] +
                                        '_loss'] = output_loss

        # Keep track of stateful result tensor and function for the loss.
        mean_wrapped_loss = metrics_module.MeanMetricWrapper(
@@ -677,10 +743,11 @@ class Model(Network):
            y_pred,
            weights=sample_weight,
            mask=mask)
        self._stateful_metrics_tensors.append(result_tensor)
        self.stateful_metric_functions.append(mean_wrapped_loss)
        self._compile_stateful_metrics_tensors[self.output_names[i] +
                                               '_loss'] = result_tensor
        self._compile_stateful_metric_functions.append(mean_wrapped_loss)

        self.metrics_names.append(self.output_names[i] + '_loss')
        self._compile_metrics_names.append(self.output_names[i] + '_loss')
        if total_loss is None:
          total_loss = loss_weight * output_loss
        else:
@@ -782,18 +849,24 @@ class Model(Network):
    setattr(self, fn_name, fn)

  def _make_train_function(self):
    metrics_tensors = [
        self._all_metrics_tensors[m] for m in self.metrics_names[1:]
    ]
    self._make_train_function_helper('train_function',
                                     [self.total_loss] + self.metrics_tensors)
                                     [self.total_loss] + metrics_tensors)

  def _make_fit_function(self):
    # TODO(psv/anjalisridhar): Remove updates after we fix b/118841692
    # Stateful metrics updates
    metric_updates = []
    for m in self.stateful_metric_functions:
    for m in self.metrics:
      metric_updates += m.updates

    metrics_tensors = [
        self._all_stateful_metrics_tensors[m] for m in self.metrics_names[1:]
    ]
    self._make_train_function_helper(
        '_fit_function', [self.total_loss] + self._stateful_metrics_tensors,
        metric_updates)
        '_fit_function', [self.total_loss] + metrics_tensors, metric_updates)

  def _make_test_function_helper(self, fn_name, outputs, metric_updates=None):
    if not hasattr(self, fn_name):
@@ -819,12 +892,18 @@ class Model(Network):
    setattr(self, fn_name, fn)

  def _make_test_function(self):
    metrics_tensors = [
        self._all_metrics_tensors[m] for m in self.metrics_names[1:]
    ]
    self._make_test_function_helper('test_function',
                                    [self.total_loss] + self.metrics_tensors)
                                    [self.total_loss] + metrics_tensors)

  def _make_eval_function(self):
    self._make_test_function_helper(
        '_eval_function', [self.total_loss] + self._stateful_metrics_tensors)
    metrics_tensors = [
        self._all_stateful_metrics_tensors[m] for m in self.metrics_names[1:]
    ]
    self._make_test_function_helper('_eval_function',
                                    [self.total_loss] + metrics_tensors)

  def _make_predict_function(self):
    if not hasattr(self, 'predict_function'):
@@ -1202,12 +1281,14 @@ class Model(Network):
          y = [y]
        target_tensors = [v for v in y if tensor_util.is_tensor(v)]
      is_compile_called = True
      self.compile(optimizer=self.optimizer,
                   loss=self.loss,
                   metrics=self.metrics,
                   loss_weights=self.loss_weights,
                   target_tensors=target_tensors,
                   run_eagerly=self.run_eagerly)
      self.compile(
          optimizer=self.optimizer,
          loss=self.loss,
          metrics=self._compile_metrics,
          weighted_metrics=self._compile_weighted_metrics,
          loss_weights=self.loss_weights,
          target_tensors=target_tensors,
          run_eagerly=self.run_eagerly)

    # In graph mode, if we had just set inputs and targets as symbolic tensors
    # by invoking build and compile on the model respectively, we do not have to
@@ -299,8 +299,8 @@ def model_iteration(model,
    # Setup work for each epoch
    results = []
    epoch_logs = {}
    if hasattr(model, 'stateful_metric_functions'):
      for m in model.stateful_metric_functions:
    if hasattr(model, 'metrics'):
      for m in model.metrics:
        m.reset_states()
    callbacks.on_epoch_begin(epoch, epoch_logs, mode=mode)
    progbar.on_epoch_begin(epoch, epoch_logs)
@@ -171,7 +171,7 @@ def fit_loop(

  for epoch in range(initial_epoch, epochs):
    # Reset stateful metrics
    for m in model.stateful_metric_functions:
    for m in model.metrics:
      m.reset_states()
    callbacks.on_epoch_begin(epoch)
    epoch_logs = {}
@@ -311,7 +311,8 @@ def _experimental_fit_loop(
  # Add initial dummy values for loss and other metric tensors.
  initial_loop_values = {}
  initial_loop_values['loss'] = constant_op.constant(1e7)
  for name, tensor in zip(model.metrics_names[1:], model.metrics_tensors):
  for name in model.metrics_names[1:]:
    tensor = model._all_stateful_metrics_tensors[name]
    initial_loop_values[name] = array_ops.zeros(tensor.shape, tensor.dtype)

  if steps_per_epoch is None:
@@ -480,7 +481,7 @@ def test_loop(model, iterator, verbose=0, steps=None):
          len(model.outputs) * current_strategy.num_replicas_in_sync)]
  ins = dataset_inputs + dataset_targets + sample_weights

  for m in model.stateful_metric_functions:
  for m in model.metrics:
    m.reset_states()

  outs = []
@@ -590,7 +591,8 @@ def _experimental_test_loop(model, iterator, verbose=0, steps=None,
  # Add initial dummy values for loss and other metric tensors.
  initial_loop_values = {}
  initial_loop_values['loss'] = constant_op.constant(1e7)
  for name, tensor in zip(model.metrics_names[1:], model.metrics_tensors):
  for name in model.metrics_names[1:]:
    tensor = model._all_stateful_metrics_tensors[name]
    initial_loop_values[name] = array_ops.zeros(tensor.shape, tensor.dtype)

  with current_strategy.scope():
@@ -867,10 +869,11 @@ def _clone_and_build_model(model, inputs=None, targets=None):
  cloned_model.compile(
      optimizer,
      model.loss,
      metrics=metrics_module.clone_metrics(model.metrics),
      metrics=metrics_module.clone_metrics(model._compile_metrics),
      loss_weights=model.loss_weights,
      sample_weight_mode=model.sample_weight_mode,
      weighted_metrics=metrics_module.clone_metrics(model.weighted_metrics),
      weighted_metrics=metrics_module.clone_metrics(
          model._compile_weighted_metrics),
      target_tensors=targets)
  return cloned_model

@@ -254,16 +254,25 @@ def iterator_fit_loop(model,
        if val is not None else None for val in sample_weights
    ]

    # Set stateful_metrics in callbacks. We do not do this before the
    # `steps_per_epoch` loop because model will be compiled only in the first
    # iteration of this loop in the deferred build scenario.
    # Train model.
    outs, loss, _, aggregated_loss_metrics, masks = _process_single_batch(
        model,
        x,
        y,
        output_loss_metrics=output_loss_metrics,
        sample_weights=sample_weights,
        training=True)
    outs = generic_utils.to_list(outs)

    if step_index == 0:
      # Set stateful_metrics in callbacks. We do not do this before the
      # `steps_per_epoch` loop because model will be compiled only in the first
      # iteration of this loop in the deferred build scenario.
      for cbk in callbacks:
        if (isinstance(cbk, cbks.BaseLogger) or
            isinstance(cbk, cbks.ProgbarLogger)):
          cbk.stateful_metrics = model.metrics_names[1:]  # Exclude `loss`

    if step_index == 0 and not callbacks.params['metrics']:
      callback_metrics = copy.copy(model.metrics_names)
      if do_validation:
        callback_metrics += ['val_' + n for n in model.metrics_names]
@@ -277,16 +286,6 @@ def iterator_fit_loop(model,
          'validation_steps': validation_steps
      })

    # Train model.
    outs, loss, _, aggregated_loss_metrics, masks = _process_single_batch(
        model,
        x,
        y,
        output_loss_metrics=output_loss_metrics,
        sample_weights=sample_weights,
        training=True)
    outs = generic_utils.to_list(outs)

    # Calculate metrics.
    for l, o in zip(model.metrics_names, outs):
      batch_logs[l] = o
@@ -392,8 +391,8 @@ def iterator_test_loop(model, inputs, steps, verbose=0):
      # Get stateful metrics indices. We do not do this before the `steps` loop
      # because model will be compiled only in the first iteration of this loop
      # in the deferred build scenario.
      if hasattr(model, 'metrics'):
        for m in model.stateful_metric_functions:
      if hasattr(model, '_compile_metrics'):
        for m in model.metrics:
          m.reset_states()
        for m in output_loss_metrics:
          m.reset_states()
@@ -750,7 +749,7 @@ def fit_loop(model,
  for epoch in range(initial_epoch, epochs):
    if model._is_compiled:  # Model may not be compiled the first time.
      # Reset stateful metrics
      for m in model.stateful_metric_functions:
      for m in model.metrics:
        m.reset_states()

      for m in output_loss_metrics:
@@ -137,7 +137,7 @@ def fit_generator(model,
  # Construct epoch logs.
  epoch_logs = {}
  while epoch < epochs:
    for m in model.stateful_metric_functions:
    for m in model.metrics:
      m.reset_states()
    callbacks.on_epoch_begin(epoch)
    steps_done = 0
@@ -240,8 +240,8 @@ def evaluate_generator(model,
  if not context.executing_eagerly():
    model._make_test_function()

  if hasattr(model, 'metrics'):
    for m in model.stateful_metric_functions:
  if hasattr(model, '_compile_metrics'):
    for m in model.metrics:
      m.reset_states()

  steps_done = 0
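The loops touched in these training routines all apply the same substitution: per-epoch metric state is now cleared through the unified `metrics` property rather than the old `stateful_metric_functions` list, so `Metric` objects attached via `add_metric` get reset too. A minimal sketch of the pattern (`model` is any compiled Keras model):

# Old: only metrics registered by compile() had their state cleared.
for m in model.stateful_metric_functions:
  m.reset_states()

# New: model.metrics also includes Metric objects attached via add_metric,
# so their running averages are reset between epochs as well.
for m in model.metrics:
  m.reset_states()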
@@ -37,6 +37,7 @@ from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.callbacks import Callback
from tensorflow.python.keras.engine.training_utils import weighted_masked_objective
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
@@ -574,6 +575,31 @@ class TrainingTest(test.TestCase):
    # Test with eager execution and iterator
    model.fit(iterator, epochs=1, steps_per_epoch=2)

  def test_losses_in_defun(self):
    with context.eager_mode():
      layer = keras.layers.Dense(1, kernel_regularizer='l1')
      layer(array_ops.ones([1, 10]))

      @function.defun
      def get_losses():
        return layer.losses

      self.assertAllEqual(
          self.evaluate(layer.losses), self.evaluate(get_losses()))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_logging(self):
    mock_stdout = io.BytesIO() if six.PY2 else io.StringIO()
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(10, activation='relu'))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    model.compile(
        RMSPropOptimizer(learning_rate=0.001), loss='binary_crossentropy')
    with test.mock.patch.object(sys, 'stdout', mock_stdout):
      model.fit(
          np.ones((10, 10), 'float32'), np.ones((10, 1), 'float32'), epochs=10)
    self.assertTrue('Epoch 5/10' in mock_stdout.getvalue())


class TestExceptionsAndWarnings(test.TestCase):
@@ -2248,30 +2274,327 @@ class TestTrainingWithMetrics(test.TestCase):
      scores = model.train_on_batch(x, y, sample_weight=w)
      self.assertArrayNear(scores, [0.2, 0.8], 0.1)

  def test_add_metric_with_tensor_on_model_in_graph_mode(self):
    with self.cached_session():
      x = keras.layers.Input(shape=(1,))
      y = keras.layers.Dense(1, kernel_initializer='ones')(x)
      model = keras.models.Model(x, y)
      model.add_metric(
          math_ops.reduce_sum(y), name='metric_1', aggregation='mean')

      # test with a metric which does not have the standard signature:
      # (y_true, y_pred, sample_Weight)
      model.add_metric(metrics_module.Mean(name='metric_2')(y))
      model.compile('sgd', loss='mse')

      inputs = np.ones(shape=(10, 1))
      targets = np.ones(shape=(10, 1))
      history = model.fit(
          inputs,
          targets,
          epochs=2,
          batch_size=5,
          validation_data=(inputs, targets))
      self.assertEqual(history.history['metric_1'][-1], 5)
      self.assertEqual(history.history['metric_2'][-1], 1)
      self.assertEqual(history.history['val_metric_1'][-1], 5)
      self.assertEqual(history.history['val_metric_2'][-1], 1)

      eval_results = model.evaluate(inputs, targets, batch_size=5)
      self.assertEqual(eval_results[-1], 1)
      self.assertEqual(eval_results[-2], 5)

      model.predict(inputs, batch_size=5)
      model.train_on_batch(inputs, targets)
      model.test_on_batch(inputs, targets)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_logging(self):
    mock_stdout = io.BytesIO() if six.PY2 else io.StringIO()
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(10, activation='relu'))
    model.add(keras.layers.Dense(1, activation='sigmoid'))
    model.compile(
        RMSPropOptimizer(learning_rate=0.001), loss='binary_crossentropy')
    with test.mock.patch.object(sys, 'stdout', mock_stdout):
      model.fit(
          np.ones((10, 10), 'float32'), np.ones((10, 1), 'float32'), epochs=10)
    self.assertTrue('Epoch 5/10' in mock_stdout.getvalue())
  def test_add_metric_in_model_call(self):

  def test_losses_in_defun(self):
    class TestModel(keras.Model):

      def __init__(self):
        super(TestModel, self).__init__(name='test_model')
        self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')
        self.mean = metrics_module.Mean(name='metric_1')

      def call(self, x):
        self.add_metric(
            math_ops.reduce_sum(x), name='metric_2', aggregation='mean')
        # Provide same name as in the instance created in __init__
        # for eager mode
        self.add_metric(self.mean(x), name='metric_1')
        return self.dense1(x)

    model = TestModel()
    model.compile(loss='mse', optimizer=RMSPropOptimizer(0.01))

    x = np.ones(shape=(10, 1))
    y = np.ones(shape=(10, 2))
    history = model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
    self.assertAlmostEqual(history.history['metric_1'][-1], 1, 0)
    self.assertAlmostEqual(history.history['val_metric_1'][-1], 1, 0)
    self.assertAlmostEqual(history.history['metric_2'][-1], 5, 0)
    self.assertAlmostEqual(history.history['val_metric_2'][-1], 5, 0)

    eval_results = model.evaluate(x, y, batch_size=5)
    self.assertAlmostEqual(eval_results[1], 1, 0)
    self.assertAlmostEqual(eval_results[2], 5, 0)

    model.predict(x, batch_size=5)
    model.train_on_batch(x, y)
    model.test_on_batch(x, y)

  def test_add_metric_in_model_call_run_eagerly(self):
    with context.eager_mode():
      layer = keras.layers.Dense(1, kernel_regularizer='l1')
      layer(array_ops.ones([1, 10]))

      @function.defun
      def get_losses():
        return layer.losses
      class TestModel(keras.Model):

        def __init__(self):
          super(TestModel, self).__init__(name='test_model')
          self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')
          self.mean = metrics_module.Mean(name='metric_1')

        def call(self, x):
          self.add_metric(
              math_ops.reduce_sum(x), name='metric_2', aggregation='mean')
          # Provide same name as in the instance created in __init__
          # for eager mode
          self.add_metric(self.mean(x), name='metric_1')
          return self.dense1(x)

      model = TestModel()
      model.compile(
          loss='mse', optimizer=RMSPropOptimizer(0.01), run_eagerly=True)

      x = np.ones(shape=(10, 1))
      y = np.ones(shape=(10, 2))
      history = model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
      self.assertAlmostEqual(history.history['metric_1'][-1], 1, 0)
      self.assertAlmostEqual(history.history['val_metric_1'][-1], 1, 0)
      self.assertAlmostEqual(history.history['metric_2'][-1], 5, 0)
      self.assertAlmostEqual(history.history['val_metric_2'][-1], 5, 0)

      eval_results = model.evaluate(x, y, batch_size=5)
      self.assertAlmostEqual(eval_results[1], 1, 0)
      self.assertAlmostEqual(eval_results[2], 5, 0)

      model.predict(x, batch_size=5)
      model.train_on_batch(x, y)
      model.test_on_batch(x, y)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_add_metric_in_layer_call(self):

    class TestLayer(keras.layers.Layer):

      def build(self, input_shape):
        self.a = self.add_variable(
            'a', (1, 1), initializer='ones', trainable=False)
        self.built = True

      def call(self, inputs):
        self.add_metric(
            math_ops.reduce_sum(inputs), name='metric_1', aggregation='mean')
        return inputs + 1

    model = keras.Sequential()
    model.add(TestLayer(input_shape=(1,)))
    model.add(keras.layers.Dense(2, kernel_initializer='ones'))
    model.compile(loss='mse', optimizer=RMSPropOptimizer(0.01))

    x = np.ones(shape=(10, 1))
    y = np.ones(shape=(10, 2))
    history = model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
    self.assertEqual(history.history['metric_1'][-1], 5)
    self.assertAlmostEqual(history.history['val_metric_1'][-1], 5, 0)

  def test_add_metric_in_layer_call_run_eagerly(self):
    with context.eager_mode():

      class TestLayer(keras.layers.Layer):

        def build(self, input_shape):
          self.a = self.add_variable(
              'a', (1, 1), initializer='ones', trainable=False)
          self.built = True

        def call(self, inputs):
          self.add_metric(
              math_ops.reduce_sum(inputs), name='metric_1', aggregation='mean')
          return inputs + 1

      model = keras.Sequential()
      model.add(TestLayer(input_shape=(1,)))
      model.add(keras.layers.Dense(2, kernel_initializer='ones'))
      model.compile(
          loss='mse', optimizer=RMSPropOptimizer(0.01), run_eagerly=True)

      x = np.ones(shape=(10, 1))
      y = np.ones(shape=(10, 2))
      history = model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
      self.assertEqual(history.history['metric_1'][-1], 5)
      self.assertAlmostEqual(history.history['val_metric_1'][-1], 5, 0)

  def test_model_metrics_list(self):
    with self.cached_session():
      x = keras.layers.Input(shape=(1,))
      y = keras.layers.Dense(1, kernel_initializer='ones')(x)
      model = keras.models.Model(x, y)
      model.add_metric(
          math_ops.reduce_sum(y), name='metric_1', aggregation='mean')
      model.add_metric(metrics_module.Mean(name='metric_2')(y))
      model.compile('sgd', loss='mse', metrics=['acc'])

      # Verify that the metrics added using `compile` and `add_metric` API are
      # included
      self.assertEqual(model._compile_metrics, ['acc'])
      names = []
      for m in model.metrics:
        if isinstance(m, metrics_module.Metric):
          names.append(m.name)
        else:
          names.append(m.__name__)
      self.assertEqual(names, ['binary_accuracy', 'metric_1', 'metric_2'])

  def test_model_eager_metrics_list(self):
    with context.eager_mode():

      class TestModel(keras.Model):

        def __init__(self):
          super(TestModel, self).__init__(name='test_model')
          self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')

        def call(self, x):
          self.add_metric(
              math_ops.reduce_sum(x), name='metric_1', aggregation='mean')
          return self.dense1(x)

      model = TestModel()
      model.compile(
          loss='mse',
          optimizer=RMSPropOptimizer(0.01),
          metrics=['acc'],
          run_eagerly=True)
      x = np.ones(shape=(10, 1))
      y = np.ones(shape=(10, 2))
      model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))

      self.assertEqual(model._compile_metrics, ['acc'])
      names = []
      for m in model.metrics:
        if isinstance(m, metrics_module.Metric):
          names.append(m.name)
        else:
          names.append(m.__name__)
      self.assertEqual(names, ['categorical_accuracy', 'metric_1'])

  @tf_test_util.run_in_graph_and_eager_modes
  def test_multiple_add_metric_calls(self):

    class TestModel(keras.Model):

      def __init__(self):
        super(TestModel, self).__init__(name='test_model')
        self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')
        self.mean1 = metrics_module.Mean(name='metric_1')
        self.mean2 = metrics_module.Mean(name='metric_2')

      def call(self, x):
        self.add_metric(self.mean2(x), name='metric_2')
        self.add_metric(self.mean1(x), name='metric_1')
        self.add_metric(
            math_ops.reduce_sum(x), name='metric_3', aggregation='mean')
        return self.dense1(x)

    model = TestModel()
    model.compile(loss='mse', optimizer=RMSPropOptimizer(0.01))

    x = np.ones(shape=(10, 1))
    y = np.ones(shape=(10, 2))
    history = model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
    self.assertAlmostEqual(history.history['metric_1'][-1], 1, 0)
    self.assertAlmostEqual(history.history['metric_2'][-1], 1, 0)
    self.assertAlmostEqual(history.history['metric_3'][-1], 5, 0)

    eval_results = model.evaluate(x, y, batch_size=5)
    self.assertArrayNear(eval_results[1:4], [1, 1, 5], 0.1)

    model.predict(x, batch_size=5)
    model.train_on_batch(x, y)
    model.test_on_batch(x, y)

  def test_invalid_metric_tensor_in_call(self):
    with context.eager_mode():

      class TestLayer(keras.layers.Layer):

        def call(self, inputs):
          self.add_metric(metrics_module.Mean(name='metric_1')(inputs))
          return inputs + 1

      model = keras.Sequential()
      model.add(TestLayer(input_shape=(1,)))
      model.add(keras.layers.Dense(2, kernel_initializer='ones'))
      model.compile(
          loss='mse', optimizer=RMSPropOptimizer(0.01), run_eagerly=True)

      x = np.ones(shape=(10, 1))
      y = np.ones(shape=(10, 2))
      with self.assertRaisesRegexp(
          ValueError,
          'We do not support adding an aggregated metric tensor in `call` in '
          'eager execution.'):
        model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_duplicate_metric_name_in_add_metric(self):

    class TestModel(keras.Model):

      def __init__(self):
        super(TestModel, self).__init__(name='test_model')
        self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')
        self.mean = metrics_module.Mean(name='metric_1')
        self.mean2 = metrics_module.Mean(name='metric_1')

      def call(self, x):
        self.add_metric(self.mean(x), name='metric_1')
        return self.dense1(x)

    model = TestModel()
    model.compile(loss='mse', optimizer=RMSPropOptimizer(0.01))

    x = np.ones(shape=(10, 1))
    y = np.ones(shape=(10, 2))
    with self.assertRaisesRegexp(
        ValueError,
        'Please provide different names for the metrics you have added. '
        'We found 2 metrics with the name: "metric_1"'):
      model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_multiple_no_name_input_to_add_metric(self):

    class TestModel(keras.Model):

      def __init__(self):
        super(TestModel, self).__init__(name='test_model')
        self.dense1 = keras.layers.Dense(2, kernel_initializer='ones')

      def call(self, x):
        self.add_metric(math_ops.reduce_sum(x), aggregation='mean')
        self.add_metric(math_ops.reduce_sum(x), aggregation='mean')
        return self.dense1(x)

    model = TestModel()
    model.compile(loss='mse', optimizer=RMSPropOptimizer(0.01))
    x = np.ones(shape=(10, 1))
    y = np.ones(shape=(10, 2))
    model.fit(x, y, epochs=2, batch_size=5, validation_data=(x, y))
    self.assertEqual([m.name for m in model.metrics], ['mean', 'mean_1'])

      self.assertAllEqual(self.evaluate(layer.losses),
                          self.evaluate(get_losses()))

if __name__ == '__main__':
  test.main()
@@ -545,9 +545,20 @@ class Metric(Layer):
    Returns:
      The metric value tensor.
    """
    update_op = self.update_state(*args, **kwargs)  # pylint: disable=not-callable
    update_op = self.update_state(*args, **kwargs)
    with ops.control_dependencies([update_op]):
      return self.result()  # pylint: disable=not-callable
      result_t = self.result()

      # We are adding the metric object as metadata on the result tensor.
      # This is required when we want to use a metric with `add_metric` API on
      # a Model/Layer in graph mode. This metric instance will later be used
      # to reset variable state after each epoch of training.
      # Example:
      #   model = Model()
      #   model.add_metric(Mean()(values), name='mean')
      if not context.executing_eagerly():
        result_t._metric_obj = self  # pylint: disable=protected-access
      return result_t

  def reset_states(self):
    """Resets all of the metric state variables.
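In graph mode the result tensor returned by `Metric.__call__` now carries a back-pointer to its `Metric` object; `_symbolic_add_metric` in base_layer.py reads it via `result_tensor._metric_obj`. A small sketch of what that means (graph mode only; the variable names are illustrative):

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.keras import metrics as metrics_module

with ops.Graph().as_default():
  values = constant_op.constant([1.0, 2.0, 3.0])
  mean = metrics_module.Mean(name='my_mean')
  result = mean(values)               # runs update_state(), then result()
  assert result._metric_obj is mean   # metadata attached by __call__ above
  # add_metric(result) can therefore recover `mean` and reset it each epoch.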
@@ -731,7 +742,8 @@ class MeanMetricWrapper(Mean):
        matches, sample_weight=sample_weight)

  def get_config(self):
    config = self._fn_kwargs
    config = {'fn': self._fn}
    config.update(self._fn_kwargs)
    base_config = super(MeanMetricWrapper, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

@@ -760,6 +772,12 @@ class BinaryAccuracy(MeanMetricWrapper):
    super(BinaryAccuracy, self).__init__(
        binary_accuracy, name, dtype=dtype, threshold=threshold)

  @classmethod
  def from_config(cls, config):
    if 'fn' in config:
      config.pop('fn')
    return super(BinaryAccuracy, cls).from_config(config)


class CategoricalAccuracy(MeanMetricWrapper):
  """Calculates how often predictions matches labels.
@@ -783,6 +801,12 @@ class CategoricalAccuracy(MeanMetricWrapper):
    super(CategoricalAccuracy, self).__init__(
        categorical_accuracy, name, dtype=dtype)

  @classmethod
  def from_config(cls, config):
    if 'fn' in config:
      config.pop('fn')
    return super(CategoricalAccuracy, cls).from_config(config)


class SparseCategoricalAccuracy(MeanMetricWrapper):
  """Calculates how often predictions matches integer labels.
@@ -800,6 +824,12 @@ class SparseCategoricalAccuracy(MeanMetricWrapper):
    super(SparseCategoricalAccuracy, self).__init__(
        sparse_categorical_accuracy, name, dtype=dtype)

  @classmethod
  def from_config(cls, config):
    if 'fn' in config:
      config.pop('fn')
    return super(SparseCategoricalAccuracy, cls).from_config(config)


class _ConfusionMatrixConditionCount(Metric):
  """Calculates the number of the given confusion matrix condition."""
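These `get_config`/`from_config` additions let the wrapped accuracy classes round-trip through their config: `get_config` on `MeanMetricWrapper` now records the wrapped `fn`, and the accuracy subclasses pop it again because their constructors re-bind the right function. A sketch of the intended round trip (illustrative values):

from tensorflow.python.keras import metrics as metrics_module

acc = metrics_module.BinaryAccuracy(name='acc', threshold=0.75)
config = acc.get_config()    # now contains 'fn' plus 'threshold', 'name', 'dtype'
restored = metrics_module.BinaryAccuracy.from_config(config)  # 'fn' is popped
assert restored.name == 'acc'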
@@ -304,8 +304,9 @@ def _in_place_subclassed_model_reset(model):
      attributes_cache[name] = value
      assert value in model._layers
    elif isinstance(
        value, (list, tuple)) and name not in ('layers', '_layers',
                                               'stateful_metric_functions'):
        value,
        (list, tuple)) and name not in ('layers', '_layers', 'metrics',
                                        '_compile_stateful_metric_functions'):
      # Handle case: list/tuple of layers (also tracked by the Network API).
      if value and all(isinstance(val, Layer) for val in value):
        raise ValueError('We do not support the use of list-of-layers '
@@ -345,9 +346,6 @@ def _in_place_subclassed_model_reset(model):
      'targets',
      '_feed_targets',
      'sample_weight_modes',
      'weighted_metrics',
      'metrics_names',
      'metrics_tensors',
      'total_loss',
      'sample_weights',
      '_feed_sample_weights',
@@ -495,10 +493,11 @@ def clone_and_build_model(
    clone.compile(
        optimizer,
        model.loss,
        metrics=metrics_module.clone_metrics(model.metrics),
        metrics=metrics_module.clone_metrics(model._compile_metrics),
        loss_weights=model.loss_weights,
        sample_weight_mode=model.sample_weight_mode,
        weighted_metrics=metrics_module.clone_metrics(model.weighted_metrics),
        weighted_metrics=metrics_module.clone_metrics(
            model._compile_weighted_metrics),
        target_tensors=target_tensors)

  return clone
@@ -331,7 +331,8 @@ class TestCloneAndBuildModel(test.TestCase):
    self.assertEqual('mse', model.loss)
    self.assertTrue(
        isinstance(model.optimizer, keras.optimizers.RMSprop))
    self.assertEqual(['acc', metrics.categorical_accuracy], model.metrics)
    self.assertEqual(['acc', metrics.categorical_accuracy],
                     model._compile_metrics)

  def _clone_and_build_test_helper(self, model, is_subclassed=False):
    inp = np.random.random((10, 4))
@@ -41,6 +41,14 @@ tf_class {
    name: "losses"
    mtype: "<type \'property\'>"
  }
  member {
    name: "metrics"
    mtype: "<type \'property\'>"
  }
  member {
    name: "metrics_names"
    mtype: "<type \'property\'>"
  }
  member {
    name: "name"
    mtype: "<type \'property\'>"
@@ -109,6 +117,10 @@ tf_class {
    name: "add_loss"
    argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
  }
  member_method {
    name: "add_metric"
    argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
  }
  member_method {
    name: "add_update"
    argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
@@ -42,6 +42,14 @@ tf_class {
    name: "losses"
    mtype: "<type \'property\'>"
  }
  member {
    name: "metrics"
    mtype: "<type \'property\'>"
  }
  member {
    name: "metrics_names"
    mtype: "<type \'property\'>"
  }
  member {
    name: "name"
    mtype: "<type \'property\'>"
@@ -114,6 +122,10 @@ tf_class {
    name: "add_loss"
    argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
  }
  member_method {
    name: "add_metric"
    argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
  }
  member_method {
    name: "add_update"
    argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
@@ -89,6 +89,10 @@ tf_class {
    name: "add_loss"
    argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
  }
  member_method {
    name: "add_metric"
    argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
  }
  member_method {
    name: "add_update"
    argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
@@ -88,6 +88,10 @@ tf_class {
    name: "add_loss"
    argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
  }
  member_method {
    name: "add_metric"
    argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
  }
  member_method {
    name: "add_update"
    argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -97,6 +97,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -178,6 +178,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -98,6 +98,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -98,6 +98,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -161,6 +161,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -161,6 +161,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -87,6 +87,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -89,6 +89,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -92,6 +92,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -88,6 +88,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|
@ -90,6 +90,10 @@ tf_class {
|
||||
name: "add_loss"
|
||||
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_metric"
|
||||
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
|
||||
}
|
||||
member_method {
|
||||
name: "add_update"
|
||||
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
|
||||
|