Extensive fixes in metrics docstrings.

PiperOrigin-RevId: 304917158
Change-Id: I60ccca4b1a13d75012a95f8d7a2f43fb17682c22
parent 248675bf77
commit 79e6396788
@@ -48,12 +48,14 @@ class Loss(object):
   * `call()`: Contains the logic for loss calculation using `y_true`, `y_pred`.

   Example subclass implementation:
+
   ```python
   class MeanSquaredError(Loss):
+
     def call(self, y_true, y_pred):
-      y_pred = ops.convert_to_tensor_v2(y_pred)
-      y_true = math_ops.cast(y_true, y_pred.dtype)
-      return K.mean(math_ops.square(y_pred - y_true), axis=-1)
+      y_pred = tf.convert_to_tensor_v2(y_pred)
+      y_true = tf.cast(y_true, y_pred.dtype)
+      return tf.reduce_mean(math_ops.square(y_pred - y_true), axis=-1)
   ```

   When used with `tf.distribute.Strategy`, outside of built-in training loops
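For context, this hunk migrates the subclassing example toward public `tf.*` ops. A minimal runnable sketch of the same pattern against the fully public TF 2.x API (using `tf.convert_to_tensor` and `tf.square` is our assumption; the hunk itself still keeps `convert_to_tensor_v2` and `math_ops.square`):

```python
import tensorflow as tf

class MyMeanSquaredError(tf.keras.losses.Loss):
  """Minimal Loss subclass mirroring the docstring example."""

  def call(self, y_true, y_pred):
    # Coerce inputs to a common float dtype before computing the error.
    y_pred = tf.convert_to_tensor(y_pred)
    y_true = tf.cast(y_true, y_pred.dtype)
    # Per-sample mean over the last axis; Keras reduces across the batch.
    return tf.reduce_mean(tf.square(y_pred - y_true), axis=-1)

loss = MyMeanSquaredError()
print(loss([[0., 1.], [0., 0.]], [[1., 1.], [1., 0.]]).numpy())  # 0.5
```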
@@ -259,7 +261,7 @@ class MeanSquaredError(LossFunctionWrapper):

   `loss = square(y_true - y_pred)`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[1., 1.], [1., 0.]]
@@ -284,11 +286,10 @@ class MeanSquaredError(LossFunctionWrapper):
   >>> mse(y_true, y_pred).numpy()
   array([0.5, 0.5], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.MeanSquaredError())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.MeanSquaredError())
   ```
   """
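As a sanity check of the doctest values in this hunk, a short standalone run; the toy model below is an assumed stand-in for the docstring's `inputs`/`outputs` placeholders:

```python
import tensorflow as tf

mse = tf.keras.losses.MeanSquaredError()
print(mse([[0., 1.], [0., 0.]], [[1., 1.], [1., 0.]]).numpy())  # 0.5

# The compile() form, now spelled with an explicit keyword as the new
# docstring recommends.
inputs = tf.keras.Input(shape=(4,))
outputs = tf.keras.layers.Dense(2)(inputs)
model = tf.keras.Model(inputs, outputs)
model.compile(optimizer='sgd', loss=tf.keras.losses.MeanSquaredError())
```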
@@ -319,7 +320,7 @@ class MeanAbsoluteError(LossFunctionWrapper):

   `loss = abs(y_true - y_pred)`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[1., 1.], [1., 0.]]
@@ -344,11 +345,10 @@ class MeanAbsoluteError(LossFunctionWrapper):
   >>> mae(y_true, y_pred).numpy()
   array([0.5, 0.5], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.MeanAbsoluteError())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.MeanAbsoluteError())
   ```
   """
@@ -379,7 +379,7 @@ class MeanAbsolutePercentageError(LossFunctionWrapper):

   `loss = 100 * abs(y_true - y_pred) / y_true`

-  Usage:
+  Standalone usage:

   >>> y_true = [[2., 1.], [2., 3.]]
   >>> y_pred = [[1., 1.], [1., 0.]]
@@ -404,11 +404,11 @@ class MeanAbsolutePercentageError(LossFunctionWrapper):
   >>> mape(y_true, y_pred).numpy()
   array([25., 75.], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.MeanAbsolutePercentageError())
+  model.compile(optimizer='sgd',
+                loss=tf.keras.losses.MeanAbsolutePercentageError())
   ```
   """
@@ -440,7 +440,7 @@ class MeanSquaredLogarithmicError(LossFunctionWrapper):

   `loss = square(log(y_true + 1.) - log(y_pred + 1.))`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[1., 1.], [1., 0.]]
@@ -465,11 +465,11 @@ class MeanSquaredLogarithmicError(LossFunctionWrapper):
   >>> msle(y_true, y_pred).numpy()
   array([0.240, 0.240], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.MeanSquaredLogarithmicError())
+  model.compile(optimizer='sgd',
+                loss=tf.keras.losses.MeanSquaredLogarithmicError())
   ```
   """
@@ -507,7 +507,7 @@ class BinaryCrossentropy(LossFunctionWrapper):
   floating-pointing value, and both `y_pred` and `y_true` have the shape
   `[batch_size]`.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -535,8 +535,7 @@ class BinaryCrossentropy(LossFunctionWrapper):
   Usage with the `tf.keras` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.BinaryCrossentropy())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.BinaryCrossentropy())
   ```
   """
@@ -589,7 +588,7 @@ class CategoricalCrossentropy(LossFunctionWrapper):
   example. The shape of both `y_pred` and `y_true` are
   `[batch_size, num_classes]`.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1, 0], [0, 0, 1]]
   >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
@@ -614,11 +613,10 @@ class CategoricalCrossentropy(LossFunctionWrapper):
   >>> cce(y_true, y_pred).numpy()
   array([0.0513, 2.303], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.CategoricalCrossentropy())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.CategoricalCrossentropy())
   ```
   """
@@ -671,7 +669,7 @@ class SparseCategoricalCrossentropy(LossFunctionWrapper):
   The shape of `y_true` is `[batch_size]` and the shape of `y_pred` is
   `[batch_size, num_classes]`.

-  Usage:
+  Standalone usage:

   >>> y_true = [1, 2]
   >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
@@ -696,11 +694,11 @@ class SparseCategoricalCrossentropy(LossFunctionWrapper):
   >>> scce(y_true, y_pred).numpy()
   array([0.0513, 2.303], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.SparseCategoricalCrossentropy())
+  model.compile(optimizer='sgd',
+                loss=tf.keras.losses.SparseCategoricalCrossentropy())
   ```
   """
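The two crossentropy hunks above differ only in the label format they expect. A side-by-side sketch; both calls reproduce the same per-sample values as the doctests, averaged under the default reduction:

```python
import tensorflow as tf

# One-hot targets -> CategoricalCrossentropy.
cce = tf.keras.losses.CategoricalCrossentropy()
print(cce([[0, 1, 0], [0, 0, 1]],
          [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]).numpy())  # ~1.177

# Integer targets -> SparseCategoricalCrossentropy, same probabilities.
scce = tf.keras.losses.SparseCategoricalCrossentropy()
print(scce([1, 2], [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]).numpy())  # ~1.177
```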
@@ -742,7 +740,7 @@ class Hinge(LossFunctionWrapper):
   `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are
   provided we will convert them to -1 or 1.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -767,11 +765,10 @@ class Hinge(LossFunctionWrapper):
   >>> h(y_true, y_pred).numpy()
   array([1.1, 1.5], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.Hinge())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.Hinge())
   ```
   """
@@ -802,7 +799,7 @@ class SquaredHinge(LossFunctionWrapper):
   `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are
   provided we will convert them to -1 or 1.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -827,11 +824,10 @@ class SquaredHinge(LossFunctionWrapper):
   >>> h(y_true, y_pred).numpy()
   array([1.46, 2.26], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.SquaredHinge())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.SquaredHinge())
   ```
   """
@@ -863,7 +859,7 @@ class CategoricalHinge(LossFunctionWrapper):
   `loss = maximum(neg - pos + 1, 0)`
   where `neg=maximum((1-y_true)*y_pred) and pos=sum(y_true*y_pred)`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1], [0, 0]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -888,11 +884,10 @@ class CategoricalHinge(LossFunctionWrapper):
   >>> h(y_true, y_pred).numpy()
   array([1.2, 1.6], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.CategoricalHinge())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.CategoricalHinge())
   ```
   """
@@ -923,7 +918,7 @@ class Poisson(LossFunctionWrapper):

   `loss = y_pred - y_true * log(y_pred)`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[1., 1.], [0., 0.]]
@@ -948,11 +943,10 @@ class Poisson(LossFunctionWrapper):
   >>> p(y_true, y_pred).numpy()
   array([0.999, 0.], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.Poisson())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.Poisson())
   ```
   """
@@ -981,7 +975,7 @@ class LogCosh(LossFunctionWrapper):
   `logcosh = log((exp(x) + exp(-x))/2)`,
   where x is the error `y_pred - y_true`.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [0., 0.]]
   >>> y_pred = [[1., 1.], [0., 0.]]
@@ -1006,11 +1000,10 @@ class LogCosh(LossFunctionWrapper):
   >>> l(y_true, y_pred).numpy()
   array([0.217, 0.], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.LogCosh())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.LogCosh())
   ```
   """
@@ -1040,7 +1033,7 @@ class KLDivergence(LossFunctionWrapper):

   See: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1], [0, 0]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -1065,11 +1058,10 @@ class KLDivergence(LossFunctionWrapper):
   >>> kl(y_true, y_pred).numpy()
   array([0.916, -3.08e-06], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.KLDivergence())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.KLDivergence())
   ```
   """
@@ -1106,7 +1098,7 @@ class Huber(LossFunctionWrapper):
   ```
   where d is `delta`. See: https://en.wikipedia.org/wiki/Huber_loss

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1], [0, 0]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -1131,11 +1123,10 @@ class Huber(LossFunctionWrapper):
   >>> h(y_true, y_pred).numpy()
   array([0.18, 0.13], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.Huber())
+  model.compile(optimizer='sgd', loss=tf.keras.losses.Huber())
   ```
   """
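For the Huber hunk, a quick numeric check of the quadratic/linear switch at `delta`; the per-sample values reproduce the docstring's array when reduction is disabled:

```python
import tensorflow as tf

h = tf.keras.losses.Huber(delta=1.0,
                          reduction=tf.keras.losses.Reduction.NONE)
# All errors here are below delta, so the loss stays quadratic (0.5 * x^2).
print(h([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]]).numpy())  # [0.18 0.13]
```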
@@ -1177,7 +1168,7 @@ def mean_squared_error(y_true, y_pred):

   `loss = mean(square(y_true - y_pred), axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 2, size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1209,7 +1200,7 @@ def mean_absolute_error(y_true, y_pred):

   `loss = mean(abs(y_true - y_pred), axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 2, size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1241,7 +1232,7 @@ def mean_absolute_percentage_error(y_true, y_pred):

   `loss = 100 * mean(abs(y_true - y_pred) / y_true, axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.random(size=(2, 3))
   >>> y_true = np.maximum(y_true, 1e-7)  # Prevent division by zero
@@ -1277,7 +1268,7 @@ def mean_squared_logarithmic_error(y_true, y_pred):

   `loss = mean(square(log(y_true + 1) - log(y_pred + 1)), axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 2, size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1325,7 +1316,7 @@ def squared_hinge(y_true, y_pred):

   `loss = mean(square(maximum(1 - y_true * y_pred, 0)), axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.choice([-1, 1], size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1357,7 +1348,7 @@ def hinge(y_true, y_pred):

   `loss = mean(maximum(1 - y_true * y_pred, 0), axis=-1)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.choice([-1, 1], size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1389,7 +1380,7 @@ def categorical_hinge(y_true, y_pred):
   `loss = maximum(neg - pos + 1, 0)`
   where `neg=maximum((1-y_true)*y_pred) and pos=sum(y_true*y_pred)`

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 3, size=(2,))
   >>> y_true = tf.keras.utils.to_categorical(y_true, num_classes=3)
@@ -1459,7 +1450,7 @@ def log_cosh(y_true, y_pred):
   like the mean squared error, but will not be so strongly affected by the
   occasional wildly incorrect prediction.

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.random(size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1495,7 +1486,7 @@ def categorical_crossentropy(y_true,
                              label_smoothing=0):
   """Computes the categorical crossentropy loss.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1, 0], [0, 0, 1]]
   >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
@@ -1532,7 +1523,7 @@ def categorical_crossentropy(y_true,
 def sparse_categorical_crossentropy(y_true, y_pred, from_logits=False, axis=-1):
   """Computes the sparse categorical crossentropy loss.

-  Usage:
+  Standalone usage:

   >>> y_true = [1, 2]
   >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
@@ -1563,7 +1554,7 @@ def sparse_categorical_crossentropy(y_true, y_pred, from_logits=False, axis=-1):
 def binary_crossentropy(y_true, y_pred, from_logits=False, label_smoothing=0):
   """Computes the binary crossentropy loss.

-  Usage:
+  Standalone usage:

   >>> y_true = [[0, 1], [0, 0]]
   >>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
@@ -1610,7 +1601,7 @@ def kl_divergence(y_true, y_pred):

   See: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1645,7 +1636,7 @@ def poisson(y_true, y_pred):
   The Poisson loss is the mean of the elements of the `Tensor`
   `y_pred - y_true * log(y_pred)`.

-  Usage:
+  Standalone usage:

   >>> y_true = np.random.randint(0, 2, size=(2, 3))
   >>> y_pred = np.random.random(size=(2, 3))
@@ -1694,7 +1685,7 @@ def cosine_similarity(y_true, y_pred, axis=-1):

   `loss = -sum(l2_norm(y_true) * l2_norm(y_pred))`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [1., 1.], [1., 1.]]
   >>> y_pred = [[1., 0.], [1., 1.], [-1., -1.]]
@@ -1728,7 +1719,7 @@ class CosineSimilarity(LossFunctionWrapper):

   `loss = -sum(l2_norm(y_true) * l2_norm(y_pred))`

-  Usage:
+  Standalone usage:

   >>> y_true = [[0., 1.], [1., 1.]]
   >>> y_pred = [[1., 0.], [1., 1.]]
@@ -1758,11 +1749,10 @@ class CosineSimilarity(LossFunctionWrapper):
   >>> cosine_loss(y_true, y_pred).numpy()
   array([-0., -0.999], dtype=float32)

-  Usage with the `compile` API:
+  Usage with the `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss=tf.keras.losses.CosineSimilarity(axis=1))
+  model.compile(optimizer='sgd', loss=tf.keras.losses.CosineSimilarity(axis=1))
   ```

   Args:
@@ -83,7 +83,7 @@ class Metric(base_layer.Layer):
     dtype: (Optional) data type of the metric result.
     **kwargs: Additional layer keywords arguments.

-  Usage:
+  Standalone usage:

   ```python
   m = SomeMetric(...)
@@ -92,7 +92,7 @@ class Metric(base_layer.Layer):
   print('Final result: ', m.result().numpy())
   ```

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Sequential()
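The `SomeMetric(...)` in this hunk is the docstring's own placeholder. A concrete, runnable version of the accumulate/read/reset cycle it alludes to, using a stock metric as the stand-in:

```python
import tensorflow as tf

m = tf.keras.metrics.Accuracy()  # substitute for the `SomeMetric(...)` placeholder
for y_true, y_pred in [([1], [1]), ([0], [1])]:
  m.update_state(y_true, y_pred)   # accumulate state batch by batch
print('Final result: ', m.result().numpy())  # 0.5
m.reset_states()                   # reuse the same instance for the next epoch
```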
@@ -404,19 +404,18 @@ class Sum(Reduce):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Sum()
   >>> m.update_state([1, 3, 5, 7])
   >>> m.result().numpy()
   16.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.add_metric(tf.keras.metrics.Sum(name='sum_1')(outputs))
-  model.compile('sgd', loss='mse')
+  model.compile(optimizer='sgd', loss='mse')
   ```
   """
@@ -443,7 +442,7 @@ class Mean(Reduce):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Mean()
   >>> m.update_state([1, 3, 5, 7])
@@ -454,12 +453,11 @@ class Mean(Reduce):
   >>> m.result().numpy()
   2.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.add_metric(tf.keras.metrics.Mean(name='mean_1')(outputs))
-  model.compile('sgd', loss='mse')
+  model.compile(optimizer='sgd', loss='mse')
   ```
   """
@@ -485,7 +483,7 @@ class MeanRelativeError(Mean):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanRelativeError(normalizer=[1, 3, 2, 3])
   >>> m.update_state([1, 3, 2, 3], [2, 4, 6, 8])
@@ -496,12 +494,11 @@ class MeanRelativeError(Mean):
   >>> m.result().numpy()
   1.25

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.MeanRelativeError(normalizer=[1, 3])])
   ```
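The 1.25 in this hunk's doctest follows directly from the definition; a standalone reproduction with the arithmetic spelled out:

```python
import tensorflow as tf

m = tf.keras.metrics.MeanRelativeError(normalizer=[1, 3, 2, 3])
m.update_state([1, 3, 2, 3], [2, 4, 6, 8])
# |pred - true| / normalizer = [1/1, 1/3, 4/2, 5/3]; mean = 5/4 = 1.25
print(m.result().numpy())  # 1.25
```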
@@ -638,7 +635,7 @@ class Accuracy(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Accuracy()
   >>> m.update_state([[1], [2], [3], [4]], [[0], [2], [3], [4]])
@@ -651,11 +648,12 @@ class Accuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.Accuracy()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.Accuracy()])
   ```
   """
@@ -681,7 +679,7 @@ class BinaryAccuracy(MeanMetricWrapper):
     threshold: (Optional) Float representing the threshold for deciding
       whether prediction values are 1 or 0.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.BinaryAccuracy()
   >>> m.update_state([[1], [1], [0], [0]], [[0.98], [1], [0], [0.6]])
@@ -694,11 +692,12 @@ class BinaryAccuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.BinaryAccuracy()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.BinaryAccuracy()])
   ```
   """
@@ -729,7 +728,7 @@ class CategoricalAccuracy(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.CategoricalAccuracy()
   >>> m.update_state([[0, 0, 1], [0, 1, 0]], [[0.1, 0.9, 0.8],
@@ -744,12 +743,11 @@ class CategoricalAccuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.3

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.CategoricalAccuracy()])
   ```
@@ -783,7 +781,7 @@ class SparseCategoricalAccuracy(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.SparseCategoricalAccuracy()
   >>> m.update_state([[2], [1]], [[0.1, 0.6, 0.3], [0.05, 0.95, 0]])
@@ -796,12 +794,11 @@ class SparseCategoricalAccuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.3

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
   ```
@@ -822,7 +819,7 @@ class TopKCategoricalAccuracy(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.TopKCategoricalAccuracy(k=1)
   >>> m.update_state([[0, 0, 1], [0, 1, 0]],
@@ -837,11 +834,12 @@ class TopKCategoricalAccuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.3

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', metrics=[tf.keras.metrics.TopKCategoricalAccuracy()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.TopKCategoricalAccuracy()])
   ```
   """
@@ -860,7 +858,7 @@ class SparseTopKCategoricalAccuracy(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.SparseTopKCategoricalAccuracy(k=1)
   >>> m.update_state([2, 1], [[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
@@ -873,12 +871,12 @@ class SparseTopKCategoricalAccuracy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.3

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SparseTopKCategoricalAccuracy()])
   ```
   """
@@ -975,7 +973,7 @@ class FalsePositives(_ConfusionMatrixConditionCount):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.FalsePositives()
   >>> m.update_state([0, 1, 0, 0], [0, 0, 1, 1])
@@ -987,11 +985,12 @@ class FalsePositives(_ConfusionMatrixConditionCount):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.FalsePositives()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.FalsePositives()])
   ```
   """
@@ -1023,7 +1022,7 @@ class FalseNegatives(_ConfusionMatrixConditionCount):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.FalseNegatives()
   >>> m.update_state([0, 1, 1, 1], [0, 1, 0, 0])
@@ -1035,11 +1034,12 @@ class FalseNegatives(_ConfusionMatrixConditionCount):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.FalseNegatives()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.FalseNegatives()])
   ```
   """
@@ -1071,7 +1071,7 @@ class TrueNegatives(_ConfusionMatrixConditionCount):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.TrueNegatives()
   >>> m.update_state([0, 1, 0, 0], [1, 1, 0, 0])
@@ -1083,11 +1083,12 @@ class TrueNegatives(_ConfusionMatrixConditionCount):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.TrueNegatives()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.TrueNegatives()])
   ```
   """
@@ -1119,7 +1120,7 @@ class TruePositives(_ConfusionMatrixConditionCount):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.TruePositives()
   >>> m.update_state([0, 1, 1, 1], [1, 0, 1, 1])
@@ -1131,11 +1132,12 @@ class TruePositives(_ConfusionMatrixConditionCount):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.TruePositives()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.TruePositives()])
   ```
   """
@@ -1183,7 +1185,7 @@ class Precision(Metric):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Precision()
   >>> m.update_state([0, 1, 1, 1], [1, 0, 1, 1])
@@ -1207,11 +1209,12 @@ class Precision(Metric):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.Precision()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.Precision()])
   ```
   """
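Precision (and Recall below) also accepts a decision threshold; a short sketch of how that interacts with `update_state`, independent of the doctests above:

```python
import tensorflow as tf

# Default: predictions are thresholded at 0.5.
p = tf.keras.metrics.Precision()
p.update_state([0, 1, 1, 1], [1, 0, 1, 1])
print(p.result().numpy())  # 0.6666667: 2 true positives / 3 predicted

# A stricter threshold changes what counts as a predicted positive.
p_hi = tf.keras.metrics.Precision(thresholds=0.9)
p_hi.update_state([0, 1, 1, 1], [0.2, 0.95, 0.5, 0.95])
print(p_hi.result().numpy())  # 1.0: both predictions above 0.9 are correct
```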
@@ -1319,7 +1322,7 @@ class Recall(Metric):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Recall()
   >>> m.update_state([0, 1, 1, 1], [1, 0, 1, 1])
@@ -1331,11 +1334,12 @@ class Recall(Metric):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.Recall()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.Recall()])
   ```
   """
@@ -1529,7 +1533,7 @@ class SensitivityAtSpecificity(SensitivitySpecificityBase):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.SensitivityAtSpecificity(0.5)
   >>> m.update_state([0, 0, 0, 1, 1], [0, 0.3, 0.8, 0.3, 0.8])
@@ -1542,12 +1546,11 @@ class SensitivityAtSpecificity(SensitivitySpecificityBase):
   >>> m.result().numpy()
   0.333333

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SensitivityAtSpecificity()])
   ```
@@ -1605,7 +1608,7 @@ class SpecificityAtSensitivity(SensitivitySpecificityBase):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.SpecificityAtSensitivity(0.5)
   >>> m.update_state([0, 0, 0, 1, 1], [0, 0.3, 0.8, 0.3, 0.8])
@@ -1618,12 +1621,11 @@ class SpecificityAtSensitivity(SensitivitySpecificityBase):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SpecificityAtSensitivity()])
   ```
@@ -1673,7 +1675,7 @@ class PrecisionAtRecall(SensitivitySpecificityBase):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.PrecisionAtRecall(0.5)
   >>> m.update_state([0, 0, 0, 1, 1], [0, 0.3, 0.8, 0.3, 0.8])
@@ -1686,12 +1688,11 @@ class PrecisionAtRecall(SensitivitySpecificityBase):
   >>> m.result().numpy()
   0.33333333

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.PrecisionAtRecall(recall=0.8)])
   ```
@@ -1744,7 +1745,7 @@ class RecallAtPrecision(SensitivitySpecificityBase):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.RecallAtPrecision(0.8)
   >>> m.update_state([0, 0, 1, 1], [0, 0.5, 0.3, 0.9])
@@ -1757,12 +1758,11 @@ class RecallAtPrecision(SensitivitySpecificityBase):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.RecallAtPrecision(precision=0.8)])
   ```
@@ -1861,7 +1861,7 @@ class AUC(Metric):
   before flattening; therefore `label_weights` should not be used for
   multi-class data.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.AUC(num_thresholds=3)
   >>> m.update_state([0, 0, 1, 1], [0, 0.5, 0.3, 0.9])
@@ -1878,11 +1878,10 @@ class AUC(Metric):
   >>> m.result().numpy()
   1.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.AUC()])
+  model.compile(optimizer='sgd', loss='mse', metrics=[tf.keras.metrics.AUC()])
   ```
   """
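A note on `num_thresholds` in the AUC hunk: it controls how finely the ROC curve is bucketed, and the docstring's tiny value of 3 is only for illustration (the default is 200). A standalone run of the doctest's first example, where the interpolated result is 0.75 rather than 1.0 as we understand the docstring:

```python
import tensorflow as tf

# Three thresholds -> a very coarse, interpolated ROC approximation.
m = tf.keras.metrics.AUC(num_thresholds=3)
m.update_state([0, 0, 1, 1], [0, 0.5, 0.3, 0.9])
print(m.result().numpy())  # ~0.75 with 3 thresholds
```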
@@ -2239,7 +2238,7 @@ class CosineSimilarity(MeanMetricWrapper):
     axis: (Optional) Defaults to -1. The dimension along which the cosine
       similarity is computed.

-  Usage:
+  Standalone usage:

   >>> # l2_norm(y_true) = [[0., 1.], [1./1.414], 1./1.414]]]
   >>> # l2_norm(y_pred) = [[1., 0.], [1./1.414], 1./1.414]]]
@@ -2257,12 +2256,11 @@ class CosineSimilarity(MeanMetricWrapper):
   >>> m.result().numpy()
   0.6999999

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.CosineSimilarity(axis=1)])
   ```
@@ -2281,7 +2279,7 @@ class MeanAbsoluteError(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanAbsoluteError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2294,12 +2292,13 @@ class MeanAbsoluteError(MeanMetricWrapper):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd', loss='mse', metrics=[tf.keras.metrics.MeanAbsoluteError()])
+      optimizer='sgd',
+      loss='mse',
+      metrics=[tf.keras.metrics.MeanAbsoluteError()])
   ```
   """
@@ -2316,7 +2315,7 @@ class MeanAbsolutePercentageError(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanAbsolutePercentageError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2329,12 +2328,11 @@ class MeanAbsolutePercentageError(MeanMetricWrapper):
   >>> m.result().numpy()
   500000000.0

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.MeanAbsolutePercentageError()])
   ```
@@ -2353,7 +2351,7 @@ class MeanSquaredError(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanSquaredError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2366,12 +2364,13 @@ class MeanSquaredError(MeanMetricWrapper):
   >>> m.result().numpy()
   0.5

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd', loss='mse', metrics=[tf.keras.metrics.MeanSquaredError()])
+      optimizer='sgd',
+      loss='mse',
+      metrics=[tf.keras.metrics.MeanSquaredError()])
   ```
   """
@@ -2388,7 +2387,7 @@ class MeanSquaredLogarithmicError(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanSquaredLogarithmicError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2401,12 +2400,11 @@ class MeanSquaredLogarithmicError(MeanMetricWrapper):
   >>> m.result().numpy()
   0.24022643

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.MeanSquaredLogarithmicError()])
   ```
@@ -2428,7 +2426,7 @@ class Hinge(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Hinge()
   >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -2441,11 +2439,10 @@ class Hinge(MeanMetricWrapper):
   >>> m.result().numpy()
   1.1

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.Hinge()])
+  model.compile(optimizer='sgd', loss='mse', metrics=[tf.keras.metrics.Hinge()])
   ```
   """
@@ -2464,7 +2461,7 @@ class SquaredHinge(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.SquaredHinge()
   >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -2477,12 +2474,11 @@ class SquaredHinge(MeanMetricWrapper):
   >>> m.result().numpy()
   1.46

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SquaredHinge()])
   ```
@@ -2500,7 +2496,7 @@ class CategoricalHinge(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.CategoricalHinge()
   >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -2513,12 +2509,11 @@ class CategoricalHinge(MeanMetricWrapper):
   >>> m.result().numpy()
   1.2

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.CategoricalHinge()])
   ```
@@ -2532,7 +2527,7 @@ class CategoricalHinge(MeanMetricWrapper):
 class RootMeanSquaredError(Mean):
   """Computes root mean squared error metric between `y_true` and `y_pred`.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.RootMeanSquaredError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2545,12 +2540,11 @@ class RootMeanSquaredError(Mean):
   >>> m.result().numpy()
   0.70710677

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.RootMeanSquaredError()])
   ```
@@ -2594,7 +2588,7 @@ class LogCoshError(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.LogCoshError()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2607,11 +2601,12 @@ class LogCoshError(MeanMetricWrapper):
   >>> m.result().numpy()
   0.21689045

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.LogCoshError()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.LogCoshError()])
   ```
   """
@@ -2629,7 +2624,7 @@ class Poisson(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.Poisson()
   >>> m.update_state([[0, 1], [0, 0]], [[1, 1], [0, 0]])
@@ -2642,11 +2637,12 @@ class Poisson(MeanMetricWrapper):
   >>> m.result().numpy()
   0.99999994

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.Poisson()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.Poisson()])
   ```
   """
@@ -2664,7 +2660,7 @@ class KLDivergence(MeanMetricWrapper):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.KLDivergence()
   >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -2677,11 +2673,12 @@ class KLDivergence(MeanMetricWrapper):
   >>> m.result().numpy()
   0.9162892

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
-  model.compile('sgd', loss='mse', metrics=[tf.keras.metrics.KLDivergence()])
+  model.compile(optimizer='sgd',
+                loss='mse',
+                metrics=[tf.keras.metrics.KLDivergence()])
   ```
   """
@@ -2711,7 +2708,7 @@ class MeanIoU(Metric):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> # cm = [[1, 1],
   >>> #       [1, 1]]
@@ -2729,12 +2726,11 @@ class MeanIoU(Metric):
   >>> m.result().numpy()
   0.23809525

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.MeanIoU(num_classes=2)])
   ```
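The confusion-matrix comment in this hunk is worth making concrete. A minimal unweighted run (the hunk's 0.23809525 appears to come from the docstring's sample-weighted variant, which is our reading of the truncated context):

```python
import tensorflow as tf

m = tf.keras.metrics.MeanIoU(num_classes=2)
m.update_state([0, 0, 1, 1], [0, 1, 0, 1])
# Confusion matrix cm = [[1, 1], [1, 1]].
# Per-class IoU = true_positives / (row_sum + col_sum - true_positives)
#               = 1 / (2 + 2 - 1) = 1/3 for each class; mean = 1/3.
print(m.result().numpy())  # 0.33333334
```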
@@ -2836,7 +2832,7 @@ class MeanTensor(Metric):
     name: (Optional) string name of the metric instance.
     dtype: (Optional) data type of the metric result.

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.MeanTensor()
   >>> m.update_state([0, 1, 2, 3])
@@ -2951,7 +2947,7 @@ class BinaryCrossentropy(MeanMetricWrapper):
   e.g. `label_smoothing=0.2` means that we will use a value of `0.1` for
   label `0` and `0.9` for label `1`".

-  Usage:
+  Standalone usage:

   >>> m = tf.keras.metrics.BinaryCrossentropy()
   >>> m.update_state([[0, 1], [0, 0]], [[0.6, 0.4], [0.4, 0.6]])
@@ -2964,12 +2960,11 @@ class BinaryCrossentropy(MeanMetricWrapper):
   >>> m.result().numpy()
   0.9162905

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.BinaryCrossentropy()])
   ```
@@ -3007,7 +3002,7 @@ class CategoricalCrossentropy(MeanMetricWrapper):
   `label_smoothing=0.2` means that we will use a value of `0.1` for label
   `0` and `0.9` for label `1`"

-  Usage:
+  Standalone usage:

   >>> # EPSILON = 1e-7, y = y_true, y` = y_pred
   >>> # y` = clip_ops.clip_by_value(output, EPSILON, 1. - EPSILON)
@@ -3029,12 +3024,11 @@ class CategoricalCrossentropy(MeanMetricWrapper):
   >>> m.result().numpy()
   1.6271976

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.CategoricalCrossentropy()])
   ```
@@ -3076,7 +3070,7 @@ class SparseCategoricalCrossentropy(MeanMetricWrapper):
   axis: (Optional) Defaults to -1. The dimension along which the metric is
     computed.

-  Usage:
+  Standalone usage:

   >>> # y_true = one_hot(y_true) = [[0, 1, 0], [0, 0, 1]]
   >>> # logits = log(y_pred)
@@ -3101,12 +3095,11 @@ class SparseCategoricalCrossentropy(MeanMetricWrapper):
   >>> m.result().numpy()
   1.6271976

-  Usage with tf.keras API:
+  Usage with `compile()` API:

   ```python
   model = tf.keras.Model(inputs, outputs)
   model.compile(
-      'sgd',
+      optimizer='sgd',
       loss='mse',
       metrics=[tf.keras.metrics.SparseCategoricalCrossentropy()])
   ```
@@ -3196,7 +3189,7 @@ def accuracy(y_true, y_pred):
 def binary_accuracy(y_true, y_pred, threshold=0.5):
   """Calculates how often predictions matches binary labels.

-  Usage:
+  Standalone usage:
   >>> y_true = [[1], [1], [0], [0]]
   >>> y_pred = [[1], [1], [0], [0]]
   >>> m = tf.keras.metrics.binary_accuracy(y_true, y_pred)
@@ -3223,7 +3216,7 @@ def binary_accuracy(y_true, y_pred, threshold=0.5):
 def categorical_accuracy(y_true, y_pred):
   """Calculates how often predictions matches one-hot labels.

-  Usage:
+  Standalone usage:
   >>> y_true = [[0, 0, 1], [0, 1, 0]]
   >>> y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]]
   >>> m = tf.keras.metrics.categorical_accuracy(y_true, y_pred)
@@ -3251,7 +3244,7 @@ def categorical_accuracy(y_true, y_pred):
 def sparse_categorical_accuracy(y_true, y_pred):
   """Calculates how often predictions matches integer labels.

-  Usage:
+  Standalone usage:
   >>> y_true = [2, 1]
   >>> y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]]
   >>> m = tf.keras.metrics.sparse_categorical_accuracy(y_true, y_pred)
@@ -3291,7 +3284,7 @@ def sparse_categorical_accuracy(y_true, y_pred):
 def top_k_categorical_accuracy(y_true, y_pred, k=5):
   """Computes how often targets are in the top `K` predictions.

-  Usage:
+  Standalone usage:
   >>> y_true = [[0, 0, 1], [0, 1, 0]]
   >>> y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]]
   >>> m = tf.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, k=3)
@@ -3316,7 +3309,7 @@ def top_k_categorical_accuracy(y_true, y_pred, k=5):
 def sparse_top_k_categorical_accuracy(y_true, y_pred, k=5):
   """Computes how often integer targets are in the top `K` predictions.

-  Usage:
+  Standalone usage:
   >>> y_true = [2, 1]
   >>> y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]]
   >>> m = tf.keras.metrics.sparse_top_k_categorical_accuracy(
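Unlike the `Metric` classes earlier in the diff, these functional forms return a per-sample tensor rather than an accumulating object; a quick sketch:

```python
import tensorflow as tf

y_true = [2, 1]
y_pred = [[0.1, 0.9, 0.8], [0.05, 0.95, 0]]
# Returns one 0./1. entry per sample instead of a running aggregate.
m = tf.keras.metrics.sparse_top_k_categorical_accuracy(y_true, y_pred, k=3)
print(m.numpy())  # [1. 1.]: with k=3 every class is within the top k
```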
@@ -3467,3 +3460,4 @@ def get(identifier):

 def is_built_in(cls):
   return cls.__module__ == Metric.__module__
+