- Adding V2 API for MeanSquaredError loss.

- Deprecating V1 losses APIs.

PiperOrigin-RevId: 222910192
This commit is contained in:
Pavithra Vijay 2018-11-26 16:51:20 -08:00 committed by TensorFlower Gardener
parent 958859263d
commit 35dcdd967e
10 changed files with 136 additions and 56 deletions

View File

@@ -115,8 +115,28 @@ class Loss(object):
NotImplementedError('Must be implemented in subclasses.')
@tf_export('losses.MeanSquaredError', 'keras.losses.MeanSquaredError')
class MeanSquaredError(Loss):
"""Computes the mean of squares of errors between labels and predictions."""
"""Computes the mean of squares of errors between labels and predictions.
For example, if `y_true` is [0., 0., 1., 1.] and `y_pred` is [1., 1., 1., 0.]
then the mean squared error value is 3/4 (0.75).
Usage:
```python
mse = tf.losses.MeanSquaredError()
loss = mse([0., 0., 1., 1.], [1., 1., 1., 0.])
print('Loss: ', loss.numpy()) # Loss: 0.75
```
Usage with tf.keras API:
```python
model = keras.models.Model(inputs, outputs)
model.compile('sgd', loss=tf.losses.MeanSquaredError())
```
"""
def call(self, y_true, y_pred):
"""Invokes the `MeanSquaredError` instance.

View File

@@ -133,7 +133,7 @@ def _num_elements(losses):
return math_ops.cast(array_ops.size(losses, name=scope), dtype=losses.dtype)
@tf_export("losses.compute_weighted_loss")
@tf_export(v1=["losses.compute_weighted_loss"])
def compute_weighted_loss(
losses, weights=1.0, scope=None, loss_collection=ops.GraphKeys.LOSSES,
reduction=Reduction.SUM_BY_NONZERO_WEIGHTS):
@@ -203,7 +203,7 @@ def compute_weighted_loss(
return loss
@tf_export("losses.absolute_difference")
@tf_export(v1=["losses.absolute_difference"])
def absolute_difference(
labels, predictions, weights=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
@@ -256,7 +256,7 @@ def absolute_difference(
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.cosine_distance")
@tf_export(v1=["losses.cosine_distance"])
@deprecated_args(None, "dim is deprecated, use axis instead", "dim")
def cosine_distance(
labels, predictions, axis=None, weights=1.0, scope=None,
@@ -312,7 +312,7 @@ def cosine_distance(
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.hinge_loss")
@tf_export(v1=["losses.hinge_loss"])
def hinge_loss(labels, logits, weights=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
reduction=Reduction.SUM_BY_NONZERO_WEIGHTS):
@@ -362,7 +362,7 @@ def hinge_loss(labels, logits, weights=1.0, scope=None,
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.huber_loss")
@tf_export(v1=["losses.huber_loss"])
def huber_loss(labels, predictions, weights=1.0, delta=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
reduction=Reduction.SUM_BY_NONZERO_WEIGHTS):
@@ -440,7 +440,7 @@ def huber_loss(labels, predictions, weights=1.0, delta=1.0, scope=None,
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.log_loss")
@tf_export(v1=["losses.log_loss"])
def log_loss(labels, predictions, weights=1.0, epsilon=1e-7, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
reduction=Reduction.SUM_BY_NONZERO_WEIGHTS):
@@ -497,7 +497,7 @@ def log_loss(labels, predictions, weights=1.0, epsilon=1e-7, scope=None,
# TODO(b/37208492): Add reduction arg.
@tf_export("losses.mean_pairwise_squared_error")
@tf_export(v1=["losses.mean_pairwise_squared_error"])
def mean_pairwise_squared_error(
labels, predictions, weights=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES):
@@ -593,7 +593,7 @@ def mean_pairwise_squared_error(
return mean_loss
@tf_export("losses.mean_squared_error")
@tf_export(v1=["losses.mean_squared_error"])
def mean_squared_error(
labels, predictions, weights=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
@@ -646,7 +646,7 @@ def mean_squared_error(
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.sigmoid_cross_entropy")
@tf_export(v1=["losses.sigmoid_cross_entropy"])
def sigmoid_cross_entropy(
multi_class_labels, logits, weights=1.0, label_smoothing=0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
@@ -710,7 +710,7 @@ def sigmoid_cross_entropy(
losses, weights, scope, loss_collection, reduction=reduction)
@tf_export("losses.softmax_cross_entropy")
@tf_export(v1=["losses.softmax_cross_entropy"])
def softmax_cross_entropy(
onehot_labels, logits, weights=1.0, label_smoothing=0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,
@@ -832,7 +832,7 @@ def _remove_squeezable_dimensions(
return labels, predictions, weights
@tf_export("losses.sparse_softmax_cross_entropy")
@tf_export(v1=["losses.sparse_softmax_cross_entropy"])
def sparse_softmax_cross_entropy(
labels, logits, weights=1.0, scope=None,
loss_collection=ops.GraphKeys.LOSSES,

View File

@@ -0,0 +1,22 @@
path: "tensorflow.keras.losses.MeanSquaredError"
tf_class {
is_instance: "<class \'tensorflow.python.keras.losses.MeanSquaredError\'>"
is_instance: "<class \'tensorflow.python.keras.losses.Loss\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'sum_over_batch_size\', \'None\'], "
}
member_method {
name: "call"
argspec: "args=[\'self\', \'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}

View File

@@ -1,5 +1,9 @@
path: "tensorflow.keras.losses"
tf_module {
member {
name: "MeanSquaredError"
mtype: "<type \'type\'>"
}
member_method {
name: "KLD"
argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

View File

@@ -0,0 +1,22 @@
path: "tensorflow.losses.MeanSquaredError"
tf_class {
is_instance: "<class \'tensorflow.python.keras.losses.MeanSquaredError\'>"
is_instance: "<class \'tensorflow.python.keras.losses.Loss\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'sum_over_batch_size\', \'None\'], "
}
member_method {
name: "call"
argspec: "args=[\'self\', \'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}

View File

@@ -1,5 +1,9 @@
path: "tensorflow.losses"
tf_module {
member {
name: "MeanSquaredError"
mtype: "<type \'type\'>"
}
member {
name: "Reduction"
mtype: "<type \'type\'>"

View File

@@ -0,0 +1,22 @@
path: "tensorflow.keras.losses.MeanSquaredError"
tf_class {
is_instance: "<class \'tensorflow.python.keras.losses.MeanSquaredError\'>"
is_instance: "<class \'tensorflow.python.keras.losses.Loss\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'sum_over_batch_size\', \'None\'], "
}
member_method {
name: "call"
argspec: "args=[\'self\', \'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}

View File

@@ -1,5 +1,9 @@
path: "tensorflow.keras.losses"
tf_module {
member {
name: "MeanSquaredError"
mtype: "<type \'type\'>"
}
member {
name: "Reduction"
mtype: "<type \'type\'>"

View File

@@ -0,0 +1,22 @@
path: "tensorflow.losses.MeanSquaredError"
tf_class {
is_instance: "<class \'tensorflow.python.keras.losses.MeanSquaredError\'>"
is_instance: "<class \'tensorflow.python.keras.losses.Loss\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'sum_over_batch_size\', \'None\'], "
}
member_method {
name: "call"
argspec: "args=[\'self\', \'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}

View File

@@ -1,25 +1,17 @@
path: "tensorflow.losses"
tf_module {
member {
name: "MeanSquaredError"
mtype: "<type \'type\'>"
}
member {
name: "Reduction"
mtype: "<type \'type\'>"
}
member_method {
name: "absolute_difference"
argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'loss\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'losses\'], "
}
member_method {
name: "compute_weighted_loss"
argspec: "args=[\'losses\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "cosine_distance"
argspec: "args=[\'labels\', \'predictions\', \'axis\', \'weights\', \'scope\', \'loss_collection\', \'reduction\', \'dim\'], varargs=None, keywords=None, defaults=[\'None\', \'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\', \'None\'], "
}
member_method {
name: "get_losses"
argspec: "args=[\'scope\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'None\', \'losses\'], "
@@ -36,36 +28,4 @@ tf_module {
name: "get_total_loss"
argspec: "args=[\'add_regularization_losses\', \'name\'], varargs=None, keywords=None, defaults=[\'True\', \'total_loss\'], "
}
member_method {
name: "hinge_loss"
argspec: "args=[\'labels\', \'logits\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "huber_loss"
argspec: "args=[\'labels\', \'predictions\', \'weights\', \'delta\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "log_loss"
argspec: "args=[\'labels\', \'predictions\', \'weights\', \'epsilon\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'1e-07\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "mean_pairwise_squared_error"
argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\'], "
}
member_method {
name: "mean_squared_error"
argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "sigmoid_cross_entropy"
argspec: "args=[\'multi_class_labels\', \'logits\', \'weights\', \'label_smoothing\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "softmax_cross_entropy"
argspec: "args=[\'onehot_labels\', \'logits\', \'weights\', \'label_smoothing\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
member_method {
name: "sparse_softmax_cross_entropy"
argspec: "args=[\'labels\', \'logits\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
}
}