Regularize loss naming quirks.
Previously, some of our losses did not respect the rule "for every loss class with name XxxYyy, there is an equivalent loss function with name xxx_yyy". In particular:

- KLDivergence class -> kullback_leibler_divergence function (expected: kl_divergence)
- LogCosh class -> logcosh function (expected: log_cosh)
- Huber class -> corresponding function not exported (expected: huber)

This change is backwards compatible: it only adds aliases and changes the default names for LogCosh and KLDivergence, which is fine since we make no guarantees about default names.

PiperOrigin-RevId: 303812304
Change-Id: I2f62d594d99f3fa30fbf04bf92c0dd5caadc0958
This commit is contained in: tensorflow (parent 111fa339f6, commit c1065c4c79)
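The net effect can be exercised directly; a minimal sketch, assuming a TensorFlow build that includes this change (the input values are illustrative):

```python
import tensorflow as tf

y_true = tf.constant([[0.2, 0.8], [0.5, 0.5]])
y_pred = tf.constant([[0.3, 0.7], [0.4, 0.6]])

# New snake_case names matching the classes KLDivergence / LogCosh / Huber:
kl = tf.keras.losses.kl_divergence(y_true, y_pred)
lc = tf.keras.losses.log_cosh(y_true, y_pred)
hb = tf.keras.losses.huber(y_true, y_pred)  # newly exported, v2 API only

# Old spellings remain available as backwards-compatible aliases:
assert tf.keras.losses.kullback_leibler_divergence is tf.keras.losses.kl_divergence
assert tf.keras.losses.logcosh is tf.keras.losses.log_cosh
```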
@@ -1014,7 +1014,7 @@ class LogCosh(LossFunctionWrapper):
   ```
   """
 
-  def __init__(self, reduction=losses_utils.ReductionV2.AUTO, name='logcosh'):
+  def __init__(self, reduction=losses_utils.ReductionV2.AUTO, name='log_cosh'):
     """Initializes `LogCosh` instance.
 
     Args:
@@ -1027,9 +1027,9 @@ class LogCosh(LossFunctionWrapper):
         will raise an error. Please see this custom training [tutorial]
        (https://www.tensorflow.org/tutorials/distribute/custom_training)
        for more details.
-      name: Optional name for the op. Defaults to 'logcosh'.
+      name: Optional name for the op. Defaults to 'log_cosh'.
     """
-    super(LogCosh, self).__init__(logcosh, name=name, reduction=reduction)
+    super(LogCosh, self).__init__(log_cosh, name=name, reduction=reduction)
 
 
 @keras_export('keras.losses.KLDivergence')
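With the rename, the class's default name matches its function; a quick check (a sketch, not part of the commit itself):

```python
import tensorflow as tf

loss_fn = tf.keras.losses.LogCosh()
print(loss_fn.name)  # 'log_cosh' (was 'logcosh' before this change)

y_true = tf.constant([[0.0, 1.0], [0.0, 0.0]])
y_pred = tf.constant([[1.0, 1.0], [0.0, 0.0]])
print(loss_fn(y_true, y_pred).numpy())  # scalar under the default AUTO reduction
```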
@@ -1075,7 +1075,7 @@ class KLDivergence(LossFunctionWrapper):
 
   def __init__(self,
                reduction=losses_utils.ReductionV2.AUTO,
-               name='kullback_leibler_divergence'):
+               name='kl_divergence'):
     """Initializes `KLDivergence` instance.
 
     Args:
@@ -1088,10 +1088,10 @@ class KLDivergence(LossFunctionWrapper):
         will raise an error. Please see this custom training [tutorial]
        (https://www.tensorflow.org/tutorials/distribute/custom_training)
        for more details.
-      name: Optional name for the op. Defaults to 'kullback_leibler_divergence'.
+      name: Optional name for the op. Defaults to 'kl_divergence'.
     """
     super(KLDivergence, self).__init__(
-        kullback_leibler_divergence, name=name, reduction=reduction)
+        kl_divergence, name=name, reduction=reduction)
 
 
 @keras_export('keras.losses.Huber')
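The same pattern for KLDivergence; a hedged sketch of the observable difference (only the default op name changes, not the computed value):

```python
import tensorflow as tf

kl = tf.keras.losses.KLDivergence()
print(kl.name)  # 'kl_divergence' (was 'kullback_leibler_divergence')

y_true = tf.constant([[0.2, 0.8]])
y_pred = tf.constant([[0.3, 0.7]])
print(kl(y_true, y_pred).numpy())  # ~0.0257, unchanged by the rename
```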
@@ -1160,7 +1160,7 @@ class Huber(LossFunctionWrapper):
       name: Optional name for the op. Defaults to 'huber_loss'.
     """
     super(Huber, self).__init__(
-        huber_loss, name=name, reduction=reduction, delta=delta)
+        huber, name=name, reduction=reduction, delta=delta)
 
 
 @keras_export('keras.metrics.mean_squared_error',
@@ -1414,7 +1414,8 @@ def categorical_hinge(y_true, y_pred):
   return math_ops.maximum(0., neg - pos + 1.)
 
 
-def huber_loss(y_true, y_pred, delta=1.0):
+@keras_export('keras.losses.huber', v1=[])
+def huber(y_true, y_pred, delta=1.0):
   """Computes Huber loss value.
 
   For each value x in `error = y_true - y_pred`:
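For reference, the piecewise rule the newly exported `huber` implements can be checked by hand (a sketch; `delta=1.0` is the default):

```python
import tensorflow as tf

y_true = tf.constant([0.0, 0.0])
y_pred = tf.constant([0.5, 2.0])  # errors of 0.5 (quadratic zone) and 2.0 (linear zone)

# |x| <= delta: 0.5 * x**2                          -> 0.5 * 0.25 = 0.125
# |x| >  delta: 0.5 * delta**2 + delta * (|x|-delta) -> 0.5 + 1.0 = 1.5
loss = tf.keras.losses.huber(y_true, y_pred, delta=1.0)
print(loss.numpy())  # mean over the last axis: (0.125 + 1.5) / 2 = 0.8125
```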
@@ -1449,8 +1450,8 @@ def huber_loss(y_true, y_pred, delta=1.0):
       axis=-1)
 
 
-@keras_export('keras.losses.logcosh')
-def logcosh(y_true, y_pred):
+@keras_export('keras.losses.log_cosh', 'keras.losses.logcosh')
+def log_cosh(y_true, y_pred):
   """Logarithm of the hyperbolic cosine of the prediction error.
 
   `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and
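The docstring's approximation (quadratic for small `x`, and roughly `abs(x) - log(2)` for large `x`; the large-`x` half is truncated in this hunk but the identity is standard) is easy to verify numerically; a sketch:

```python
import numpy as np

x = 0.01
print(np.log(np.cosh(x)), x**2 / 2)              # both ~5.0e-05: quadratic for small x
x = 10.0
print(np.log(np.cosh(x)), abs(x) - np.log(2.0))  # both ~9.3069: linear for large x
```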
@@ -1594,13 +1595,15 @@ def binary_crossentropy(y_true, y_pred, from_logits=False, label_smoothing=0):
       K.binary_crossentropy(y_true, y_pred, from_logits=from_logits), axis=-1)
 
 
-@keras_export('keras.metrics.kullback_leibler_divergence',
+@keras_export('keras.metrics.kl_divergence',
+              'keras.metrics.kullback_leibler_divergence',
               'keras.metrics.kld',
               'keras.metrics.KLD',
+              'keras.losses.kl_divergence',
               'keras.losses.kullback_leibler_divergence',
               'keras.losses.kld',
               'keras.losses.KLD')
-def kullback_leibler_divergence(y_true, y_pred):
+def kl_divergence(y_true, y_pred):
   """Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.
 
   `loss = y_true * log(y_true / y_pred)`
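The formula in the docstring can be reproduced by hand (a sketch; note the Keras implementation also clips inputs away from 0 to avoid log(0), so values can differ slightly near the boundaries):

```python
import numpy as np

y_true = np.array([0.2, 0.8])
y_pred = np.array([0.3, 0.7])

# loss = sum(y_true * log(y_true / y_pred)) over the last axis
print(np.sum(y_true * np.log(y_true / y_pred)))  # ~0.0257
```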
@@ -1795,7 +1798,9 @@ mse = MSE = mean_squared_error
 mae = MAE = mean_absolute_error
 mape = MAPE = mean_absolute_percentage_error
 msle = MSLE = mean_squared_logarithmic_error
-kld = KLD = kullback_leibler_divergence
+kld = KLD = kullback_leibler_divergence = kl_divergence
+logcosh = log_cosh
+huber_loss = huber
 
 
 def is_categorical_crossentropy(loss):
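Since these are plain module-level assignments, every spelling resolves to the same function object; a sketch against the internal module the diff touches:

```python
from tensorflow.python.keras import losses

assert losses.kld is losses.KLD is losses.kullback_leibler_divergence is losses.kl_divergence
assert losses.logcosh is losses.log_cosh
assert losses.huber_loss is losses.huber  # old internal name kept as an alias
```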
@@ -36,8 +36,8 @@ ALL_LOSSES = [
     losses.mean_absolute_percentage_error,
     losses.mean_squared_logarithmic_error, losses.squared_hinge, losses.hinge,
     losses.categorical_crossentropy, losses.binary_crossentropy,
-    losses.kullback_leibler_divergence, losses.poisson,
-    losses.cosine_similarity, losses.logcosh, losses.categorical_hinge
+    losses.kl_divergence, losses.poisson,
+    losses.cosine_similarity, losses.log_cosh, losses.categorical_hinge
 ]
 
 
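In spirit, the tests sweep every function in ALL_LOSSES through a serialize/deserialize round trip; a hedged sketch of that idea (not the actual test body):

```python
from tensorflow.python.keras import losses

for fn in [losses.kl_divergence, losses.log_cosh, losses.poisson]:
    name = losses.serialize(fn)      # plain functions serialize to their names
    assert losses.deserialize(name) is fn, name
```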
The remaining hunks update the API golden files (*.pbtxt) that pin TensorFlow's public surface; the same renames appear in the v1 and v2 goldens for the KLDivergence and LogCosh classes and for the losses/metrics modules:

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kullback_leibler_divergence\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kl_divergence\'], "
   }
   member_method {
     name: "call"

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'logcosh\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'log_cosh\'], "
   }
   member_method {
     name: "call"

@@ -120,6 +120,10 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
@@ -128,6 +132,10 @@ tf_module {
     name: "kullback_leibler_divergence"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "log_cosh"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "logcosh"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

@@ -208,6 +208,10 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kullback_leibler_divergence\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kl_divergence\'], "
   }
   member_method {
     name: "call"

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'logcosh\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'log_cosh\'], "
   }
   member_method {
     name: "call"

@@ -116,6 +116,14 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "huber"
+    argspec: "args=[\'y_true\', \'y_pred\', \'delta\'], varargs=None, keywords=None, defaults=[\'1.0\'], "
+  }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
@@ -124,6 +132,10 @@ tf_module {
     name: "kullback_leibler_divergence"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "log_cosh"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "logcosh"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

@@ -200,6 +200,10 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kullback_leibler_divergence\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'kl_divergence\'], "
   }
   member_method {
     name: "call"

@@ -6,7 +6,7 @@ tf_class {
   is_instance: "<type \'object\'>"
   member_method {
     name: "__init__"
-    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'logcosh\'], "
+    argspec: "args=[\'self\', \'reduction\', \'name\'], varargs=None, keywords=None, defaults=[\'auto\', \'log_cosh\'], "
   }
   member_method {
     name: "call"

@@ -116,6 +116,14 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "huber"
+    argspec: "args=[\'y_true\', \'y_pred\', \'delta\'], varargs=None, keywords=None, defaults=[\'1.0\'], "
+  }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
@@ -124,6 +132,10 @@ tf_module {
     name: "kullback_leibler_divergence"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "log_cosh"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "logcosh"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"

@@ -200,6 +200,10 @@ tf_module {
     name: "hinge"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
   }
+  member_method {
+    name: "kl_divergence"
+    argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"
+  }
   member_method {
     name: "kld"
     argspec: "args=[\'y_true\', \'y_pred\'], varargs=None, keywords=None, defaults=None"