Automated rollback of commit e75cf2b3c3

PiperOrigin-RevId: 266491204
This commit is contained in:
Pavithra Vijay 2019-08-30 18:08:55 -07:00 committed by TensorFlower Gardener
parent 25ac189760
commit c3fb862245
3 changed files with 11 additions and 30 deletions

View File

@@ -1056,6 +1056,9 @@ def poisson(y_true, y_pred):
return K.mean(y_pred - y_true * math_ops.log(y_pred + K.epsilon()), axis=-1)
# Retaining the legacy namespaces: 'cosine_proximity' and 'cosine'.
# TODO(psv): Change name of this function to `cosine_similarity` after fixing
# estimator test.
@keras_export(
'keras.losses.cosine_similarity',
v1=[
@@ -1066,26 +1069,10 @@ def poisson(y_true, y_pred):
'keras.losses.cosine_similarity',
])
def cosine_proximity(y_true, y_pred, axis=-1):
  """Computes the cosine similarity between labels and predictions.

  Both inputs are L2-normalized along `axis` before the elementwise
  product is summed, i.e.
  `similarity = sum(l2_normalize(y_true) * l2_normalize(y_pred))`.

  Args:
    y_true: Tensor of true targets.
    y_pred: Tensor of predicted targets.
    axis: Axis along which to determine similarity.

  Returns:
    Cosine similarity tensor.
  """
  y_true = nn.l2_normalize(y_true, axis=axis)
  y_pred = nn.l2_normalize(y_pred, axis=axis)
  # NOTE: the un-negated sum is the post-rollback behavior; the test
  # expectations in this commit were changed to `np.mean(...)` (no minus)
  # to match.
  return math_ops.reduce_sum(y_true * y_pred, axis=axis)
@keras_export('keras.losses.CosineSimilarity')

View File

@@ -605,7 +605,7 @@ class CosineSimilarityTest(test.TestCase):
self.setup()
cosine_obj = keras.losses.CosineSimilarity()
loss = cosine_obj(self.y_true, self.y_pred)
expected_loss = -np.mean(self.expected_loss)
expected_loss = np.mean(self.expected_loss)
self.assertAlmostEqual(self.evaluate(loss), expected_loss, 3)
def test_scalar_weighted(self):
@@ -613,7 +613,7 @@ class CosineSimilarityTest(test.TestCase):
cosine_obj = keras.losses.CosineSimilarity()
sample_weight = 2.3
loss = cosine_obj(self.y_true, self.y_pred, sample_weight=sample_weight)
expected_loss = -np.mean(self.expected_loss * sample_weight)
expected_loss = np.mean(self.expected_loss * sample_weight)
self.assertAlmostEqual(self.evaluate(loss), expected_loss, 3)
def test_sample_weighted(self):
@@ -624,7 +624,7 @@ class CosineSimilarityTest(test.TestCase):
self.y_true,
self.y_pred,
sample_weight=constant_op.constant(sample_weight))
expected_loss = -np.mean(self.expected_loss * sample_weight)
expected_loss = np.mean(self.expected_loss * sample_weight)
self.assertAlmostEqual(self.evaluate(loss), expected_loss, 3)
def test_timestep_weighted(self):
@@ -643,7 +643,7 @@ class CosineSimilarityTest(test.TestCase):
loss = cosine_obj(
y_true, y_pred, sample_weight=constant_op.constant(sample_weight))
expected_loss = -np.mean(expected_loss * sample_weight)
expected_loss = np.mean(expected_loss * sample_weight)
self.assertAlmostEqual(self.evaluate(loss), expected_loss, 3)
def test_zero_weighted(self):
@@ -656,7 +656,7 @@ class CosineSimilarityTest(test.TestCase):
self.setup(axis=1)
cosine_obj = keras.losses.CosineSimilarity(axis=1)
loss = cosine_obj(self.y_true, self.y_pred)
expected_loss = -np.mean(self.expected_loss)
expected_loss = np.mean(self.expected_loss)
self.assertAlmostEqual(self.evaluate(loss), expected_loss, 3)

View File

@@ -35,6 +35,7 @@ from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.losses import binary_crossentropy
from tensorflow.python.keras.losses import categorical_crossentropy
from tensorflow.python.keras.losses import categorical_hinge
from tensorflow.python.keras.losses import cosine_similarity
from tensorflow.python.keras.losses import hinge
from tensorflow.python.keras.losses import kullback_leibler_divergence
from tensorflow.python.keras.losses import logcosh
@@ -2804,13 +2805,6 @@ def sparse_top_k_categorical_accuracy(y_true, y_pred, k=5):
return math_ops.cast(
nn.in_top_k(y_pred, math_ops.cast(y_true, 'int32'), k), K.floatx())
def cosine_similarity(y_true, y_pred, axis=-1):
  """Cosine similarity of `y_true` and `y_pred`, reduced along `axis`.

  Each input is L2-normalized along `axis`; the similarity is the sum of
  the elementwise product of the normalized tensors.
  """
  normalized_true = nn.l2_normalize(y_true, axis=axis)
  normalized_pred = nn.l2_normalize(y_pred, axis=axis)
  return math_ops.reduce_sum(normalized_true * normalized_pred, axis=axis)
# Aliases: short-form names bound to the same function object as
# `mean_squared_error`, so all three spellings are interchangeable.
mse = MSE = mean_squared_error