Add tf.log_sigmoid

This is a numerically stable version of tf.log(tf.sigmoid(x)).  It's just
-tf.nn.softplus(-x), but it's easy to add and the identity is easy to mistype.

RELNOTES: Add tf.log_sigmoid(x) = tf.log(tf.sigmoid(x)) = -tf.nn.softplus(-x).

Fixes #3719.
Change: 154308666
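
As a quick illustration of the stability issue (a NumPy sketch for this note, not part of the change): for large negative x, the naive formula underflows sigmoid(x) to 0 and the log returns -inf, while the softplus form stays exact.

import numpy as np

x = -800.0
naive = np.log(1.0 / (1.0 + np.exp(-x)))  # exp(800) overflows, sigmoid rounds to 0, log(0) = -inf
stable = -np.logaddexp(0.0, -x)           # softplus(-x) = logaddexp(0, -x), computed stably: exactly -800.0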
Geoffrey Irving 2017-04-26 08:53:36 -08:00 committed by TensorFlower Gardener
parent cfbeafe11d
commit 9845d0e822
7 changed files with 32 additions and 0 deletions


@@ -1499,6 +1499,7 @@ py_library(
         ":framework_ops",
         ":graph_util",
         ":math_ops_gen",
+        ":nn_ops_gen",
         ":sparse_ops_gen",
         ":sparse_tensor",
         ":spectral_ops_gen",


@@ -2347,6 +2347,7 @@ cuda_py_test(
         "//tensorflow/python:gradients",
         "//tensorflow/python:math_ops",
         "//tensorflow/python:math_ops_gen",
+        "//tensorflow/python:nn_grad",
         "//tensorflow/python:platform",
         "//tensorflow/python:variables",
     ],


@@ -31,6 +31,7 @@ from tensorflow.python.ops import gen_math_ops
 from tensorflow.python.ops import gradient_checker
 from tensorflow.python.ops import gradients_impl
 from tensorflow.python.ops import math_ops
+from tensorflow.python.ops import nn_grad  # pylint: disable=unused-import
 from tensorflow.python.ops import variables
 from tensorflow.python.platform import test
 from tensorflow.python.platform import tf_logging
@@ -165,6 +166,9 @@ class UnaryOpTest(test.TestCase):
   def _sigmoid(self, x):
     return 1.0 / (1.0 + np.exp(-x))
 
+  def _log_sigmoid(self, x):
+    return np.log(self._sigmoid(x))
+
   def _replace_domain_error_with_inf(self, fn):
     def func(x):
@@ -198,6 +202,7 @@ class UnaryOpTest(test.TestCase):
     self._compareBoth(z, np.log1p, math_ops.log1p)
     self._compareBoth(x, np.tanh, math_ops.tanh)
     self._compareBoth(x, self._sigmoid, math_ops.sigmoid)
+    self._compareBoth(x, self._log_sigmoid, math_ops.log_sigmoid)
     self._compareBoth(y, np.sign, math_ops.sign)
     self._compareBoth(x, np.sin, math_ops.sin)
     self._compareBoth(x, np.cos, math_ops.cos)
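
`_compareBoth` is defined outside this hunk; judging by its name and call sites, it evaluates the TensorFlow op against the NumPy reference on the same inputs (on both CPU and GPU). A rough standalone equivalent of that check, sketched in TF 1.x session style:

import numpy as np
import tensorflow as tf

def _compare(x, np_fn, tf_fn, rtol=1e-5):
  # Run the TF op on x and assert it matches the NumPy reference.
  with tf.Session() as sess:
    tf_out = sess.run(tf_fn(tf.constant(x)))
  np.testing.assert_allclose(tf_out, np_fn(x), rtol=rtol)

x = np.linspace(-5.0, 5.0, 11).astype(np.float32)
_compare(x, lambda v: np.log(1.0 / (1.0 + np.exp(-v))), tf.log_sigmoid)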


@@ -151,6 +151,7 @@ from tensorflow.python.ops import array_ops
 from tensorflow.python.ops import gen_control_flow_ops
 from tensorflow.python.ops import gen_data_flow_ops
 from tensorflow.python.ops import gen_math_ops
+from tensorflow.python.ops import gen_nn_ops
 from tensorflow.python.ops import gen_sparse_ops
 from tensorflow.python.ops import gen_spectral_ops
 from tensorflow.python.ops import gen_state_ops
@@ -2004,6 +2005,24 @@ def sigmoid(x, name=None):
     return gen_math_ops._sigmoid(x, name=name)
 
 
+def log_sigmoid(x, name=None):
+  """Computes log sigmoid of `x` element-wise.
+
+  Specifically, `y = log(1 / (1 + exp(-x)))`.  For numerical stability,
+  we use `y = -tf.nn.softplus(-x)`.
+
+  Args:
+    x: A Tensor with type `float32` or `float64`.
+    name: A name for the operation (optional).
+
+  Returns:
+    A Tensor with the same type as `x`.
+  """
+  with ops.name_scope(name, "LogSigmoid", [x]) as name:
+    x = ops.convert_to_tensor(x, name="x")
+    return gen_math_ops._neg(gen_nn_ops.softplus(-x), name=name)
+
+
 def tanh(x, name=None):
   """Computes hyperbolic tangent of `x` element-wise.


@@ -27,6 +27,7 @@ See the @{$python/nn} guide.
 @@dropout
 @@bias_add
 @@sigmoid
+@@log_sigmoid
 @@tanh
 @@convolution
 @@conv2d


@@ -145,6 +145,7 @@ _allowed_symbols_math_ops = [
     # These are documented in nn.
     # We are not importing nn because it would create a circular dependency.
     "sigmoid",
+    "log_sigmoid",
     "tanh",
 ]


@@ -1168,6 +1168,10 @@ tf_module {
     name: "log1p"
     argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
   }
+  member_method {
+    name: "log_sigmoid"
+    argspec: "args=[\'x\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
+  }
   member_method {
     name: "logical_and"
     argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "