Merge pull request #27814 from Bharat123rox:update-elu
PiperOrigin-RevId: 315765454
Change-Id: I778ecaea90e3d857d2ab090f5a5770b156114819
commit 12ae3da1b7
@@ -86,18 +86,49 @@ def softmax(x, axis=-1):
 @keras_export('keras.activations.elu')
 @dispatch.add_dispatch_support
 def elu(x, alpha=1.0):
-  """Exponential linear unit.
+  """Exponential Linear Unit.
+
+  The exponential linear unit (ELU) with `alpha > 0` is:
+  `x` if `x > 0` and
+  `alpha * (exp(x) - 1)` if `x < 0`
+  The ELU hyperparameter `alpha` controls the value to which an
+  ELU saturates for negative net inputs. ELUs diminish the
+  vanishing gradient effect.
+
+  ELUs have negative values which pushes the mean of the activations
+  closer to zero.
+  Mean activations that are closer to zero enable faster learning as they
+  bring the gradient closer to the natural gradient.
+  ELUs saturate to a negative value when the argument gets smaller.
+  Saturation means a small derivative which decreases the variation
+  and the information that is propagated to the next layer.
+
+  Example Usage:
+
+  >>> import tensorflow as tf
+  >>> model = tf.keras.Sequential()
+  >>> model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='elu',
+  ...          input_shape=(28, 28, 1)))
+  >>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
+  >>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
+  >>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
+  >>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
+
+  <tensorflow.python.keras.engine.sequential.Sequential object ...>
 
   Arguments:
       x: Input tensor.
-      alpha: A scalar, slope of negative section.
+      alpha: A scalar, slope of negative section. `alpha` controls the value to
+        which an ELU saturates for negative net inputs.
 
   Returns:
-      The exponential linear activation: `x` if `x > 0` and
-        `alpha * (exp(x)-1)` if `x < 0`.
+      The exponential linear unit (ELU) activation function: `x` if `x > 0` and
+      `alpha * (exp(x) - 1)` if `x < 0`.
+
 
   Reference:
-      - [Clevert et al. 2016](https://arxiv.org/abs/1511.07289)
+      [Fast and Accurate Deep Network Learning by Exponential Linear Units
+        (ELUs) (Clevert et al, 2016)](https://arxiv.org/abs/1511.07289)
   """
   return K.elu(x, alpha)
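
For context, a minimal sketch (not part of this change) that checks the piecewise definition stated in the new docstring against tf.keras.activations.elu. It assumes TensorFlow 2.x with eager execution; the sample inputs, the alpha=0.5 choice, and the tolerances are arbitrary.

    # Sketch only, not part of this commit: verify the documented piecewise
    # formula against tf.keras.activations.elu (assumes TensorFlow 2.x).
    import numpy as np
    import tensorflow as tf

    alpha = 0.5  # arbitrary positive slope for the negative section
    x = np.array([-10.0, -1.0, 0.0, 1.0, 10.0], dtype=np.float32)

    # Reference values from the docstring's definition:
    # `x` if `x > 0` and `alpha * (exp(x) - 1)` if `x < 0` (both are 0 at x == 0).
    expected = np.where(x > 0, x, alpha * (np.exp(x) - 1.0))

    actual = tf.keras.activations.elu(tf.constant(x), alpha=alpha).numpy()
    np.testing.assert_allclose(actual, expected, rtol=1e-5, atol=1e-6)

    # The `alpha` argument is the negative saturation value: for strongly
    # negative inputs the activation approaches -alpha (here, -0.5).
    print(tf.keras.activations.elu(tf.constant(-100.0), alpha=alpha).numpy())

The last print illustrates the note added to the Arguments section: `alpha` is the value toward which the ELU saturates for negative net inputs.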
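The saturation remark in the new docstring ("a small derivative which decreases the variation ...") can also be seen directly from gradients. Again a sketch only, assuming TensorFlow 2.x; the sample inputs are arbitrary.

    # Sketch only: for x << 0 the ELU output flattens out near -alpha, so the
    # derivative (alpha * exp(x) for x < 0) shrinks toward zero.
    import tensorflow as tf

    x = tf.Variable([-8.0, -2.0, 0.5, 3.0])
    with tf.GradientTape() as tape:
        y = tf.keras.activations.elu(x, alpha=1.0)
    grad = tape.gradient(y, x)  # element-wise d elu(x_i) / d x_i

    print(y.numpy())     # ~[-0.9997, -0.8647, 0.5, 3.0]: saturates near -1 = -alpha
    print(grad.numpy())  # ~[ 0.0003,  0.1353, 1.0, 1.0]: derivative vanishes as x -> -inf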