Update docstring for keras.layers.SimpleRNN and SimpleRNNCell.
PiperOrigin-RevId: 270153457
This commit is contained in:
parent 9414a67fc4
commit e627965279
@@ -1131,41 +1131,66 @@ class DropoutRNNCellMixin(object):
 class SimpleRNNCell(DropoutRNNCellMixin, Layer):
   """Cell class for SimpleRNN.
 
+  This class processes one step within the whole time sequence input, whereas
+  `tf.keras.layer.SimpleRNN` processes the whole sequence.
+
   Arguments:
     units: Positive integer, dimensionality of the output space.
     activation: Activation function to use.
       Default: hyperbolic tangent (`tanh`).
       If you pass `None`, no activation is applied
       (ie. "linear" activation: `a(x) = x`).
-    use_bias: Boolean, whether the layer uses a bias vector.
+    use_bias: Boolean, (default `True`), whether the layer uses a bias vector.
     kernel_initializer: Initializer for the `kernel` weights matrix,
-      used for the linear transformation of the inputs.
+      used for the linear transformation of the inputs. Default:
+      `glorot_uniform`.
     recurrent_initializer: Initializer for the `recurrent_kernel`
       weights matrix, used for the linear transformation of the recurrent state.
-    bias_initializer: Initializer for the bias vector.
-    kernel_regularizer: Regularizer function applied to
-      the `kernel` weights matrix.
-    recurrent_regularizer: Regularizer function applied to
-      the `recurrent_kernel` weights matrix.
-    bias_regularizer: Regularizer function applied to the bias vector.
-    kernel_constraint: Constraint function applied to
-      the `kernel` weights matrix.
-    recurrent_constraint: Constraint function applied to
-      the `recurrent_kernel` weights matrix.
-    bias_constraint: Constraint function applied to the bias vector.
-    dropout: Float between 0 and 1.
-      Fraction of the units to drop for
-      the linear transformation of the inputs.
-    recurrent_dropout: Float between 0 and 1.
-      Fraction of the units to drop for
-      the linear transformation of the recurrent state.
+      Default: `orthogonal`.
+    bias_initializer: Initializer for the bias vector. Default: `zeros`.
+    kernel_regularizer: Regularizer function applied to the `kernel` weights
+      matrix. Default: `None`.
+    recurrent_regularizer: Regularizer function applied to the
+      `recurrent_kernel` weights matrix. Default: `None`.
+    bias_regularizer: Regularizer function applied to the bias vector. Default:
+      `None`.
+    kernel_constraint: Constraint function applied to the `kernel` weights
+      matrix. Default: `None`.
+    recurrent_constraint: Constraint function applied to the `recurrent_kernel`
+      weights matrix. Default: `None`.
+    bias_constraint: Constraint function applied to the bias vector. Default:
+      `None`.
+    dropout: Float between 0 and 1. Fraction of the units to drop for the linear
+      transformation of the inputs. Default: 0.
+    recurrent_dropout: Float between 0 and 1. Fraction of the units to drop for
+      the linear transformation of the recurrent state. Default: 0.
 
   Call arguments:
-    inputs: A 2D tensor.
-    states: List of state tensors corresponding to the previous timestep.
+    inputs: A 2D tensor, with shape of `[batch, feature]`.
+    states: A 2D tensor with shape of `[batch, units]`, which is the state from
+      the previous time step. For timestep 0, the initial state provided by user
+      will be feed to cell.
     training: Python boolean indicating whether the layer should behave in
       training mode or in inference mode. Only relevant when `dropout` or
       `recurrent_dropout` is used.
+
+  Examples:
+
+  ```python
+  inputs = np.random.random([32, 10, 8]).astype(np.float32)
+  rnn = tf.keras.layers.RNN(tf.keras.layers.SimpleRNNCell(4))
+
+  output = rnn(inputs)  # The output has shape `[32, 4]`.
+
+  rnn = tf.keras.layers.RNN(
+      tf.keras.layers.SimpleRNNCell(4),
+      return_sequences=True,
+      return_state=True)
+
+  # whole_sequence_output has shape `[32, 10, 4]`.
+  # final_state has shape `[32, 4]`.
+  whole_sequence_output, final_state = rnn(inputs)
+  ```
   """
 
   def __init__(self,
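For context on the new `Call arguments` shapes, here is a minimal sketch, not part of the diff above, of stepping a `SimpleRNNCell` manually on a single timestep; the batch size, feature size, and unit count below are arbitrary choices for illustration.

```python
import numpy as np
import tensorflow as tf

batch, feature, units = 32, 8, 4  # arbitrary sizes, chosen for illustration
cell = tf.keras.layers.SimpleRNNCell(units)

x_t = np.random.random([batch, feature]).astype(np.float32)  # one timestep: `[batch, feature]`
states = [tf.zeros([batch, units])]                           # previous state: `[batch, units]`

# The cell processes one step and returns the output plus the new state list.
output, new_states = cell(x_t, states)
print(output.shape)         # (32, 4)
print(new_states[0].shape)  # (32, 4)
```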
@@ -1300,35 +1325,38 @@ class SimpleRNN(RNN):
       Default: hyperbolic tangent (`tanh`).
       If you pass None, no activation is applied
       (ie. "linear" activation: `a(x) = x`).
-    use_bias: Boolean, whether the layer uses a bias vector.
+    use_bias: Boolean, (default `True`), whether the layer uses a bias vector.
     kernel_initializer: Initializer for the `kernel` weights matrix,
-      used for the linear transformation of the inputs.
+      used for the linear transformation of the inputs. Default:
+      `glorot_uniform`.
     recurrent_initializer: Initializer for the `recurrent_kernel`
-      weights matrix,
-      used for the linear transformation of the recurrent state.
-    bias_initializer: Initializer for the bias vector.
-    kernel_regularizer: Regularizer function applied to
-      the `kernel` weights matrix.
-    recurrent_regularizer: Regularizer function applied to
-      the `recurrent_kernel` weights matrix.
-    bias_regularizer: Regularizer function applied to the bias vector.
-    activity_regularizer: Regularizer function applied to
-      the output of the layer (its "activation")..
-    kernel_constraint: Constraint function applied to
-      the `kernel` weights matrix.
-    recurrent_constraint: Constraint function applied to
-      the `recurrent_kernel` weights matrix.
-    bias_constraint: Constraint function applied to the bias vector.
+      weights matrix, used for the linear transformation of the recurrent state.
+      Default: `orthogonal`.
+    bias_initializer: Initializer for the bias vector. Default: `zeros`.
+    kernel_regularizer: Regularizer function applied to the `kernel` weights
+      matrix. Default: `None`.
+    recurrent_regularizer: Regularizer function applied to the
+      `recurrent_kernel` weights matrix. Default: `None`.
+    bias_regularizer: Regularizer function applied to the bias vector. Default:
+      `None`.
+    activity_regularizer: Regularizer function applied to the output of the
+      layer (its "activation"). Default: `None`.
+    kernel_constraint: Constraint function applied to the `kernel` weights
+      matrix. Default: `None`.
+    recurrent_constraint: Constraint function applied to the `recurrent_kernel`
+      weights matrix. Default: `None`.
+    bias_constraint: Constraint function applied to the bias vector. Default:
+      `None`.
     dropout: Float between 0 and 1.
-      Fraction of the units to drop for
-      the linear transformation of the inputs.
+      Fraction of the units to drop for the linear transformation of the inputs.
+      Default: 0.
     recurrent_dropout: Float between 0 and 1.
-      Fraction of the units to drop for
-      the linear transformation of the recurrent state.
+      Fraction of the units to drop for the linear transformation of the
+      recurrent state. Default: 0.
     return_sequences: Boolean. Whether to return the last output
-      in the output sequence, or the full sequence.
+      in the output sequence, or the full sequence. Default: `False`.
     return_state: Boolean. Whether to return the last state
-      in addition to the output.
+      in addition to the output. Default: `False`
     go_backwards: Boolean (default False).
       If True, process the input sequence backwards and return the
       reversed sequence.
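The defaults documented above can be read directly off the constructor: a `SimpleRNN(4)` with no other arguments behaves the same as one with every documented default spelled out. A small illustrative sketch, assuming TensorFlow 2.x as in the docstring examples:

```python
import tensorflow as tf

# Both layers are configured identically; the second simply spells out the
# defaults that the updated docstring now documents.
rnn_default = tf.keras.layers.SimpleRNN(4)
rnn_explicit = tf.keras.layers.SimpleRNN(
    4,
    activation='tanh',
    use_bias=True,
    kernel_initializer='glorot_uniform',
    recurrent_initializer='orthogonal',
    bias_initializer='zeros',
    dropout=0.0,
    recurrent_dropout=0.0,
    return_sequences=False,
    return_state=False)
```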
@@ -1343,8 +1371,8 @@ class SimpleRNN(RNN):
       Unrolling is only suitable for short sequences.
 
   Call arguments:
-    inputs: A 3D tensor.
-    mask: Binary tensor of shape `(samples, timesteps)` indicating whether
+    inputs: A 3D tensor, with shape `[batch, timesteps, feature]`.
+    mask: Binary tensor of shape `[batch, timesteps]` indicating whether
       a given timestep should be masked.
     training: Python boolean indicating whether the layer should behave in
       training mode or in inference mode. This argument is passed to the cell
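A minimal sketch, not part of the commit, of the `mask` and `training` call arguments described in this hunk; the mask pattern and dropout rate are arbitrary assumptions for illustration.

```python
import numpy as np
import tensorflow as tf

inputs = np.random.random([32, 10, 8]).astype(np.float32)  # `[batch, timesteps, feature]`
mask = np.ones([32, 10], dtype=bool)                       # `[batch, timesteps]`
mask[:, 5:] = False  # treat the last five timesteps as padding

layer = tf.keras.layers.SimpleRNN(4, dropout=0.2)
output = layer(inputs, mask=tf.constant(mask), training=True)
print(output.shape)  # (32, 4)
```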
@@ -1352,6 +1380,22 @@ class SimpleRNN(RNN):
       `recurrent_dropout` is used.
     initial_state: List of initial state tensors to be passed to the first
       call of the cell.
+
+  Examples:
+
+  ```python
+  inputs = np.random.random([32, 10, 8]).astype(np.float32)
+  simple_rnn = tf.keras.layers.SimpleRNN(4)
+
+  output = simple_rnn(inputs)  # The output has shape `[32, 4]`.
+
+  simple_rnn = tf.keras.layers.SimpleRNN(
+      4, return_sequences=True, return_state=True)
+
+  # whole_sequence_output has shape `[32, 10, 4]`.
+  # final_state has shape `[32, 4]`.
+  whole_sequence_output, final_state = simple_rnn(inputs)
+  ```
   """
 
   def __init__(self,
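To round out the `initial_state` call argument mentioned above (which the example added in the docstring does not cover), a brief sketch under the same assumptions as that example:

```python
import numpy as np
import tensorflow as tf

inputs = np.random.random([32, 10, 8]).astype(np.float32)
initial_state = [tf.zeros([32, 4])]  # one state tensor of shape `[batch, units]`

simple_rnn = tf.keras.layers.SimpleRNN(4, return_state=True)
output, final_state = simple_rnn(inputs, initial_state=initial_state)
print(output.shape)       # (32, 4)
print(final_state.shape)  # (32, 4)
```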