Update the TF export for RNN layer API.

The Keras RNN API should be preferred in TF 2.0. I only kept the state_saver
one for the moment. keras.RNN(stateful=True) will be the replacement for this
API, but I think it will need some extra helper functions for users to break
down the input data sequence and reset the RNN states.

PiperOrigin-RevId: 221104731
Scott Zhu, 2018-11-12 09:28:59 -08:00, committed by TensorFlower Gardener
parent 058717dd41, commit c40983fe9a
2 changed files with 10 additions and 15 deletions
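For illustration only (not part of this change): a minimal sketch of the keras.RNN(stateful=True) workflow the message refers to, assuming TF 2.x eager execution and the public tf.keras API. The manual chunking loop and the reset_states() call are exactly the steps the mentioned helper function would wrap; all sizes and names below are made up.

import numpy as np
import tensorflow as tf

batch_size, chunk_len, num_features, units = 4, 10, 8, 16

# Stateful RNN: the layer keeps its state between calls instead of
# resetting it for every batch, so the batch size must be fixed.
rnn = tf.keras.layers.RNN(
    tf.keras.layers.SimpleRNNCell(units), stateful=True, return_sequences=True)
model = tf.keras.Sequential([
    tf.keras.Input(shape=(chunk_len, num_features), batch_size=batch_size),
    rnn,
])

long_sequence = np.random.rand(
    batch_size, 3 * chunk_len, num_features).astype("float32")

# The caller breaks the long sequence into chunks by hand; the RNN state
# carries over from one chunk to the next.
for start in range(0, long_sequence.shape[1], chunk_len):
    outputs = model(long_sequence[:, start:start + chunk_len, :])

# ...and resets the carried state by hand before an unrelated sequence.
rnn.reset_states()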


@@ -32,6 +32,7 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops import rnn_cell_impl
 from tensorflow.python.ops import tensor_array_ops
 from tensorflow.python.ops import variable_scope as vs
+from tensorflow.python.util import deprecation
 from tensorflow.python.util import nest
 from tensorflow.python.util.tf_export import tf_export
@@ -480,7 +481,10 @@ def bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, sequence_length=None,
   return (outputs, output_states)


-@tf_export("nn.dynamic_rnn")
+@deprecation.deprecated(
+    None,
+    "Please use `keras.layers.RNN(cell)`, which is equivalent to this API")
+@tf_export(v1=["nn.dynamic_rnn"])
 def dynamic_rnn(cell, inputs, sequence_length=None, initial_state=None,
                 dtype=None, parallel_iterations=None, swap_memory=False,
                 time_major=False, scope=None):
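For context (not part of the diff): a hedged sketch of the replacement named in the new deprecation message, keras.layers.RNN(cell) in place of tf.nn.dynamic_rnn, assuming TF 2.x and batch-major inputs (dynamic_rnn's default, time_major=False). Sizes are arbitrary.

import numpy as np
import tensorflow as tf

inputs = np.random.rand(4, 10, 8).astype("float32")  # [batch, time, features]

# return_sequences/return_state mirror dynamic_rnn's (outputs, state) pair.
layer = tf.keras.layers.RNN(
    tf.keras.layers.LSTMCell(16), return_sequences=True, return_state=True)
outputs, state_h, state_c = layer(inputs)  # outputs: [batch, time, units]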
@@ -891,7 +895,7 @@ def _dynamic_rnn_loop(cell,
   return (final_outputs, final_state)


-@tf_export("nn.raw_rnn")
+@tf_export(v1=["nn.raw_rnn"])
 def raw_rnn(cell, loop_fn,
             parallel_iterations=None, swap_memory=False, scope=None):
   """Creates an `RNN` specified by RNNCell `cell` and loop function `loop_fn`.
@@ -1210,7 +1214,10 @@ def raw_rnn(cell, loop_fn,
   return (emit_ta, final_state, final_loop_state)


-@tf_export("nn.static_rnn")
+@deprecation.deprecated(
+    None, "Please use `keras.layers.RNN(cell, unroll=True)`, "
+    "which is equivalent to this API")
+@tf_export(v1=["nn.static_rnn"])
 def static_rnn(cell,
                inputs,
                initial_state=None,
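Similarly (illustration only): keras.layers.RNN(cell, unroll=True), the replacement named for static_rnn; unroll=True statically unrolls the loop over a fixed-length time dimension, much as static_rnn built one graph step per time step. Sizes are arbitrary.

import numpy as np
import tensorflow as tf

inputs = np.random.rand(4, 10, 8).astype("float32")  # fixed sequence length of 10

layer = tf.keras.layers.RNN(
    tf.keras.layers.GRUCell(16), unroll=True, return_sequences=True)
outputs = layer(inputs)  # [batch, time, units], loop unrolled over the 10 steps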


@@ -128,10 +128,6 @@ tf_module {
     name: "dropout"
     argspec: "args=[\'x\', \'keep_prob\', \'noise_shape\', \'seed\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
   }
-  member_method {
-    name: "dynamic_rnn"
-    argspec: "args=[\'cell\', \'inputs\', \'sequence_length\', \'initial_state\', \'dtype\', \'parallel_iterations\', \'swap_memory\', \'time_major\', \'scope\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\', \'False\', \'False\', \'None\'], "
-  }
   member_method {
     name: "elu"
     argspec: "args=[\'features\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
@@ -244,10 +240,6 @@ tf_module {
     name: "quantized_relu_x"
     argspec: "args=[\'features\', \'max_value\', \'min_features\', \'max_features\', \'out_type\', \'name\'], varargs=None, keywords=None, defaults=[\"<dtype: \'quint8\'>\", \'None\'], "
   }
-  member_method {
-    name: "raw_rnn"
-    argspec: "args=[\'cell\', \'loop_fn\', \'parallel_iterations\', \'swap_memory\', \'scope\'], varargs=None, keywords=None, defaults=[\'None\', \'False\', \'None\'], "
-  }
   member_method {
     name: "relu"
     argspec: "args=[\'features\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
@@ -316,10 +308,6 @@ tf_module {
     name: "static_bidirectional_rnn"
     argspec: "args=[\'cell_fw\', \'cell_bw\', \'inputs\', \'initial_state_fw\', \'initial_state_bw\', \'dtype\', \'sequence_length\', \'scope\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\'], "
   }
-  member_method {
-    name: "static_rnn"
-    argspec: "args=[\'cell\', \'inputs\', \'initial_state\', \'dtype\', \'sequence_length\', \'scope\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
-  }
   member_method {
     name: "static_state_saving_rnn"
     argspec: "args=[\'cell\', \'inputs\', \'state_saver\', \'state_name\', \'sequence_length\', \'scope\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "