Pass missing dropout rate parameters

Reuben Morais 2019-04-04 22:56:12 -03:00
parent 5ee856d075
commit 6154150317


@@ -128,9 +128,9 @@ def create_model(batch_x, seq_length, dropout, reuse=False, previous_state=None,
     # The next three blocks will pass `batch_x` through three hidden layers with
     # clipped RELU activation and dropout.
-    layers['layer_1'] = layer_1 = dense('layer_1', batch_x, Config.n_hidden_1)
-    layers['layer_2'] = layer_2 = dense('layer_2', layer_1, Config.n_hidden_2)
-    layers['layer_3'] = layer_3 = dense('layer_3', layer_2, Config.n_hidden_3)
+    layers['layer_1'] = layer_1 = dense('layer_1', batch_x, Config.n_hidden_1, dropout_rate=dropout[0])
+    layers['layer_2'] = layer_2 = dense('layer_2', layer_1, Config.n_hidden_2, dropout_rate=dropout[1])
+    layers['layer_3'] = layer_3 = dense('layer_3', layer_2, Config.n_hidden_3, dropout_rate=dropout[2])

     # `layer_3` is now reshaped into `[n_steps, batch_size, 2*n_cell_dim]`,
     # as the LSTM RNN expects its input to be of shape `[max_time, batch_size, input_size]`.
@@ -147,7 +147,7 @@ def create_model(batch_x, seq_length, dropout, reuse=False, previous_state=None,
     layers['rnn_output_state'] = output_state

     # Now we feed `output` to the fifth hidden layer with clipped RELU activation
-    layers['layer_5'] = layer_5 = dense('layer_5', output, Config.n_hidden_5)
+    layers['layer_5'] = layer_5 = dense('layer_5', output, Config.n_hidden_5, dropout_rate=dropout[5])

     # Now we apply a final linear layer creating `n_classes` dimensional vectors, the logits.
     layers['layer_6'] = layer_6 = dense('layer_6', layer_5, Config.n_hidden_6, relu=False)
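
For context: the `dense` helper itself is not part of this diff. Below is a minimal sketch of what it plausibly looks like, inferred only from the call sites above; the use of `tf.layers.dense`, the RELU clip value of 20, and `tf.nn.dropout(..., rate=...)` (TensorFlow >= 1.13) are assumptions, not the repository's actual implementation.

import tensorflow as tf

def dense(name, x, units, dropout_rate=None, relu=True):
    # Hypothetical reconstruction of a fully connected layer matching the
    # call signature dense(name, input, units, dropout_rate=..., relu=...).
    with tf.variable_scope(name):
        output = tf.layers.dense(x, units)
        if relu:
            # Clipped RELU activation; the clip value of 20 is an assumption.
            output = tf.minimum(tf.nn.relu(output), 20.0)
        if dropout_rate is not None:
            # This is the parameter the commit starts passing. Before the
            # fix, dropout_rate stayed at its default and these layers
            # silently applied no dropout.
            output = tf.nn.dropout(output, rate=dropout_rate)
    return output

Note the indices: `dropout` is a per-layer list of rates, with `dropout[0]` through `dropout[2]` feeding the first three hidden layers and `dropout[5]` the fifth; the intervening entries presumably belong to the recurrent layer between them.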