Merge pull request #1890 from danielwinkler/patch-1

Fixed typo in relu_clip description.
This commit is contained in:
Kelly Davis 2019-02-19 11:49:59 +01:00 committed by GitHub
commit 87856d5801
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -47,7 +47,7 @@ def create_flags():
tf.app.flags.DEFINE_float ('dropout_rate5', 0.0, 'dropout rate for layer 5 - defaults to 0.0')
tf.app.flags.DEFINE_float ('dropout_rate6', -1.0, 'dropout rate for layer 6 - defaults to dropout_rate')
-tf.app.flags.DEFINE_float ('relu_clip', 20.0, 'ReLU clipping value for non-recurrant layers')
+tf.app.flags.DEFINE_float ('relu_clip', 20.0, 'ReLU clipping value for non-recurrent layers')
# Adam optimizer (http://arxiv.org/abs/1412.6980) parameters