From 0de9e4bf80cf2f9cc7c848b9189811aa32a570aa Mon Sep 17 00:00:00 2001 From: Richard Hamnett Date: Fri, 21 Feb 2020 18:32:03 +0000 Subject: [PATCH] Add force_initialize_learning_rate Ability to reset learning rate which has been reduced by reduce_lr_on_plateau --- util/flags.py | 1 + 1 file changed, 1 insertion(+) diff --git a/util/flags.py b/util/flags.py index f46fdc81..a7f9a43f 100644 --- a/util/flags.py +++ b/util/flags.py @@ -148,6 +148,7 @@ def create_flags(): f.DEFINE_boolean('reduce_lr_on_plateau', False, 'Enable reducing the learning rate if a plateau is reached. This is the case if the validation loss did not improve for some epochs.') f.DEFINE_integer('plateau_epochs', 10, 'Number of epochs to consider for RLROP. Has to be smaller than es_epochs from early stopping') f.DEFINE_float('plateau_reduction', 0.1, 'Multiplicative factor to apply to the current learning rate if a plateau has occurred.') + f.DEFINE_boolean('force_initialize_learning_rate', False, 'Force re-initialization of learning rate which was previously reduced.') # Decoder