From 78e8dfdf386d533c6581ec45c082ce94a78a11d4 Mon Sep 17 00:00:00 2001
From: Reuben Morais
Date: Tue, 18 Feb 2020 16:14:33 +0100
Subject: [PATCH] Disable early stopping and LR reduction on plateau by default

---
 util/flags.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/util/flags.py b/util/flags.py
index 24ff764f..f46fdc81 100644
--- a/util/flags.py
+++ b/util/flags.py
@@ -139,13 +139,13 @@ def create_flags():
 
     # Early Stopping
 
-    f.DEFINE_boolean('early_stop', True, 'Enable early stopping mechanism over validation dataset. If validation is not being run, early stopping is disabled.')
+    f.DEFINE_boolean('early_stop', False, 'Enable early stopping mechanism over validation dataset. If validation is not being run, early stopping is disabled.')
     f.DEFINE_integer('es_epochs', 25, 'Number of epochs with no improvement after which training will be stopped. Loss is not stored in the checkpoint so when checkpoint is revived it starts the loss calculation from start at that point')
     f.DEFINE_float('es_min_delta', 0.05, 'Minimum change in loss to qualify as an improvement. This value will also be used in Reduce learning rate on plateau')
 
     # Reduce learning rate on plateau
 
-    f.DEFINE_boolean('reduce_lr_on_plateau', True, 'Enable reducing the learning rate if a plateau is reached. This is the case if the validation loss did not improve for some epochs.')
+    f.DEFINE_boolean('reduce_lr_on_plateau', False, 'Enable reducing the learning rate if a plateau is reached. This is the case if the validation loss did not improve for some epochs.')
     f.DEFINE_integer('plateau_epochs', 10, 'Number of epochs to consider for RLROP. Has to be smaller than es_epochs from early stopping')
     f.DEFINE_float('plateau_reduction', 0.1, 'Multiplicative factor to apply to the current learning rate if a plateau has occurred.')
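
The sketch below illustrates the semantics of these flags as described in their help strings: an improvement must beat the best validation loss by at least `es_min_delta`, the learning rate is multiplied by `plateau_reduction` after `plateau_epochs` stagnant epochs, and training stops after `es_epochs` stagnant epochs. It is not the project's actual training-loop code; the `PlateauTracker` class, its method names, and the example loss values are hypothetical and exist only to show how the flags interact and what the new `False` defaults imply.

```python
class PlateauTracker:
    """Hypothetical tracker driving early stopping and LR reduction from dev loss."""

    def __init__(self, early_stop=False, reduce_lr_on_plateau=False,
                 es_epochs=25, es_min_delta=0.05,
                 plateau_epochs=10, plateau_reduction=0.1):
        # Defaults mirror the flag defaults after this patch: both mechanisms off.
        self.early_stop = early_stop
        self.reduce_lr_on_plateau = reduce_lr_on_plateau
        self.es_epochs = es_epochs
        self.es_min_delta = es_min_delta
        self.plateau_epochs = plateau_epochs
        self.plateau_reduction = plateau_reduction
        self.best_loss = float('inf')
        self.epochs_without_improvement = 0

    def update(self, dev_loss, current_lr):
        """Call once per validation epoch; returns (should_stop, new_lr)."""
        if dev_loss < self.best_loss - self.es_min_delta:
            # An improvement larger than es_min_delta resets the stagnation counter.
            self.best_loss = dev_loss
            self.epochs_without_improvement = 0
            return False, current_lr

        self.epochs_without_improvement += 1

        # Reduce the learning rate each time a full plateau window passes.
        if (self.reduce_lr_on_plateau
                and self.epochs_without_improvement % self.plateau_epochs == 0):
            current_lr *= self.plateau_reduction

        # Stop training once the early-stopping window is exceeded.
        should_stop = (self.early_stop
                       and self.epochs_without_improvement >= self.es_epochs)
        return should_stop, current_lr


if __name__ == '__main__':
    # With the new defaults both mechanisms are inert; enabling them here
    # corresponds to passing --early_stop --reduce_lr_on_plateau on the CLI.
    tracker = PlateauTracker(early_stop=True, reduce_lr_on_plateau=True)
    lr = 0.001
    for epoch, loss in enumerate([10.0, 9.0, 8.9, 8.89, 8.88]):
        stop, lr = tracker.update(loss, lr)
        print(f'epoch={epoch} dev_loss={loss} lr={lr:.6f} stop={stop}')
```

In other words, after this patch a training run keeps its initial learning rate and runs for the full number of epochs unless the user explicitly opts back in with `--early_stop` and/or `--reduce_lr_on_plateau`.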