Refactor the reduce-LR-on-plateau (RLROP) condition.

This commit is contained in:
Daniel 2020-08-31 12:57:48 +02:00
parent 555a265010
commit c10f7f1ad6
1 changed files with 5 additions and 3 deletions

View File

@ -642,9 +642,11 @@ def train():
# Reduce learning rate on plateau
# If the learning rate was reduced and there is still no improvement
# wait FLAGS.plateau_epochs before the learning rate is reduced again
if (FLAGS.reduce_lr_on_plateau and
epochs_without_improvement % FLAGS.plateau_epochs == 0 and epochs_without_improvement > 0):
if (
FLAGS.reduce_lr_on_plateau
and epochs_without_improvement > 0
and epochs_without_improvement % FLAGS.plateau_epochs == 0
):
# Reload the checkpoint so that we use the best_dev weights again
reload_best_checkpoint(session)