Clean up callback warnings

PiperOrigin-RevId: 328969711
Change-Id: I28109e56f38138fe75b7f0cc257b1fddc579b051
Francois Chollet 2020-08-28 10:59:28 -07:00 committed by TensorFlower Gardener
parent db5d58af9b
commit b73c9f1236

@@ -1264,16 +1264,6 @@ class ModelCheckpoint(Callback):
       self.save_weights_only = True
 
   def on_train_begin(self, logs=None):
-    # pylint: disable=protected-access
-    if self.model._in_multi_worker_mode:
-      logging.warning(
-          'Automatic model reloading for interrupted job was removed from '
-          'the `ModelCheckpoint` callback in multi-worker mode, please use the '
-          '`keras.callbacks.experimental.BackupAndRestore` callback instead. '
-          'See this tutorial for details: '
-          'https://www.tensorflow.org/tutorials/distribute/'
-          'multi_worker_with_keras#backupandrestore_callback.'
-      )
     if self.load_weights_on_restart:
       filepath_to_load = (
           self._get_most_recently_modified_file_matching_pattern(self.filepath))
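The removed warning points users at `keras.callbacks.experimental.BackupAndRestore` for resuming interrupted multi-worker jobs. A minimal sketch of that setup, assuming a toy model, random data, and illustrative paths that are not part of this commit:

import numpy as np
import tensorflow as tf

# Multi-worker strategy; run in a single process it degenerates to one worker.
strategy = tf.distribute.MultiWorkerMirroredStrategy()
with strategy.scope():
  model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
  model.compile(optimizer='sgd', loss='mse')

x = np.random.random((64, 8)).astype('float32')
y = np.random.random((64, 1)).astype('float32')

callbacks = [
    # Saves and restores training state so an interrupted job can resume.
    tf.keras.callbacks.experimental.BackupAndRestore(backup_dir='/tmp/backup'),
    # Still writes checkpoints, but no longer auto-reloads them on restart.
    tf.keras.callbacks.ModelCheckpoint(filepath='/tmp/ckpt',
                                       save_weights_only=True),
]
model.fit(x, y, epochs=3, callbacks=callbacks, verbose=0)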
@@ -2422,7 +2412,7 @@ class ReduceLROnPlateau(Callback):
     """Resets wait counter and cooldown counter.
     """
     if self.mode not in ['auto', 'min', 'max']:
-      logging.warning('Learning Rate Plateau Reducing mode %s is unknown, '
+      logging.warning('Learning rate reduction mode %s is unknown, '
                       'fallback to auto mode.', self.mode)
       self.mode = 'auto'
     if (self.mode == 'min' or
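The reworded warning fires when an unsupported `mode` string is passed, after which the callback falls back to 'auto'. A small usage sketch (model and data are illustrative, not from this commit):

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
model.compile(optimizer='sgd', loss='mse')

reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(
    monitor='loss',  # metric that appears in the training logs
    mode='min',      # must be 'auto', 'min', or 'max'; anything else warns and uses 'auto'
    factor=0.5,
    patience=2)

x = np.random.random((64, 4)).astype('float32')
y = np.random.random((64, 1)).astype('float32')
model.fit(x, y, epochs=5, callbacks=[reduce_lr], verbose=0)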
@@ -2443,7 +2433,7 @@ class ReduceLROnPlateau(Callback):
     logs['lr'] = K.get_value(self.model.optimizer.lr)
     current = logs.get(self.monitor)
     if current is None:
-      logging.warning('Reduce LR on plateau conditioned on metric `%s` '
+      logging.warning('Learning rate reduction is conditioned on metric `%s` '
                       'which is not available. Available metrics are: %s',
                       self.monitor, ','.join(list(logs.keys())))
 
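This second warning fires when the monitored metric is missing from the epoch logs, for example when monitoring 'val_loss' without supplying validation data. A sketch of both cases, again with a toy model and data that are not part of the diff:

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
model.compile(optimizer='sgd', loss='mse')

x = np.random.random((64, 4)).astype('float32')
y = np.random.random((64, 1)).astype('float32')

reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', patience=2)

# Warns: no validation data, so 'val_loss' never appears in the logs.
model.fit(x, y, epochs=2, callbacks=[reduce_lr], verbose=0)

# No warning: validation metrics are computed, so 'val_loss' is available.
model.fit(x, y, epochs=2, validation_split=0.25, callbacks=[reduce_lr], verbose=0)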