Disable logging and summaries of steps, steps/s, loss, etc. when RunConfig.log_step_count_steps is None.
This is already how Estimator works (as can be seen on line 2136, where we explicitly set log_step_count_steps to None). It should be backward compatible, since the current code would have crashed if log_step_count_steps were None. PiperOrigin-RevId: 217019985
This commit is contained in:
parent
47b04fdb3e
commit
109a0c1b15
@ -2480,6 +2480,7 @@ class TPUEstimator(estimator_lib.Estimator):
|
|||||||
|
|
||||||
# examples_hook is added to training_hooks for both CPU and TPU
|
# examples_hook is added to training_hooks for both CPU and TPU
|
||||||
# execution.
|
# execution.
|
||||||
|
if self._log_every_n_steps is not None:
|
||||||
examples_hook = ExamplesPerSecondHook(
|
examples_hook = ExamplesPerSecondHook(
|
||||||
ctx.global_batch_size,
|
ctx.global_batch_size,
|
||||||
output_dir=self.model_dir,
|
output_dir=self.model_dir,
|
||||||
@ -2489,6 +2490,7 @@ class TPUEstimator(estimator_lib.Estimator):
|
|||||||
logging.info('Running %s on CPU', mode)
|
logging.info('Running %s on CPU', mode)
|
||||||
estimator_spec = model_fn_wrapper.call_without_tpu(
|
estimator_spec = model_fn_wrapper.call_without_tpu(
|
||||||
features, labels, is_export_mode=is_export_mode)
|
features, labels, is_export_mode=is_export_mode)
|
||||||
|
if self._log_every_n_steps is not None:
|
||||||
estimator_spec = estimator_spec._replace(
|
estimator_spec = estimator_spec._replace(
|
||||||
training_hooks=estimator_spec.training_hooks + (examples_hook,))
|
training_hooks=estimator_spec.training_hooks + (examples_hook,))
|
||||||
return estimator_spec
|
return estimator_spec
|
||||||
@ -2540,10 +2542,6 @@ class TPUEstimator(estimator_lib.Estimator):
|
|||||||
with ops.control_dependencies([loss]):
|
with ops.control_dependencies([loss]):
|
||||||
global_step = array_ops.identity(training.get_global_step())
|
global_step = array_ops.identity(training.get_global_step())
|
||||||
hooks = input_hooks + shutdown_hooks
|
hooks = input_hooks + shutdown_hooks
|
||||||
logging_hook_frequency = ( # Divide and round up
|
|
||||||
(self._log_every_n_steps +
|
|
||||||
self._config.tpu_config.iterations_per_loop - 1) //
|
|
||||||
self._config.tpu_config.iterations_per_loop)
|
|
||||||
hooks.extend([
|
hooks.extend([
|
||||||
TPUInfeedOutfeedSessionHook(
|
TPUInfeedOutfeedSessionHook(
|
||||||
ctx,
|
ctx,
|
||||||
@ -2553,13 +2551,19 @@ class TPUEstimator(estimator_lib.Estimator):
|
|||||||
run_infeed_loop_on_coordinator),
|
run_infeed_loop_on_coordinator),
|
||||||
rendezvous=self._rendezvous[mode],
|
rendezvous=self._rendezvous[mode],
|
||||||
),
|
),
|
||||||
InstallSignalHandlerHook(),
|
InstallSignalHandlerHook()
|
||||||
|
])
|
||||||
|
if self._log_every_n_steps is not None:
|
||||||
|
logging_hook_frequency = ( # Divide and round up
|
||||||
|
(self._log_every_n_steps +
|
||||||
|
self._config.tpu_config.iterations_per_loop - 1) //
|
||||||
|
self._config.tpu_config.iterations_per_loop)
|
||||||
|
hooks.append(
|
||||||
training.LoggingTensorHook({
|
training.LoggingTensorHook({
|
||||||
'loss': array_ops.identity(loss),
|
'loss': array_ops.identity(loss),
|
||||||
'step': global_step,
|
'step': global_step,
|
||||||
},
|
},
|
||||||
every_n_iter=logging_hook_frequency)
|
every_n_iter=logging_hook_frequency))
|
||||||
])
|
|
||||||
examples_hook._set_steps_per_run( # pylint: disable=protected-access
|
examples_hook._set_steps_per_run( # pylint: disable=protected-access
|
||||||
self._config.tpu_config.iterations_per_loop)
|
self._config.tpu_config.iterations_per_loop)
|
||||||
hooks.append(examples_hook)
|
hooks.append(examples_hook)
|
||||||
|
Loading…
Reference in New Issue
Block a user