Fix deprecation warnings of *_global_step functions.

Guillaume Klein 2017-08-24 11:58:05 +02:00 committed by Martin Wicke
parent 2f5caa40bf
commit 5f95081698


@@ -156,9 +156,9 @@ def optimize_loss(loss,
   loss = ops.convert_to_tensor(loss)
   contrib_framework.assert_scalar(loss)
   if global_step is None:
-    global_step = contrib_framework.get_global_step()
+    global_step = train.get_global_step()
   else:
-    contrib_framework.assert_global_step(global_step)
+    train.assert_global_step(global_step)
   with vs.variable_scope(name, "OptimizeLoss", [loss, global_step]):
     # Update ops take UPDATE_OPS collection if not provided.
     if update_ops is None:
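
For context, the change swaps the deprecated tf.contrib.framework global-step helpers for their tf.train equivalents. Below is a minimal sketch of the same pattern from the caller's side, assuming a TensorFlow 1.x graph-mode environment; the loss, learning rate, and optimizer values are illustrative only.

    import tensorflow as tf

    # Obtain (or create) the global step using the tf.train helpers,
    # mirroring what optimize_loss now does internally, instead of the
    # deprecated tf.contrib.framework.get_global_step().
    global_step = tf.train.get_or_create_global_step()

    # Sanity-check that the tensor is a valid global step variable,
    # the tf.train counterpart of contrib_framework.assert_global_step().
    tf.train.assert_global_step(global_step)

    # Illustrative loss; in practice this would come from the model.
    loss = tf.constant(1.0)

    # Passing global_step explicitly avoids relying on the default lookup.
    train_op = tf.contrib.layers.optimize_loss(
        loss=loss,
        global_step=global_step,
        learning_rate=0.1,
        optimizer="SGD")

Existing callers that pass global_step=None keep working: optimize_loss now resolves the step through tf.train.get_global_step() without emitting the deprecation warning.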