Fix bug in MixedPrecisionLossScaleOptimizer
parent e8dbf1de1a
commit df6e5c6b59
tensorflow/python/training/experimental
@@ -197,6 +197,10 @@ class LossScale(trackable.Trackable):
     """Creates the LossScale from its config."""
     return cls(**config)

+  def variables(self):
+    """Returns the variables defined in this LossScale."""
+    return list(self._weights.values())
+

 def get_loss_scale_weights(loss_scale):
   return loss_scale._weights.values()  # pylint: disable=protected-access
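The hunk above adds a public variables() accessor to LossScale. A minimal usage sketch follows, assuming this hunk lives in loss_scale.py under the directory shown for this commit and using the existing DynamicLossScale subclass, which keeps its state in self._weights:

from tensorflow.python.training.experimental import loss_scale as loss_scale_module

ls = loss_scale_module.DynamicLossScale()
# The new accessor returns the loss-scale state (current scale, good-steps
# counter) without reaching into the protected _weights dict.
print(ls.variables())
# get_loss_scale_weights() remains the module-level, pylint-exempt shortcut
# for internal callers.
print(list(loss_scale_module.get_loss_scale_weights(ls)))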
@@ -243,3 +243,7 @@ class MixedPrecisionLossScaleOptimizer(optimizer.Optimizer):
   def _resource_apply_dense(self, grad, handle):
     """This function should never be called."""
     raise RuntimeError('This function should never be called')
+
+  def variables(self):
+    """Returns the variables of the Optimizer."""
+    return self._optimizer.variables() + self._loss_scale.variables()
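This second hunk adds the variables() override on the class named in the commit title, so the wrapper reports the inner optimizer's variables together with the loss-scale variables. A hedged sketch of the effect; the module file names are assumed from the directory above, and MomentumOptimizer is just an arbitrary wrapped optimizer:

import tensorflow as tf
from tensorflow.python.training.experimental import loss_scale as loss_scale_module
from tensorflow.python.training.experimental import loss_scale_optimizer as lso

opt = lso.MixedPrecisionLossScaleOptimizer(
    tf.compat.v1.train.MomentumOptimizer(0.1, momentum=0.9),
    loss_scale_module.DynamicLossScale())
# Equivalent to opt._optimizer.variables() + opt._loss_scale.variables(),
# so code that inspects or checkpoints the optimizer via variables() now
# sees the loss-scale state as well.
print(opt.variables())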