Merge pull request #35041 from reedwm/r2.1

[r2.1 Cherrypick]: Unexpose LossScaleGradientTape.
commit 87143b53e1
Author: Goldie Gadde
Date: 2019-12-20 16:35:38 -08:00 (committed by GitHub)
3 changed files with 1 addition and 44 deletions

tensorflow/python/training/experimental/loss_scaling_gradient_tape.py

@@ -25,10 +25,9 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
 from tensorflow.python.training.experimental import loss_scale as loss_scale_module
 from tensorflow.python.util import nest
-from tensorflow.python.util.tf_export import tf_export
 
 
-@tf_export("mixed_precision.experimental.LossScaleGradientTape", v1=[])
+# TODO(reedwm): Expose this. Currently it doesn't work with DistributionStrategy
 class LossScaleGradientTape(backprop.GradientTape):
   """A gradient tape that scales losses and unscales resulting gradients.
 

tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt (deleted)

@@ -1,38 +0,0 @@
-path: "tensorflow.mixed_precision.experimental.LossScaleGradientTape"
-tf_class {
-  is_instance: "<class \'tensorflow.python.training.experimental.loss_scaling_gradient_tape.LossScaleGradientTape\'>"
-  is_instance: "<class \'tensorflow.python.eager.backprop.GradientTape\'>"
-  is_instance: "<type \'object\'>"
-  member_method {
-    name: "__init__"
-    argspec: "args=[\'self\', \'loss_scale\', \'persistent\', \'watch_accessed_variables\'], varargs=None, keywords=None, defaults=[\'False\', \'True\'], "
-  }
-  member_method {
-    name: "batch_jacobian"
-    argspec: "args=[\'self\', \'target\', \'source\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "gradient"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'output_gradients\', \'unconnected_gradients\'], varargs=None, keywords=None, defaults=[\'None\', \'UnconnectedGradients.NONE\'], "
-  }
-  member_method {
-    name: "jacobian"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "reset"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "stop_recording"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watch"
-    argspec: "args=[\'self\', \'tensor\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watched_variables"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-}
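The argspecs in the deleted golden file still document how the tape was driven. Below is a minimal sketch reconstructed from them; the internal import path comes from the is_instance entry above, tf.mixed_precision.experimental.DynamicLossScale was public in TF 2.1, and the rest (variable names, the toy loss) is illustrative:

    import tensorflow as tf
    from tensorflow.python.training.experimental.loss_scaling_gradient_tape import (
        LossScaleGradientTape)

    loss_scale = tf.mixed_precision.experimental.DynamicLossScale()
    v = tf.Variable(1.0)

    # The tape multiplies the loss by the current loss scale before
    # differentiating, which keeps small float16 gradients from underflowing.
    with LossScaleGradientTape(loss_scale) as tape:
      loss = v * v

    # gradient() divides the scale back out, so callers receive ordinary,
    # unscaled gradients (here, d(v*v)/dv = 2.0).
    grads = tape.gradient(loss, [v])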

tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.pbtxt

@@ -12,8 +12,4 @@ tf_module {
     name: "LossScale"
     mtype: "<type \'type\'>"
   }
-  member {
-    name: "LossScaleGradientTape"
-    mtype: "<type \'type\'>"
-  }
 }
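Housekeeping note on why a one-line unexposure touches three files: the two .pbtxt files above are TensorFlow's API golden files, snapshots of the public symbol surface that the API compatibility test checks on every change. Dropping the tf_export decorator without also deleting the class's golden file and its entry in the tf_module golden would fail that test; goldens are normally regenerated by rerunning the compatibility test with its update flag rather than edited by hand.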