diff --git a/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py b/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
index caae7052b84..0afe4c78caf 100644
--- a/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
+++ b/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
@@ -25,10 +25,9 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
 from tensorflow.python.training.experimental import loss_scale as loss_scale_module
 from tensorflow.python.util import nest
-from tensorflow.python.util.tf_export import tf_export
 
 
-@tf_export("mixed_precision.experimental.LossScaleGradientTape", v1=[])
+# TODO(reedwm): Expose this. Currently it doesn't work with DistributionStrategy
 class LossScaleGradientTape(backprop.GradientTape):
   """A gradient tape that scales losses and unscales resulting gradients.
 
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt
deleted file mode 100644
index 7f4715832e2..00000000000
--- a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt
+++ /dev/null
@@ -1,38 +0,0 @@
-path: "tensorflow.mixed_precision.experimental.LossScaleGradientTape"
-tf_class {
-  is_instance: "<class \'tensorflow.python.training.experimental.loss_scaling_gradient_tape.LossScaleGradientTape\'>"
-  is_instance: "<class \'tensorflow.python.eager.backprop.GradientTape\'>"
-  is_instance: "<type \'object\'>"
-  member_method {
-    name: "__init__"
-    argspec: "args=[\'self\', \'loss_scale\', \'persistent\', \'watch_accessed_variables\'], varargs=None, keywords=None, defaults=[\'False\', \'True\'], "
-  }
-  member_method {
-    name: "batch_jacobian"
-    argspec: "args=[\'self\', \'target\', \'source\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "gradient"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'output_gradients\', \'unconnected_gradients\'], varargs=None, keywords=None, defaults=[\'None\', \'UnconnectedGradients.NONE\'], "
-  }
-  member_method {
-    name: "jacobian"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "reset"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "stop_recording"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watch"
-    argspec: "args=[\'self\', \'tensor\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watched_variables"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-}
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.pbtxt
index 5abfdcd109d..61700226fbb 100644
--- a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.pbtxt
+++ b/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.pbtxt
@@ -12,8 +12,4 @@ tf_module {
     name: "LossScale"
     mtype: "<type \'type\'>"
   }
-  member {
-    name: "LossScaleGradientTape"
-    mtype: "<type \'type\'>"
-  }
 }
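
After this change the tape is no longer exported as tf.mixed_precision.experimental.LossScaleGradientTape, but the class itself is unchanged and remains reachable through its internal module path. A minimal usage sketch under that assumption; the DynamicLossScale choice and the v*v toy loss are illustrative and not part of this diff:

import tensorflow as tf
from tensorflow.python.training.experimental import loss_scale as loss_scale_module
from tensorflow.python.training.experimental import loss_scaling_gradient_tape

# DynamicLossScale grows the scale while gradients stay finite and shrinks
# it when an inf/NaN gradient is seen.
loss_scale = loss_scale_module.DynamicLossScale()

v = tf.Variable(2.0)
with loss_scaling_gradient_tape.LossScaleGradientTape(loss_scale) as tape:
  loss = v * v  # trainable variables are watched automatically
# gradient() scales the loss, differentiates, then unscales the result, so
# the returned value is the ordinary derivative d(v*v)/dv = 4.0.
grads = tape.gradient(loss, v)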