From 688c9d1d9434045f2b89fe19fd2e51a41e5560fa Mon Sep 17 00:00:00 2001
From: Reed Wanderman-Milne <reedwm@google.com>
Date: Wed, 11 Dec 2019 14:15:13 -0800
Subject: [PATCH] Unexpose LossScaleGradientTape.

It doesn't support DistributionStrategy. It will be re-exposed when it
does.

I tried to fix this in #34974, but only made the issue worse. The issue
is that when taking gradients with respect to variables (which occurs
almost every time), it would crash with a very long error message when
DistributionStrategy is used. The unit tests only tested taking
gradients w.r.t. constants, as it was assumed there would be no
functional difference between taking gradients w.r.t. variables and
constants. (A repro sketch follows the diff below.)

PiperOrigin-RevId: 285059221
Change-Id: I9ffc5d68f092f9ff3ea634b9523b67ff2bbc4bd7
---
 .../loss_scaling_gradient_tape.py             |  3 +-
 ...perimental.-loss-scale-gradient-tape.pbtxt | 38 -------------------
 ...sorflow.mixed_precision.experimental.pbtxt |  4 --
 3 files changed, 1 insertion(+), 44 deletions(-)
 delete mode 100644 tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt

diff --git a/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py b/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
index caae7052b84..0afe4c78caf 100644
--- a/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
+++ b/tensorflow/python/training/experimental/loss_scaling_gradient_tape.py
@@ -25,10 +25,9 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
 from tensorflow.python.training.experimental import loss_scale as loss_scale_module
 from tensorflow.python.util import nest
-from tensorflow.python.util.tf_export import tf_export
 
 
-@tf_export("mixed_precision.experimental.LossScaleGradientTape", v1=[])
+# TODO(reedwm): Expose this. Currently it doesn't work with DistributionStrategy
 class LossScaleGradientTape(backprop.GradientTape):
   """A gradient tape that scales losses and unscales resulting gradients.
 
diff --git a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt b/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt
deleted file mode 100644
index 7f4715832e2..00000000000
--- a/tensorflow/tools/api/golden/v2/tensorflow.mixed_precision.experimental.-loss-scale-gradient-tape.pbtxt
+++ /dev/null
@@ -1,38 +0,0 @@
-path: "tensorflow.mixed_precision.experimental.LossScaleGradientTape"
-tf_class {
-  is_instance: "<class \'tensorflow.python.training.experimental.loss_scaling_gradient_tape.LossScaleGradientTape\'>"
-  is_instance: "<class \'tensorflow.python.eager.backprop.GradientTape\'>"
-  is_instance: "<type \'object\'>"
-  member_method {
-    name: "__init__"
-    argspec: "args=[\'self\', \'loss_scale\', \'persistent\', \'watch_accessed_variables\'], varargs=None, keywords=None, defaults=[\'False\', \'True\'], "
-  }
-  member_method {
-    name: "batch_jacobian"
-    argspec: "args=[\'self\', \'target\', \'source\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "gradient"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'output_gradients\', \'unconnected_gradients\'], varargs=None, keywords=None, defaults=[\'None\', \'UnconnectedGradients.NONE\'], "
-  }
-  member_method {
-    name: "jacobian"
-    argspec: "args=[\'self\', \'target\', \'sources\', \'unconnected_gradients\', \'parallel_iterations\', \'experimental_use_pfor\'], varargs=None, keywords=None, defaults=[\'UnconnectedGradients.NONE\', \'None\', \'True\'], "
-  }
-  member_method {
-    name: "reset"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "stop_recording"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watch"
-    argspec: "args=[\'self\', \'tensor\'], varargs=None, keywords=None, defaults=None"
-  }
-  member_method {
-    name: "watched_variables"
-    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
-  }
-}