Update keras-related gradient_test to be a keras integration test.
PiperOrigin-RevId: 306577348
Change-Id: I97f59ea373980b3303109cbf3ee7346a124db823
parent 990e78d98d
commit e22095ef16
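
For context, the check being relocated is a gradient-equivalence test around `tf.recompute_grad`: gradients taken through the recompute-wrapped Keras model must match gradients taken through the model itself. A minimal standalone sketch of that pattern, using only the public TF 2.x API (the `ScaleModel` name and the shapes are illustrative stand-ins, not code from this commit):

import numpy as np
import tensorflow as tf


class ScaleModel(tf.keras.Model):
  # Hypothetical stand-in for the test model: output = weight * input.

  def build(self, input_shape):
    self.w = self.add_weight(name="w", shape=(input_shape[-1],), dtype=tf.float32)

  def call(self, inputs):
    return self.w * inputs


model = ScaleModel()
x = tf.zeros((10, 10), dtype=tf.float32)
model(x)  # Build the weights before wrapping.

wrapped = tf.recompute_grad(model)
with tf.GradientTape(persistent=True) as tape:
  tape.watch(x)
  y_re = wrapped(x)
  y = model(x)

# Gradients w.r.t. the input should agree with and without recompute_grad.
np.testing.assert_allclose(tape.gradient(y, x).numpy(),
                           tape.gradient(y_re, x).numpy())
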
@@ -4885,7 +4885,6 @@ cuda_py_test(
         ":test_ops",
         ":unconnected_gradients",
         ":variable_scope",
-        "//tensorflow/python/keras:engine",
         "//third_party/py/numpy",
         "@absl_py//absl/testing:parameterized",
     ],
@@ -32,6 +32,16 @@ tf_py_test(
     ],
 )
 
+tf_py_test(
+    name = "gradients_test",
+    srcs = ["gradients_test.py"],
+    python_version = "PY3",
+    deps = [
+        "//tensorflow:tensorflow_py",
+        "//tensorflow/python:extra_py_tests_deps",
+    ],
+)
+
 tf_py_test(
     name = "legacy_rnn_test",  # Remove this target in when TF 1 is deprecated.
     srcs = ["legacy_rnn_test.py"],
@@ -0,0 +1,84 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+import tensorflow as tf
+
+
+class TestKerasModelClass(tf.keras.Model):
+  """A simple tensorflow keras Model class definition."""
+
+  def __init__(self, width):
+    super(TestKerasModelClass, self).__init__()
+    self.width = width
+
+  def build(self, input_shape):
+    self.weight = self.add_weight(
+        name="test_keras_var",
+        shape=(self.width,),
+        dtype=tf.float32,
+        trainable=True,
+    )
+
+  def call(self, inputs):
+    return self.weight * inputs
+
+
+class GradientsTest(tf.test.TestCase):
+
+  def _TestVariablesGradient(self, inputs, test_model, vars_to_grad):
+    """Returns gradients of `test_model` with respect to `vars_to_grad`."""
+
+    test_model_re = tf.recompute_grad(test_model)
+
+    with tf.GradientTape(persistent=True) as tape:
+      tape.watch(vars_to_grad)
+      out_re = test_model_re(inputs)
+      out = test_model(inputs)
+
+    grads_re = tape.gradient(out_re, vars_to_grad)
+    grads = tape.gradient(out, vars_to_grad)
+
+    return grads_re, grads
+
+  def testKerasRecompute(self):
+    """Checks that recompute_grad works for a simple Keras Model."""
+
+    test_model = TestKerasModelClass(10)
+    test_input = tf.constant(tf.zeros((10, 10), dtype=np.float32))
+    # Ensures keras model is initialized.
+    test_model(test_input)  # pylint: disable=not-callable
+    grads_re, grads = self._TestVariablesGradient(test_input, test_model,
+                                                  test_input)
+
+    grads_re = self.evaluate(grads_re)
+    grads = self.evaluate(grads)
+    for g, g_re in zip(grads, grads_re):
+      self.assertAllClose(g, g_re)
+
+    grads_re, grads = self._TestVariablesGradient(test_input, test_model,
+                                                  test_model.variables)
+
+    grads_re = self.evaluate(grads_re)
+    grads = self.evaluate(grads)
+    for g, g_re in zip(grads, grads_re):
+      self.assertAllClose(g, g_re)
+
+
+if __name__ == "__main__":
+  tf.test.main()
@@ -33,7 +33,6 @@ from tensorflow.python.framework import ops
 from tensorflow.python.framework import test_ops
 from tensorflow.python.framework import test_util
 from tensorflow.python.framework.constant_op import constant
-from tensorflow.python.keras.engine import training
 from tensorflow.python.layers import core as core_layers
 from tensorflow.python.ops import array_grad  # pylint: disable=unused-import
 from tensorflow.python.ops import array_ops
@@ -1324,41 +1323,8 @@ class TensorListGradientsTest(test_util.TensorFlowTestCase):
     self.assertEquals(self.evaluate(grad), 5.)
 
 
-class TestKerasModelClass(training.Model):
-  """A simple tensorflow keras Model class definition."""
-
-  def __init__(self, width):
-    super(TestKerasModelClass, self).__init__()
-
-    self.weight = variable_scope.get_variable(
-        name="test_keras_var",
-        shape=width,
-        dtype=dtypes.float32,
-        trainable=True,
-        use_resource=True,
-    )
-
-  def call(self, inputs):
-    return self.weight * inputs
-
-
 class VariablesGradientTest(test_util.TensorFlowTestCase):
 
-  def _TestVariablesGradient(self, inputs, test_model, vars_to_grad):
-    """Returns gradients of `test_model` with respect to `vars_to_grad`."""
-
-    test_model_re = custom_gradient.recompute_grad(test_model)
-
-    with backprop.GradientTape(persistent=True) as tape:
-      tape.watch(vars_to_grad)
-      out_re = test_model_re(inputs)
-      out = test_model(inputs)
-
-    grads_re = tape.gradient(out_re, vars_to_grad)
-    grads = tape.gradient(out, vars_to_grad)
-
-    return grads_re, grads
-
   def _TestFnVariablesGradient(self, inputs, test_fn, vars_to_grad):
     """Returns gradients of `test_model` with respect to `vars_to_grad`."""
 
@@ -1374,30 +1340,6 @@ class VariablesGradientTest(test_util.TensorFlowTestCase):
 
     return grads_re, grads
 
-  @test_util.run_in_graph_and_eager_modes
-  def testKerasRecompute(self):
-    """Checks that recompute_grad works for a simple Keras Model."""
-
-    test_model = TestKerasModelClass(10)
-    test_input = constant(np.zeros((10, 10), dtype=np.float32))
-    self.evaluate(variables.global_variables_initializer())
-    test_model(test_input)  # Ensures keras model is initialized.
-    grads_re, grads = self._TestVariablesGradient(test_input, test_model,
-                                                  test_input)
-
-    grads_re = self.evaluate(grads_re)
-    grads = self.evaluate(grads)
-    for g, g_re in zip(grads, grads_re):
-      self.assertAllClose(g, g_re)
-
-    grads_re, grads = self._TestVariablesGradient(test_input, test_model,
-                                                  test_model.variables)
-
-    grads_re = self.evaluate(grads_re)
-    grads = self.evaluate(grads)
-    for g, g_re in zip(grads, grads_re):
-      self.assertAllClose(g, g_re)
-
   @test_util.run_in_graph_and_eager_modes
   def testFnRecompute(self):
     """Checks that recompute_grad works grads of function args."""
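
The non-Keras half of the original test stays behind: testFnRecompute ("Checks that recompute_grad works grads of function args.") keeps covering recompute_grad applied to a plain function, and only the Keras-model variant moves. Its body is elided in this hunk, so the following is only a rough sketch of that style of check under the same public-API assumptions (scale_fn and the shapes are illustrative, not from the commit):

import tensorflow as tf

w = tf.Variable(tf.ones((10,)), name="w")


def scale_fn(x):
  # Plain function (not a Keras model) whose gradients are compared.
  return w * x


scale_fn_re = tf.recompute_grad(scale_fn)

x = tf.ones((10, 10))
with tf.GradientTape(persistent=True) as tape:
  tape.watch(x)
  out_re = scale_fn_re(x)
  out = scale_fn(x)

# Gradient w.r.t. the function argument must match with and without recompute.
tf.debugging.assert_near(tape.gradient(out, x), tape.gradient(out_re, x))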