Move Keras related parallel ops test to keras/integration

PiperOrigin-RevId: 306709806
Change-Id: I9e59adba1ec7c0192c17ab74f8fa9759f2d4ee58
This commit is contained in:
Scott Zhu 2020-04-15 13:31:13 -07:00 committed by TensorFlower Gardener
parent 97e4f0d394
commit 8c849c6550
3 changed files with 57 additions and 23 deletions

View File

@@ -60,3 +60,13 @@ tf_py_test(
"//tensorflow/python:extra_py_tests_deps",
],
)
# Integration test for the public tf.vectorized_map API. Depends on the full
# TensorFlow Python package (rather than narrow targets) so that tf.keras
# layers used by the test resolve at runtime.
tf_py_test(
    name = "vectorized_map_test",
    srcs = ["vectorized_map_test.py"],
    python_version = "PY3",
    deps = [
        "//tensorflow:tensorflow_py",
        "//tensorflow/python:extra_py_tests_deps",
    ],
)

View File

@@ -0,0 +1,47 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
class VectorizedMapTest(tf.test.TestCase):
  """Integration test: tf.vectorized_map over a Keras layer's gradients."""

  def test_vectorized_map(self):
    n_examples = 10
    n_features = 32
    dense = tf.keras.layers.Dense(1)

    def per_example_grads(example):
      """Returns (kernel, bias) gradients of the L2 loss for one example."""
      with tf.GradientTape() as tape:
        features, target = example
        # Each element arrives unbatched; add a leading dim of size 1 so the
        # Dense layer accepts it.
        features = tf.expand_dims(features, 0)
        target = tf.expand_dims(target, 0)
        predicted = dense(features)
        loss = tf.nn.l2_loss(target - predicted)
      return tape.gradient(loss, (dense.kernel, dense.bias))

    inputs = tf.random.uniform([n_examples, n_features])
    labels = tf.random.uniform([n_examples, 1])
    grads = tf.vectorized_map(per_example_grads, (inputs, labels))
    # vectorized_map stacks the per-example results along a new leading axis.
    self.assertEqual(grads[0].shape, (n_examples, n_features, 1))
    self.assertEqual(grads[1].shape, (n_examples, 1))
# Run the test suite under TensorFlow's test runner when executed directly.
if __name__ == "__main__":
  tf.test.main()

View File

@@ -37,7 +37,6 @@ from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.keras.layers import core as keras_core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import bitwise_ops
from tensorflow.python.ops import cond_v2
@@ -140,28 +139,6 @@ class PForTest(PForTestCase):
c = pfor_control_flow_ops.vectorized_map(outer_product, a)
self.assertAllEqual((batch_size, 32, 32, 32, 32), c.shape)
  def test_vectorized_map_example_2(self):
    """Per-example (kernel, bias) gradients of a Dense layer via
    pfor_control_flow_ops.vectorized_map."""
    batch_size = 10
    num_features = 32
    layer = keras_core.Dense(1)

    def model_fn(arg):
      # Computes gradients of the L2 loss for a single (input, label) pair.
      # Each element arrives unbatched, so a leading dim of size 1 is added
      # before it is fed to the Dense layer.
      with backprop.GradientTape() as g:
        inp, label = arg
        inp = array_ops.expand_dims(inp, 0)
        label = array_ops.expand_dims(label, 0)
        prediction = layer(inp)
        loss = nn.l2_loss(label - prediction)
      return g.gradient(loss, (layer.kernel, layer.bias))

    inputs = random_ops.random_uniform([batch_size, num_features])
    labels = random_ops.random_uniform([batch_size, 1])
    per_example_gradients = pfor_control_flow_ops.vectorized_map(
        model_fn, (inputs, labels))
    # vectorized_map stacks per-example results along a new leading batch axis.
    self.assertAllEqual(per_example_gradients[0].shape,
                        (batch_size, num_features, 1))
    self.assertAllEqual(per_example_gradients[1].shape, (batch_size, 1))
def test_disable_tf_function(self):
def_function.run_functions_eagerly(True)
# vectorized_map should ignore disabling tf.functions