Make core layers tests run in graph and eager mode.
PiperOrigin-RevId: 225231668
Commit 250ab666f6 (parent 16069bf874)
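For readers unfamiliar with the pattern this change adopts, here is a minimal sketch of how a test class is decorated with keras_parameterized.run_all_keras_modes so that each test method runs under both graph and eager execution, instead of relying on hand-written cached_session() blocks. The imports and the layer_test call mirror the diff below; the class name ExampleDropoutTest and the standalone main block are illustrative assumptions, not part of this commit.

# Illustrative sketch only (not part of this commit): run one layer test in
# every Keras execution mode using the decorator introduced in the diff below.
from tensorflow.python import keras
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test


@keras_parameterized.run_all_keras_modes
class ExampleDropoutTest(keras_parameterized.TestCase):

  def test_dropout(self):
    # layer_test builds a small model around the layer and exercises it in
    # whichever execution mode the parameterization is currently running.
    testing_utils.layer_test(
        keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))


if __name__ == '__main__':
  test.main()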
@@ -401,7 +401,7 @@ py_test(
    name = "core_test",
    size = "medium",
    srcs = ["layers/core_test.py"],
    shard_count = 2,
    shard_count = 3,
    srcs_version = "PY2AND3",
    deps = [
        ":keras",
@@ -22,43 +22,36 @@ import numpy as np

from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test


class CoreLayersTest(test.TestCase):

  def test_masking(self):
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
@keras_parameterized.run_all_keras_modes
class DropoutLayersTest(keras_parameterized.TestCase):

  def test_dropout(self):
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))
    testing_utils.layer_test(
        keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))

    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Dropout,
          kwargs={'rate': 0.5,
                  'noise_shape': [3, 1]},
          input_shape=(3, 2))
    testing_utils.layer_test(
        keras.layers.Dropout,
        kwargs={'rate': 0.5,
                'noise_shape': [3, 1]},
        input_shape=(3, 2))

    # https://github.com/tensorflow/tensorflow/issues/14819
    with self.cached_session():
      dropout = keras.layers.Dropout(0.5)
      self.assertEqual(True, dropout.supports_masking)
  def test_dropout_supports_masking(self):
    dropout = keras.layers.Dropout(0.5)
    self.assertEqual(True, dropout.supports_masking)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_spatial_dropout(self):
  def test_spatial_dropout_1d(self):
    testing_utils.layer_test(
        keras.layers.SpatialDropout1D,
        kwargs={'rate': 0.5},
        input_shape=(2, 3, 4))

  def test_spatial_dropout_2d(self):
    testing_utils.layer_test(
        keras.layers.SpatialDropout2D,
        kwargs={'rate': 0.5},
@@ -69,6 +62,7 @@ class CoreLayersTest(test.TestCase):
        kwargs={'rate': 0.5, 'data_format': 'channels_first'},
        input_shape=(2, 3, 4, 5))

  def test_spatial_dropout_3d(self):
    testing_utils.layer_test(
        keras.layers.SpatialDropout3D,
        kwargs={'rate': 0.5},
@@ -79,94 +73,9 @@ class CoreLayersTest(test.TestCase):
        kwargs={'rate': 0.5, 'data_format': 'channels_first'},
        input_shape=(2, 3, 4, 4, 5))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_activation(self):
    # with string argument
    testing_utils.layer_test(
        keras.layers.Activation,
        kwargs={'activation': 'relu'},
        input_shape=(3, 2))

    # with function argument
    testing_utils.layer_test(
        keras.layers.Activation,
        kwargs={'activation': keras.backend.relu},
        input_shape=(3, 2))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_reshape(self):
    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (8, 1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (-1, 1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (1, -1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (-1, 1)},
        input_shape=(None, None, 2))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_permute(self):
    testing_utils.layer_test(
        keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_permute_errors_on_invalid_starting_dims_index(self):
    with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
      testing_utils.layer_test(
          keras.layers.Permute,
          kwargs={'dims': (0, 1, 2)}, input_shape=(3, 2, 4))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_permute_errors_on_invalid_set_of_dims_indices(self):
    with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
      testing_utils.layer_test(
          keras.layers.Permute,
          kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_flatten(self):
    testing_utils.layer_test(
        keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))

    # Test channels_first
    inputs = np.random.random((10, 3, 5, 5)).astype('float32')
    outputs = testing_utils.layer_test(
        keras.layers.Flatten,
        kwargs={'data_format': 'channels_first'},
        input_data=inputs)
    target_outputs = np.reshape(
        np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3))
    self.assertAllClose(outputs, target_outputs)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_flatten_scalar_channels(self):
    testing_utils.layer_test(
        keras.layers.Flatten, kwargs={}, input_shape=(3,))

    # Test channels_first
    inputs = np.random.random((10,)).astype('float32')
    outputs = testing_utils.layer_test(
        keras.layers.Flatten,
        kwargs={'data_format': 'channels_first'},
        input_data=inputs)
    target_outputs = np.expand_dims(inputs, -1)
    self.assertAllClose(outputs, target_outputs)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_repeat_vector(self):
    testing_utils.layer_test(
        keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))
@keras_parameterized.run_all_keras_modes
class LambdaLayerTest(keras_parameterized.TestCase):

  def test_lambda(self):
    testing_utils.layer_test(
@@ -202,7 +111,6 @@ class CoreLayersTest(test.TestCase):
    config = ld.get_config()
    ld = keras.layers.Lambda.from_config(config)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_multiple_inputs(self):
    ld = keras.layers.Lambda(lambda x: x[0], output_shape=lambda x: x[0])
    x1 = np.ones([3, 2], np.float32)
@@ -210,7 +118,158 @@ class CoreLayersTest(test.TestCase):
    out = ld([x1, x2])
    self.assertAllEqual(out.shape, [3, 2])

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_output_shape(self):
    l = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
    l(keras.backend.variable(np.ones((1, 1))))
    self.assertEqual((1, 1), l.get_config()['output_shape'])

  def test_lambda_output_shape_function(self):
    def get_output_shape(input_shape):
      return 1 * input_shape

    l = keras.layers.Lambda(lambda x: x + 1, output_shape=get_output_shape)
    l(keras.backend.variable(np.ones((1, 1))))
    self.assertEqual('lambda', l.get_config()['output_shape_type'])

  def test_lambda_output_shape_autocalculate_multiple_inputs(self):

    def lambda_fn(x):
      return math_ops.matmul(x[0], x[1])

    l = keras.layers.Lambda(lambda_fn)
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual((10, 20), output_shape)

  def test_lambda_output_shape_list_multiple_outputs(self):

    def lambda_fn(x):
      return x

    l = keras.layers.Lambda(lambda_fn, output_shape=[(10,), (20,)])
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual([(10, 10), (10, 20)], output_shape)

  def test_lambda_output_shape_tuple_with_none(self):

    def lambda_fn(x):
      return x

    l = keras.layers.Lambda(lambda_fn, output_shape=(None, 10))
    output_shape = l.compute_output_shape((5, 10, 20))
    self.assertAllEqual([5, None, 10], output_shape.as_list())

  def test_lambda_output_shape_function_multiple_outputs(self):

    def lambda_fn(x):
      return x

    def output_shape_fn(input_shape):
      return input_shape

    l = keras.layers.Lambda(lambda_fn, output_shape=output_shape_fn)
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual([(10, 10), (10, 20)], output_shape)

  def test_lambda_config_serialization(self):
    # Test serialization with output_shape and output_shape_type
    layer = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
    layer(keras.backend.variable(np.ones((1, 1))))
    config = layer.get_config()
    layer = keras.layers.deserialize({
        'class_name': 'Lambda',
        'config': config
    })
    layer = keras.layers.Lambda.from_config(config)


@keras_parameterized.run_all_keras_modes
class CoreLayersTest(keras_parameterized.TestCase):

  def test_masking(self):
    testing_utils.layer_test(
        keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))

  def test_activation(self):
    # with string argument
    testing_utils.layer_test(
        keras.layers.Activation,
        kwargs={'activation': 'relu'},
        input_shape=(3, 2))

    # with function argument
    testing_utils.layer_test(
        keras.layers.Activation,
        kwargs={'activation': keras.backend.relu},
        input_shape=(3, 2))

  def test_reshape(self):
    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (8, 1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (-1, 1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (1, -1)},
        input_shape=(3, 2, 4))

    testing_utils.layer_test(
        keras.layers.Reshape,
        kwargs={'target_shape': (-1, 1)},
        input_shape=(None, None, 2))

  def test_permute(self):
    testing_utils.layer_test(
        keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))

  def test_permute_errors_on_invalid_starting_dims_index(self):
    with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
      testing_utils.layer_test(
          keras.layers.Permute,
          kwargs={'dims': (0, 1, 2)}, input_shape=(3, 2, 4))

  def test_permute_errors_on_invalid_set_of_dims_indices(self):
    with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
      testing_utils.layer_test(
          keras.layers.Permute,
          kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4))

  def test_flatten(self):
    testing_utils.layer_test(
        keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))

    # Test channels_first
    inputs = np.random.random((10, 3, 5, 5)).astype('float32')
    outputs = testing_utils.layer_test(
        keras.layers.Flatten,
        kwargs={'data_format': 'channels_first'},
        input_data=inputs)
    target_outputs = np.reshape(
        np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3))
    self.assertAllClose(outputs, target_outputs)

  def test_flatten_scalar_channels(self):
    testing_utils.layer_test(
        keras.layers.Flatten, kwargs={}, input_shape=(3,))

    # Test channels_first
    inputs = np.random.random((10,)).astype('float32')
    outputs = testing_utils.layer_test(
        keras.layers.Flatten,
        kwargs={'data_format': 'channels_first'},
        input_data=inputs)
    target_outputs = np.expand_dims(inputs, -1)
    self.assertAllClose(outputs, target_outputs)

  def test_repeat_vector(self):
    testing_utils.layer_test(
        keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))

  def test_dense(self):
    testing_utils.layer_test(
        keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 2))
@@ -225,105 +284,31 @@ class CoreLayersTest(test.TestCase):
        keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 5, 2))

  def test_dense_regularization(self):
    with self.cached_session():
      layer = keras.layers.Dense(
          3,
          kernel_regularizer=keras.regularizers.l1(0.01),
          bias_regularizer='l1',
          activity_regularizer='l2',
          name='dense_reg')
      layer(keras.backend.variable(np.ones((2, 4))))
      self.assertEqual(3, len(layer.losses))
    layer = keras.layers.Dense(
        3,
        kernel_regularizer=keras.regularizers.l1(0.01),
        bias_regularizer='l1',
        activity_regularizer='l2',
        name='dense_reg')
    layer(keras.backend.variable(np.ones((2, 4))))
    self.assertEqual(3, len(layer.losses))

  def test_dense_constraints(self):
    with self.cached_session():
      k_constraint = keras.constraints.max_norm(0.01)
      b_constraint = keras.constraints.max_norm(0.01)
      layer = keras.layers.Dense(
          3, kernel_constraint=k_constraint, bias_constraint=b_constraint)
      layer(keras.backend.variable(np.ones((2, 4))))
      self.assertEqual(layer.kernel.constraint, k_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)
    k_constraint = keras.constraints.max_norm(0.01)
    b_constraint = keras.constraints.max_norm(0.01)
    layer = keras.layers.Dense(
        3, kernel_constraint=k_constraint, bias_constraint=b_constraint)
    layer(keras.backend.variable(np.ones((2, 4))))
    self.assertEqual(layer.kernel.constraint, k_constraint)
    self.assertEqual(layer.bias.constraint, b_constraint)

  def test_activity_regularization(self):
    with self.cached_session():
      layer = keras.layers.ActivityRegularization(l1=0.1)
      layer(keras.backend.variable(np.ones((2, 4))))
      self.assertEqual(1, len(layer.losses))
      _ = layer.get_config()
    layer = keras.layers.ActivityRegularization(l1=0.1)
    layer(keras.backend.variable(np.ones((2, 4))))
    self.assertEqual(1, len(layer.losses))
    config = layer.get_config()
    self.assertEqual(config.pop('l1'), 0.1)

  def test_lambda_output_shape(self):
    with self.cached_session():
      l = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
      l(keras.backend.variable(np.ones((1, 1))))
      self.assertEqual((1, 1), l.get_config()['output_shape'])

  def test_lambda_output_shape_function(self):
    def get_output_shape(input_shape):
      return 1 * input_shape

    with self.cached_session():
      l = keras.layers.Lambda(lambda x: x + 1, output_shape=get_output_shape)
      l(keras.backend.variable(np.ones((1, 1))))
      self.assertEqual('lambda', l.get_config()['output_shape_type'])

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_output_shape_autocalculate_multiple_inputs(self):

    def lambda_fn(x):
      return math_ops.matmul(x[0], x[1])

    l = keras.layers.Lambda(lambda_fn)
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual((10, 20), output_shape)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_output_shape_list_multiple_outputs(self):

    def lambda_fn(x):
      return x

    l = keras.layers.Lambda(lambda_fn, output_shape=[(10,), (20,)])
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual([(10, 10), (10, 20)], output_shape)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_output_shape_tuple_with_none(self):

    def lambda_fn(x):
      return x

    l = keras.layers.Lambda(lambda_fn, output_shape=(None, 10))
    output_shape = l.compute_output_shape((5, 10, 20))
    self.assertAllEqual([5, None, 10], output_shape.as_list())

  @tf_test_util.run_in_graph_and_eager_modes
  def test_lambda_output_shape_function_multiple_outputs(self):

    def lambda_fn(x):
      return x

    def output_shape_fn(input_shape):
      return input_shape

    l = keras.layers.Lambda(lambda_fn, output_shape=output_shape_fn)
    output_shape = l.compute_output_shape([(10, 10), (10, 20)])
    self.assertAllEqual([(10, 10), (10, 20)], output_shape)

  def test_lambda_config_serialization(self):
    with self.cached_session():
      # test serialization with output_shape and output_shape_type
      layer = keras.layers.Lambda(lambda x: x + 1, output_shape=(1, 1))
      layer(keras.backend.variable(np.ones((1, 1))))
      config = layer.get_config()
      layer = keras.layers.deserialize({
          'class_name': 'Lambda',
          'config': config
      })

      layer = keras.layers.Lambda.from_config(config)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_numpy_inputs(self):
    if context.executing_eagerly():
      layer = keras.layers.RepeatVector(2)
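The last hunk above is truncated inside test_numpy_inputs, which branches on context.executing_eagerly(). As an illustration only (not part of this commit), the sketch below shows how such a mode-aware check can look inside a run_all_keras_modes test; the class name ExampleModeAwareTest and the test method are hypothetical.

# Illustrative sketch only: branch on the active execution mode inside a
# parameterized Keras test, as the truncated test_numpy_inputs above does.
import numpy as np

from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.keras import keras_parameterized


@keras_parameterized.run_all_keras_modes
class ExampleModeAwareTest(keras_parameterized.TestCase):

  def test_repeat_vector_on_numpy_input(self):
    layer = keras.layers.RepeatVector(2)
    x = np.ones((3, 2), dtype='float32')
    out = layer(x)  # Keras layers accept NumPy inputs in both modes.
    if context.executing_eagerly():
      # Eager mode: the result is a concrete value we can inspect directly.
      self.assertEqual((3, 2, 2), out.numpy().shape)
    else:
      # Graph mode: only a symbolic tensor is available; check its static shape.
      self.assertEqual([3, 2, 2], out.shape.as_list())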