From 64a9a35d4ec4b6f42fc050b00aeaa13ed86bffbf Mon Sep 17 00:00:00 2001 From: Rohan Jain Date: Tue, 25 Jun 2019 13:19:33 -0700 Subject: [PATCH] During non-eager execution there was a bit of inconsistency with the way we handled the placeholders versus the actually fed inputs when the inputs were scalars. The standardize_single_array method expands dimensions so that (None,) inputs look like (None, 1) instead, but the placeholder shapes were the old (None,). PiperOrigin-RevId: 255033566 --- .../feature_columns_integration_test.py | 39 +++++++++++++++++++ .../python/keras/engine/training_test.py | 2 +- .../python/keras/engine/training_utils.py | 4 ++ 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/tensorflow/python/keras/engine/feature_columns_integration_test.py b/tensorflow/python/keras/engine/feature_columns_integration_test.py index 8c3d7a9d1a7..c06b049fbcc 100644 --- a/tensorflow/python/keras/engine/feature_columns_integration_test.py +++ b/tensorflow/python/keras/engine/feature_columns_integration_test.py @@ -94,6 +94,45 @@ class FeatureColumnsIntegrationTest(keras_parameterized.TestCase): model.evaluate(ds, steps=1) model.predict(ds, steps=1) + @keras_parameterized.run_all_keras_modes(always_skip_v1=True) + def test_sequential_model_with_crossed_column(self): + feature_columns = [] + age_buckets = fc.bucketized_column( + fc.numeric_column('age'), + boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) + feature_columns.append(age_buckets) + + # indicator cols + thal = fc.categorical_column_with_vocabulary_list( + 'thal', ['fixed', 'normal', 'reversible']) + + crossed_feature = fc.crossed_column([age_buckets, thal], + hash_bucket_size=1000) + crossed_feature = fc.indicator_column(crossed_feature) + feature_columns.append(crossed_feature) + + feature_layer = fc.DenseFeatures(feature_columns) + + model = keras.models.Sequential([ + feature_layer, + keras.layers.Dense(128, activation='relu'), + keras.layers.Dense(128, activation='relu'),
keras.layers.Dense(1, activation='sigmoid') + ]) + + age_data = np.random.randint(10, 100, size=100) + thal_data = np.random.choice(['fixed', 'normal', 'reversible'], size=100) + inp_x = {'age': age_data, 'thal': thal_data} + inp_y = np.random.randint(0, 1, size=100) + ds = dataset_ops.Dataset.from_tensor_slices((inp_x, inp_y)).batch(5) + model.compile(optimizer='adam', + loss='binary_crossentropy', + metrics=['accuracy'],) + model.fit(ds, epochs=1) + model.fit(ds, epochs=1) + model.evaluate(ds) + model.predict(ds) + @keras_parameterized.run_all_keras_modes def test_subclassed_model_with_feature_columns(self): col_a = fc.numeric_column('a') diff --git a/tensorflow/python/keras/engine/training_test.py b/tensorflow/python/keras/engine/training_test.py index b415e8c8fa4..389fb5ab1f5 100644 --- a/tensorflow/python/keras/engine/training_test.py +++ b/tensorflow/python/keras/engine/training_test.py @@ -250,7 +250,7 @@ class TrainingTest(keras_parameterized.TestCase): model = keras.Sequential([ReturnTraining()]) model.compile('sgd', 'mse') hist = model.fit(x=np.array([0.]), y=np.array([0.])) - self.assertAllClose(hist.history['loss'][0], (10000,)) + self.assertAllClose(hist.history['loss'][0], 10000) @keras_parameterized.run_with_all_model_types(exclude_models='sequential') @keras_parameterized.run_all_keras_modes diff --git a/tensorflow/python/keras/engine/training_utils.py b/tensorflow/python/keras/engine/training_utils.py index a63053a2236..bbcab3ee81f 100644 --- a/tensorflow/python/keras/engine/training_utils.py +++ b/tensorflow/python/keras/engine/training_utils.py @@ -1619,12 +1619,16 @@ class ModelInputs(object): # we have. The user should call `model._set_inputs(placeholders)` # to specify custom placeholders if the need arises.
shape = (None,) + tuple(v.shape[1:]) + if shape == (None,): + shape = (None, 1) dtype = dtypes.as_dtype(v.dtype) if dtype.is_floating: dtype = K.floatx() v = K.placeholder(shape=shape, name=k, dtype=dtype) elif isinstance(v, tensor_spec.TensorSpec): shape = (None,) + tuple(v.shape.as_list()[1:]) + if shape == (None,): + shape = (None, 1) v = K.placeholder(shape=shape, name=k, dtype=v.dtype) self._flattened_inputs[i] = v