Only enable use_fused_avg_updates in non-XLA context.

PiperOrigin-RevId: 305503998
Change-Id: Ibe12750b5b545e5ba9e29c3e28aea9f8e44ea2a4
This commit is contained in:
Ruoxin Sang 2020-04-08 10:22:39 -07:00 committed by TensorFlower Gardener
parent 809c4e4114
commit e8a98c86ab
2 changed files with 39 additions and 1 deletion

View File

@@ -214,6 +214,42 @@ class KerasModelsTest(test.TestCase, parameterized.TestCase):
train_step(input_iterator)
@combinations.generate(
    combinations.combine(
        distribution=strategy_combinations.all_strategies,
        mode=["eager"]
    ))
def test_batch_norm_with_dynamic_batch(self, distribution):
  """Trains a fused-BatchNormalization model under each distribution strategy.

  Uses `drop_remainder=False` so the batch dimension of the distributed
  dataset is unknown at trace time, exercising batch norm with a dynamic
  batch size.
  """
  features = np.zeros((10, 3, 3, 3), dtype=np.float32)
  labels = np.zeros((10, 4), dtype=np.float32)
  data = dataset_ops.Dataset.from_tensor_slices((features, labels))
  data = data.repeat()
  # drop_remainder=False leaves the static batch dimension as None.
  data = data.batch(10, drop_remainder=False)
  dist_iterator = iter(distribution.experimental_distribute_dataset(data))

  with distribution.scope():
    # Small conv-free model whose first layer is a fused batch norm.
    image_in = keras.layers.Input(shape=(3, 3, 3), name="input")
    net = keras.layers.BatchNormalization(fused=True, name="bn")(image_in)
    net = keras.layers.Flatten()(net)
    net = keras.layers.Dense(4, name="dense")(net)
    model = keras.Model(image_in, net)
    optimizer = keras.optimizer_v2.rmsprop.RMSprop()

  @def_function.function
  def train_step(iterator):
    def step_fn(inputs):
      images, targets = inputs
      with backprop.GradientTape() as tape:
        outputs = model(images, training=True)
        loss = math_ops.reduce_sum(outputs - targets)
      grads = tape.gradient(loss, model.variables)
      optimizer.apply_gradients(zip(grads, model.variables))
      return loss

    distribution.run(step_fn, args=(next(iterator),))

  # One traced training step is enough to catch shape/tracing failures.
  train_step(dist_iterator)
@combinations.generate(
combinations.combine(
distribution=strategy_combinations.all_strategies,

View File

@@ -38,6 +38,7 @@ from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import device_context
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
@@ -546,7 +547,8 @@ class BatchNormalizationBase(Layer):
use_fused_avg_updates = (
compat.forward_compatible(2020, 3, 6) and
ops.executing_eagerly_outside_functions() and
isinstance(self.momentum, (float, int)))
isinstance(self.momentum, (float, int)) and
device_context.enclosing_tpu_context() is None)
if use_fused_avg_updates:
exponential_avg_factor = 1.0 - self.momentum
else: