Add fused/non-fused inference/training overhead benchmarks for BatchNormalization.

PiperOrigin-RevId: 316690878
Change-Id: I36a0c8595b973657ae3cb8f95c11ba797cc4dcab
author    Thomas O'Malley 2020-06-16 08:54:22 -07:00
committer TensorFlower Gardener
parent c048776216
commit 878ac5ae83
1 changed file with 33 additions and 3 deletions
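
The four new benchmarks cover each combination of fused/non-fused kernel and inference/training mode: fused=True selects the single fused batch-norm op, fused=False the composed multi-op implementation, and the rank-4 input of ones is needed because the fused kernel only accepts 4-D (NHWC) tensors while keeping the arithmetic negligible, so per-call overhead dominates. As a rough standalone sketch of the same comparison using the public Keras API (the timeit loop and printed format are illustrative assumptions, not part of this commit):

import timeit

import tensorflow as tf

# Tiny rank-4 input: the fused kernel requires 4-D tensors, and a single
# element keeps the math negligible so per-call overhead dominates.
x = tf.ones((1, 1, 1, 1))

for fused in (True, False):
  layer = tf.keras.layers.BatchNormalization(fused=fused)
  layer(x)  # Build the layer's variables once, outside the timed region.
  for training in (False, True):
    per_call_us = timeit.timeit(
        lambda: layer(x, training=training), number=10000) / 10000 * 1e6
    print('fused=%s training=%s: %.1f us/call' % (fused, training, per_call_us))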

@@ -246,10 +246,40 @@ class MicroBenchmarksBase(test.Benchmark):
 
     self._run(fn, 10000)
 
-  def benchmark_layers_normalization_batch_normalization_overhead(self):
+  def benchmark_layers_batch_norm_fused_inf(self):
+
+    layer = normalization.BatchNormalization(fused=True)
+    x = array_ops.ones((1, 1, 1, 1))
+
+    def fn():
+      layer(x)
+
+    self._run(fn, 10000)
+
+  def benchmark_layers_batch_norm_fused_train(self):
+
+    layer = normalization.BatchNormalization(fused=True)
+    x = array_ops.ones((1, 1, 1, 1))
+
+    def fn():
+      layer(x, training=True)
+
+    self._run(fn, 10000)
+
+  def benchmark_layers_batch_norm_nonfused_inf(self):
+
+    layer = normalization.BatchNormalization(fused=False)
+    x = array_ops.ones((1, 1, 1, 1))
+
+    def fn():
+      layer(x)
+
+    self._run(fn, 10000)
+
+  def benchmark_layers_batch_norm_nonfused_train(self):
 
-    layer = normalization.BatchNormalization()
-    x = array_ops.ones((1, 1))
+    layer = normalization.BatchNormalization(fused=False)
+    x = array_ops.ones((1, 1, 1, 1))
 
     def fn():
       layer(x, training=True)
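
The _run helper each benchmark calls is defined earlier in benchmarks_test.py and is outside this hunk; as a minimal sketch of what such a micro-benchmark runner plausibly does (the warm-up call and reporting details here are assumptions, not the file's actual implementation):

import time

from tensorflow.python.platform import test


class MicroBenchmarksBase(test.Benchmark):

  def _run(self, func, num_iters):
    func()  # Warm up once so one-time setup cost is not timed.
    start = time.time()
    for _ in range(num_iters):
      func()
    mean_time = (time.time() - start) / num_iters
    # report_benchmark is provided by test.Benchmark and records the result.
    self.report_benchmark(iters=num_iters, wall_time=mean_time)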