Set fused=False for batch norm, because the test assumes no Bessel's correction. fused=True would apply Bessel's correction to the variance.

PiperOrigin-RevId: 168274392
Yao Zhang 2017-09-11 13:16:46 -07:00 committed by TensorFlower Gardener
parent f0e8c545e0
commit 123fb01eeb


@@ -47,7 +47,7 @@ def logistic_classifier(inputs):
 def batchnorm_classifier(inputs):
-  inputs = layers.batch_norm(inputs, decay=0.1)
+  inputs = layers.batch_norm(inputs, decay=0.1, fused=False)
   return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
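
For context, a minimal NumPy sketch (not part of this commit; the batch values are made up for illustration) of the distinction the commit message describes: the biased (population) variance divides the sum of squared deviations by N, while Bessel's correction divides by N - 1. The test's expected values assume the biased estimate, so they only match with fused=False.

import numpy as np

# Hypothetical batch of activations, purely for illustration.
x = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32)
n = x.size
sq_dev = np.sum((x - x.mean()) ** 2)

biased_var = sq_dev / n        # population variance: what the test assumes
bessel_var = sq_dev / (n - 1)  # Bessel-corrected variance

print(biased_var)  # 1.25
print(bessel_var)  # ~1.6667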