Set fused=False for batch norm, because the test assumes no Bessel's
correction. fused=True would apply Bessel's correction to the variance. PiperOrigin-RevId: 168274392
This commit is contained in:
parent
f0e8c545e0
commit
123fb01eeb
@ -47,7 +47,7 @@ def logistic_classifier(inputs):
|
|||||||
|
|
||||||
|
|
||||||
def batchnorm_classifier(inputs):
|
def batchnorm_classifier(inputs):
|
||||||
inputs = layers.batch_norm(inputs, decay=0.1)
|
inputs = layers.batch_norm(inputs, decay=0.1, fused=False)
|
||||||
return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
|
return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
|
||||||
|
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user