mnist example now uses sparse_softmax_cross_entropy_with_logits.
Change: 115636069
commit 5c5b29adda
parent 00986d48bb
@@ -93,19 +93,9 @@ def loss(logits, labels):
   Returns:
     loss: Loss tensor of type float.
   """
-  # Convert from sparse integer labels in the range [0, NUM_CLASSES)
-  # to 1-hot dense float vectors (that is we will have batch_size vectors,
-  # each with NUM_CLASSES values, all of which are 0.0 except there will
-  # be a 1.0 in the entry corresponding to the label).
-  batch_size = tf.size(labels)
-  labels = tf.expand_dims(labels, 1)
-  indices = tf.expand_dims(tf.range(0, batch_size), 1)
-  concated = tf.concat(1, [indices, labels])
-  onehot_labels = tf.sparse_to_dense(
-      concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
-  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
-                                                          onehot_labels,
-                                                          name='xentropy')
+  labels = tf.to_int64(labels)
+  cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
+      logits, labels, name='xentropy')
   loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
   return loss
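The change drops the manual conversion of integer labels to dense one-hot vectors (tf.expand_dims / tf.concat / tf.sparse_to_dense) and feeds the integer class labels straight into tf.nn.sparse_softmax_cross_entropy_with_logits, which avoids materializing the one-hot matrix while producing the same per-example cross-entropy, so the mean loss is unchanged. The NumPy sketch below illustrates that equivalence; it is not TensorFlow's implementation, and the batch size, seed, and helper names (log_softmax, dense_xent, sparse_xent) are made up for illustration.

import numpy as np

NUM_CLASSES = 10  # MNIST digit classes, as in the example

def log_softmax(logits):
    # Numerically stable log-softmax along the class axis.
    shifted = logits - logits.max(axis=1, keepdims=True)
    return shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))

def dense_xent(logits, onehot_labels):
    # Dense form: -sum_k onehot[k] * log_softmax(logits)[k] per example,
    # as computed when one-hot labels are passed in.
    return -(onehot_labels * log_softmax(logits)).sum(axis=1)

def sparse_xent(logits, labels):
    # Sparse form: pick out the log-probability of the true class directly
    # from the integer label, no one-hot matrix needed.
    return -log_softmax(logits)[np.arange(len(labels)), labels]

rng = np.random.default_rng(0)
logits = rng.normal(size=(4, NUM_CLASSES)).astype(np.float32)
labels = rng.integers(0, NUM_CLASSES, size=4)
onehot = np.eye(NUM_CLASSES, dtype=np.float32)[labels]

# Both paths give the same per-example cross-entropy, hence the same mean loss.
assert np.allclose(dense_xent(logits, onehot), sparse_xent(logits, labels))
print(dense_xent(logits, onehot).mean(), sparse_xent(logits, labels).mean())

The practical upshot is that the example no longer allocates a float one-hot tensor of shape [batch_size, NUM_CLASSES] just to compute the loss.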