From 5c5b29adda87f6fbedd1e629d516af317358d556 Mon Sep 17 00:00:00 2001
From: Eugene Brevdo
Date: Thu, 25 Feb 2016 20:44:34 -0800
Subject: [PATCH] mnist example now uses sparse_softmax_cross_entropy_with_logits.

Change: 115636069
---
 tensorflow/examples/tutorials/mnist/mnist.py | 16 +++-------------
 1 file changed, 3 insertions(+), 13 deletions(-)

diff --git a/tensorflow/examples/tutorials/mnist/mnist.py b/tensorflow/examples/tutorials/mnist/mnist.py
index 5cbd6306566..647b226afa2 100644
--- a/tensorflow/examples/tutorials/mnist/mnist.py
+++ b/tensorflow/examples/tutorials/mnist/mnist.py
@@ -93,19 +93,9 @@ def loss(logits, labels):
   Returns:
     loss: Loss tensor of type float.
   """
-  # Convert from sparse integer labels in the range [0, NUM_CLASSES)
-  # to 1-hot dense float vectors (that is we will have batch_size vectors,
-  # each with NUM_CLASSES values, all of which are 0.0 except there will
-  # be a 1.0 in the entry corresponding to the label).
-  batch_size = tf.size(labels)
-  labels = tf.expand_dims(labels, 1)
-  indices = tf.expand_dims(tf.range(0, batch_size), 1)
-  concated = tf.concat(1, [indices, labels])
-  onehot_labels = tf.sparse_to_dense(
-      concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
-  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
-                                                          onehot_labels,
-                                                          name='xentropy')
+  labels = tf.to_int64(labels)
+  cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
+      logits, labels, name='xentropy')
   loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
   return loss
 
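
Note (not part of the patch): the two code paths compute the same per-example
loss; sparse_softmax_cross_entropy_with_logits simply takes the integer class
labels directly and skips materializing the dense one-hot matrix. Below is a
minimal equivalence check, a sketch assuming the pre-1.0 TensorFlow API this
file targets (tf.concat(dim, values), tf.pack, positional logits/labels, and
graph-mode sessions); the toy batch and variable names are illustrative only.

    import numpy as np
    import tensorflow as tf

    NUM_CLASSES = 10

    # Toy batch: 4 examples with integer class labels.
    logits_np = np.random.randn(4, NUM_CLASSES).astype(np.float32)
    labels_np = np.array([3, 1, 4, 1], dtype=np.int32)

    logits = tf.constant(logits_np)
    labels = tf.constant(labels_np)

    # Old path: expand integer labels to a dense one-hot matrix,
    # then take the dense softmax cross-entropy.
    batch_size = tf.size(labels)
    indices = tf.expand_dims(tf.range(0, batch_size), 1)
    concated = tf.concat(1, [indices, tf.expand_dims(labels, 1)])
    onehot = tf.sparse_to_dense(
        concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
    dense_xent = tf.nn.softmax_cross_entropy_with_logits(logits, onehot)

    # New path: feed the integer labels directly (the op expects int64
    # labels at this point in the API, hence the cast in the patch).
    sparse_xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits, tf.to_int64(labels))

    with tf.Session() as sess:
      d, s = sess.run([dense_xent, sparse_xent])
      print(np.allclose(d, s))  # expected: True

Beyond the shorter code, the sparse op avoids building a batch_size x
NUM_CLASSES one-hot tensor, which saves memory and time when the number of
classes is large.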