From 5e32d8fd7f80f36f04fb991b82f747a11083bc68 Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Thu, 28 Feb 2019 11:54:03 -0800
Subject: [PATCH] PR #26170: Fix typo in nmt_with_attention.ipynb

Please approve this CL. It will be submitted automatically, and its GitHub
pull request will be marked as merged.

Imported from GitHub PR #26170

weigths -> weights

Copybara import of the project:

- edcca75684186e4467b255e84ef36d4bbfda4e0e Fix typo in nmt_with_attention.ipynb by Alexa
- 7599568da82684829d1546116947ce2c5c256e6e Merge edcca75684186e4467b255e84ef36d4bbfda4e0e into d4639... by Alexa

COPYBARA_INTEGRATE_REVIEW=https://github.com/tensorflow/tensorflow/pull/26170 from nguyen-alexa:master edcca75684186e4467b255e84ef36d4bbfda4e0e
PiperOrigin-RevId: 236166878
---
 .../examples/nmt_with_attention/nmt_with_attention.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb b/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
index 436e8877361..512605a17eb 100644
--- a/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
+++ b/tensorflow/contrib/eager/python/examples/nmt_with_attention/nmt_with_attention.ipynb
@@ -688,7 +688,7 @@
     "    for t in range(max_length_targ):\n",
     "        predictions, dec_hidden, attention_weights = decoder(dec_input, dec_hidden, enc_out)\n",
     "        \n",
-    "        # storing the attention weigths to plot later on\n",
+    "        # storing the attention weights to plot later on\n",
     "        attention_weights = tf.reshape(attention_weights, (-1, ))\n",
     "        attention_plot[t] = attention_weights.numpy()\n",
     "\n",
@@ -842,4 +842,4 @@
   ]
  }
 ]
-}
\ No newline at end of file
+}