Edited comments for test script

Saket Khandelwal 2019-08-07 19:35:05 +10:00
parent 385af760d2
commit 4e70b01c14
2 changed files with 6 additions and 6 deletions


@@ -56,9 +56,9 @@ class AdagradDAOptimizerTest(xla_test.XLATestCase):
       # Run a step of AdagradDA
       update.run()
-      # Let g to be gradient accumulator, gg to be gradient squared
-      # accumulator, T be the global step, lr is the learning rate, and k the
-      # initial gradient squared accumulator value.
+      # Let g be the gradient accumulator, gg be the gradient squared
+      # accumulator, T be the global step, lr be the learning rate,
+      # and k the initial gradient squared accumulator value.
       # w = \dfrac{sign(-g)*lr*|g - l1*T|_{+}}{l2*T*lr + \sqrt{k+gg}}
       # For -1*3.0*(0.1 - 0)/(0 + sqrt(0.1 + 0.1*0.1)) = -0.904534
       # similarly for others.
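As a quick sanity check on the formula in the comment, here is a minimal NumPy sketch (NumPy is an added dependency, and the variable names are illustrative) that plugs in the worked example's values, reading |.|_{+} as the positive part:

import numpy as np

# Values from the worked example above: one step with gradient 0.1,
# lr = 3.0, initial accumulator k = 0.1, and no l1/l2 regularization.
g, gg = 0.1, 0.1 * 0.1  # gradient accumulator and squared-gradient accumulator
T, lr, k = 1, 3.0, 0.1  # global step, learning rate, initial accumulator value
l1, l2 = 0.0, 0.0       # regularization strengths (both zero in this case)

# w = sign(-g) * lr * max(|g| - l1*T, 0) / (l2*T*lr + sqrt(k + gg))
w = np.sign(-g) * lr * max(abs(g) - l1 * T, 0.0) / (l2 * T * lr + np.sqrt(k + gg))
print(w)  # -0.904534..., matching the value in the comment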


@@ -63,9 +63,9 @@ class AdagradDAOptimizerTest(test.TestCase):
       update.run()
       v0_val, v1_val = self.evaluate([var0, var1])
-      # Let g to be gradient accumulator, gg to be gradient squared
-      # accumulator, T be the global step, lr is the learning rate, and k the
-      # initial gradient squared accumulator value.
+      # Let g be the gradient accumulator, gg be the gradient squared
+      # accumulator, T be the global step, lr be the learning rate,
+      # and k the initial gradient squared accumulator value.
       # w = \dfrac{sign(-g)*lr*|g - l1*T|_{+}}{l2*T*lr + \sqrt{k+gg}}
       # For -1*3.0*(0.1 - 0)/(0 + sqrt(0.1 + 0.1*0.1)) = -0.904534
       # similarly for others.
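For context, here is a minimal sketch of the kind of step these tests exercise, using the public tf.compat.v1.train.AdagradDAOptimizer with the values implied by the comment (lr = 3.0, initial accumulator 0.1, gradient 0.1, no l1/l2); the single-element variable and gradient are assumptions for illustration, since the test's setup code is not part of this diff:

import tensorflow as tf

tf.compat.v1.disable_eager_execution()

# One AdagradDA step; AdagradDA requires an int64 global_step variable.
global_step = tf.compat.v1.Variable(0, dtype=tf.int64, trainable=False)
var0 = tf.compat.v1.Variable([0.0])   # assumed starting value
grad0 = tf.constant([0.1])            # gradient from the worked example
opt = tf.compat.v1.train.AdagradDAOptimizer(
    3.0,
    global_step,
    initial_gradient_squared_accumulator_value=0.1,
    l1_regularization_strength=0.0,
    l2_regularization_strength=0.0)
update = opt.apply_gradients([(grad0, var0)], global_step=global_step)

with tf.compat.v1.Session() as sess:
  sess.run(tf.compat.v1.global_variables_initializer())
  sess.run(update)
  print(sess.run(var0))  # ~[-0.904534], the value worked out in the comment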