From 1954120b8d3fae63e874f1fb28b7a08bf9c02297 Mon Sep 17 00:00:00 2001
From: Moses Marin
Date: Wed, 10 Apr 2019 22:14:42 -0700
Subject: [PATCH 1/3] change format of references

change format of references
---
 tensorflow/python/keras/optimizer_v2/adam.py | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/tensorflow/python/keras/optimizer_v2/adam.py b/tensorflow/python/keras/optimizer_v2/adam.py
index 29d3beea2b4..9c2241d2278 100644
--- a/tensorflow/python/keras/optimizer_v2/adam.py
+++ b/tensorflow/python/keras/optimizer_v2/adam.py
@@ -32,16 +32,13 @@ class Adam(optimizer_v2.OptimizerV2):
   """Optimizer that implements the Adam algorithm.
 
   Adam optimization is a stochastic gradient descent method that is based on
-  adaptive estimation of first-order and second-order moments. According to the
-  reference, the method is 'computationally efficient, has little memory
+  adaptive estimation of first-order and second-order moments. According to the paper
+  [Adam: A Method for Stochastic Optimization. Kingma et al., 2014](http://arxiv.org/abs/1412.6980),
+  the method is "*computationally efficient, has little memory
   requirement, invariant to diagonal rescaling of gradients, and is well suited
-  for problems that are large in terms of data/parameters'.
+  for problems that are large in terms of data/parameters*".
 
-  # References
-    See [Kingma et al., 2014](http://arxiv.org/abs/1412.6980)
-    ([pdf](http://arxiv.org/pdf/1412.6980.pdf)).
-    For AMSGrad see [Reddi et al., 2-18]
-    (https://openreview.net/pdf?id=ryQu7f-RZ)
+  For AMSGrad see [On The Convergence Of Adam And Beyond. Reddi et al., 5-8](https://openreview.net/pdf?id=ryQu7f-RZ).
   """
 
   def __init__(self,

From a96f9f0b1609a93ac823ab01e8e84c082d40b1e4 Mon Sep 17 00:00:00 2001
From: Moses Marin
Date: Fri, 12 Apr 2019 15:58:39 -0700
Subject: [PATCH 2/3] fix ubuntu sanity build failures (line-too-long)

fix ubuntu sanity build failures (line-too-long)
---
 tensorflow/python/keras/optimizer_v2/adam.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/optimizer_v2/adam.py b/tensorflow/python/keras/optimizer_v2/adam.py
index 9c2241d2278..97ead0583f9 100644
--- a/tensorflow/python/keras/optimizer_v2/adam.py
+++ b/tensorflow/python/keras/optimizer_v2/adam.py
@@ -32,7 +32,8 @@ class Adam(optimizer_v2.OptimizerV2):
   """Optimizer that implements the Adam algorithm.
 
   Adam optimization is a stochastic gradient descent method that is based on
-  adaptive estimation of first-order and second-order moments. According to the paper
+  adaptive estimation of first-order and second-order moments.
+  According to the paper
   [Adam: A Method for Stochastic Optimization. Kingma et al., 2014](http://arxiv.org/abs/1412.6980),
   the method is "*computationally efficient, has little memory
   requirement, invariant to diagonal rescaling of gradients, and is well suited

From 118ff148f442c02798cb6391c25e642b38b2ac47 Mon Sep 17 00:00:00 2001
From: Moses Marin
Date: Mon, 15 Apr 2019 20:30:50 -0700
Subject: [PATCH 3/3] split long line

split long line
---
 tensorflow/python/keras/optimizer_v2/adam.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tensorflow/python/keras/optimizer_v2/adam.py b/tensorflow/python/keras/optimizer_v2/adam.py
index 97ead0583f9..f9ff0a98e9d 100644
--- a/tensorflow/python/keras/optimizer_v2/adam.py
+++ b/tensorflow/python/keras/optimizer_v2/adam.py
@@ -34,12 +34,14 @@ class Adam(optimizer_v2.OptimizerV2):
   Adam optimization is a stochastic gradient descent method that is based on
   adaptive estimation of first-order and second-order moments.
   According to the paper
-  [Adam: A Method for Stochastic Optimization. Kingma et al., 2014](http://arxiv.org/abs/1412.6980),
+  [Adam: A Method for Stochastic Optimization. Kingma et al.,
+  2014](http://arxiv.org/abs/1412.6980),
   the method is "*computationally efficient, has little memory
   requirement, invariant to diagonal rescaling of gradients, and is well suited
   for problems that are large in terms of data/parameters*".
 
-  For AMSGrad see [On The Convergence Of Adam And Beyond. Reddi et al., 5-8](https://openreview.net/pdf?id=ryQu7f-RZ).
+  For AMSGrad see [On The Convergence Of Adam And Beyond.
+  Reddi et al., 5-8](https://openreview.net/pdf?id=ryQu7f-RZ).
   """
 
   def __init__(self,
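
For context on the API whose docstring these patches reformat: Adam adapts per-parameter step sizes from running estimates of the first and second moments of the gradients, and the AMSGrad variant from Reddi et al. is enabled with the `amsgrad` flag. Below is a minimal usage sketch, assuming a TF 2.x-style `tf.keras` API; the model, input shape, loss, and hyperparameter values are illustrative placeholders and are not part of the patch.

import tensorflow as tf

# Toy model; layer sizes and input shape are placeholders for illustration.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation='relu', input_shape=(20,)),
    tf.keras.layers.Dense(1),
])

# The optimizer documented by the patched docstring, with defaults spelled
# out; amsgrad=True switches on the "On the Convergence of Adam and Beyond"
# variant referenced above.
optimizer = tf.keras.optimizers.Adam(
    learning_rate=0.001,  # step size
    beta_1=0.9,           # decay rate for the first-moment (mean) estimate
    beta_2=0.999,         # decay rate for the second-moment (uncentered variance) estimate
    epsilon=1e-7,         # small constant for numerical stability
    amsgrad=True)

model.compile(optimizer=optimizer, loss='mse')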