From 93787127d8edb9cfb740ebd14451fbd838a4f8b9 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Thu, 7 May 2020 05:54:17 -0400
Subject: [PATCH 01/12] We need to bring in the classes from
 advanced_activations if there are no custom objects specified. When no custom
 objects are specified, our module_objects/globals() in
 activations.deserialize() won't contain any advanced_activations.

---
 tensorflow/python/keras/activations.py               | 12 +++++++++++-
 tensorflow/python/keras/activations_test.py          |  7 +++++++
 .../python/keras/layers/advanced_activations.py      |  2 ++
 3 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index 0ee4a91f417..9b958af9321 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -26,6 +26,7 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops import nn
 from tensorflow.python.util import dispatch
 from tensorflow.python.util.tf_export import keras_export
+from tensorflow.python.keras.layers import advanced_activations
 
 # b/123041942
 # In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras
@@ -454,9 +455,18 @@ def deserialize(name, custom_objects=None):
       ValueError: `Unknown activation function` if the input string does not
       denote any defined Tensorflow activation function.
   """
+  globs = globals()
+
+  # only replace missing activations, when there are no custom objects
+  if custom_objects is None:
+    advanced_activations_globs = advanced_activations.get_globals()
+    for key,val in advanced_activations_globs.items():
+      if key not in globs:
+        globs[key] = val
+
   return deserialize_keras_object(
       name,
-      module_objects=globals(),
+      module_objects=globs,
       custom_objects=custom_objects,
       printable_module_name='activation function')
 
diff --git a/tensorflow/python/keras/activations_test.py b/tensorflow/python/keras/activations_test.py
index f951076efbb..756ab131148 100644
--- a/tensorflow/python/keras/activations_test.py
+++ b/tensorflow/python/keras/activations_test.py
@@ -64,12 +64,19 @@ class KerasActivationsTest(test.TestCase, parameterized.TestCase):
     activation = advanced_activations.LeakyReLU(alpha=0.1)
     layer = core.Dense(3, activation=activation)
     config = serialization.serialize(layer)
+    # with custom objects
     deserialized_layer = serialization.deserialize(
         config, custom_objects={'LeakyReLU': activation})
     self.assertEqual(deserialized_layer.__class__.__name__,
                      layer.__class__.__name__)
     self.assertEqual(deserialized_layer.activation.__class__.__name__,
                      activation.__class__.__name__)
+    # without custom objects
+    deserialized_layer = serialization.deserialize(config)
+    self.assertEqual(deserialized_layer.__class__.__name__,
+                     layer.__class__.__name__)
+    self.assertEqual(deserialized_layer.activation.__class__.__name__,
+                     activation.__class__.__name__)
 
   def test_softmax(self):
     x = backend.placeholder(ndim=2)
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index 7cb40c172b7..762c66461fb 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -28,6 +28,8 @@ from tensorflow.python.keras.utils import tf_utils
 from tensorflow.python.ops import math_ops
 from tensorflow.python.util.tf_export import keras_export
 
+def get_globals():
+  return globals()
 
 @keras_export('keras.layers.LeakyReLU')
 class LeakyReLU(Layer):

From b42f30e171ec7db7c8b91a2f5c5b61a72eaa125f Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Wed, 3 Jun 2020 14:01:06 -0400
Subject: [PATCH 02/12] no need for if statement since custom object dict is
 checked before module objects

---
 tensorflow/python/keras/activations.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index 9b958af9321..c1b89346e5a 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -457,12 +457,11 @@ def deserialize(name, custom_objects=None):
   """
   globs = globals()
 
-  # only replace missing activations, when there are no custom objects
-  if custom_objects is None:
-    advanced_activations_globs = advanced_activations.get_globals()
-    for key,val in advanced_activations_globs.items():
-      if key not in globs:
-        globs[key] = val
+  # only replace missing activations
+  advanced_activations_globs = advanced_activations.get_globals()
+  for key,val in advanced_activations_globs.items():
+    if key not in globs:
+      globs[key] = val
 
   return deserialize_keras_object(
       name,

From bb3cec3b33aa5cb8a2a8d4918ae4de203dd941e1 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Wed, 3 Jun 2020 16:33:38 -0400
Subject: [PATCH 03/12] fixing pylint issues

---
 tensorflow/python/keras/activations.py                 | 2 +-
 tensorflow/python/keras/layers/advanced_activations.py | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index c1b89346e5a..960f5b18819 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -459,7 +459,7 @@ def deserialize(name, custom_objects=None):
 
   # only replace missing activations
   advanced_activations_globs = advanced_activations.get_globals()
-  for key,val in advanced_activations_globs.items():
+  for key, val in advanced_activations_globs.items():
     if key not in globs:
       globs[key] = val
 
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index 762c66461fb..058ed0c8f51 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -28,9 +28,11 @@ from tensorflow.python.keras.utils import tf_utils
 from tensorflow.python.ops import math_ops
 from tensorflow.python.util.tf_export import keras_export
 
+
 def get_globals():
   return globals()
 
+
 @keras_export('keras.layers.LeakyReLU')
 class LeakyReLU(Layer):
   """Leaky version of a Rectified Linear Unit.

From 99d2416b614c94d9b4e74fb334ea81c3e8e15635 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Fri, 5 Jun 2020 18:20:16 -0400
Subject: [PATCH 04/12] New feature. Use new param log_all in CSVLogger to log
 all elements in training even if some epochs don't contain the same elements.

---
 tensorflow/python/keras/callbacks.py      | 35 +++++++++++++++++++----
 tensorflow/python/keras/callbacks_test.py |  6 +++-
 2 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/tensorflow/python/keras/callbacks.py b/tensorflow/python/keras/callbacks.py
index 1bca5419774..90e2b8003a7 100644
--- a/tensorflow/python/keras/callbacks.py
+++ b/tensorflow/python/keras/callbacks.py
@@ -2381,12 +2381,18 @@ class CSVLogger(Callback):
       separator: String used to separate elements in the CSV file.
       append: Boolean. True: append if file exists (useful for continuing
           training). False: overwrite existing file.
+      log_all: Boolean. True: log all elements, even elements that are
+          only recorded every x epochs (ex. validation sometimes is
+          only recorded every validation_freq). False: Don't log all
+          elements, only log the elements that are present in every epoch.
   """
 
-  def __init__(self, filename, separator=',', append=False):
+  def __init__(self, filename, separator=',', append=False, log_all=False):
     self.sep = separator
     self.filename = path_to_string(filename)
     self.append = append
+    self.log_all = log_all
+    self._row_dicts = []
     self.writer = None
     self.keys = None
     self.append_header = True
@@ -2424,6 +2430,10 @@ class CSVLogger(Callback):
 
     if self.keys is None:
       self.keys = sorted(logs.keys())
+    elif self.log_all and len(self.keys) < len(logs.keys()):
+      # have to make a new writer to accommodate for the new keys
+      self.keys = sorted(logs.keys())
+      self.writer = None
 
     if self.model.stop_training:
       # We set NA so that csv parsers do not fail for this last epoch.
@@ -2442,15 +2452,30 @@ class CSVLogger(Callback):
           self.csv_file,
           fieldnames=fieldnames,
           dialect=CustomDialect)
-      if self.append_header:
+      # if user wants to log all, then we append_header
+      # at the end of training
+      if self.append_header and not self.log_all:
         self.writer.writeheader()
 
     row_dict = collections.OrderedDict({'epoch': epoch})
-    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
-    self.writer.writerow(row_dict)
-    self.csv_file.flush()
+    row_dict.update((key, handle_value(logs[key]))
+                    for key in self.keys if key in logs)
+    # if user wants to log all, then we write all rows to csv file
+    # at the end of training
+    if not self.log_all:
+      self.writer.writerow(row_dict)
+      self.csv_file.flush()
+    else:
+      self._row_dicts.append(row_dict)
 
   def on_train_end(self, logs=None):
+    if self.log_all:
+      if self.append_header:
+        self.writer.writeheader()
+      self.writer.writerows(self._row_dicts)
+      self._row_dicts = []
+      self.csv_file.flush()
+
     self.csv_file.close()
     self.writer = None
 
diff --git a/tensorflow/python/keras/callbacks_test.py b/tensorflow/python/keras/callbacks_test.py
index 28f85304688..be86e19c7a2 100644
--- a/tensorflow/python/keras/callbacks_test.py
+++ b/tensorflow/python/keras/callbacks_test.py
@@ -1243,7 +1243,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
     self.assertTrue(hasattr(reduce_on_plateau, 'min_delta'))
     self.assertEqual(reduce_on_plateau.min_delta, 1e-13)
 
-  def test_CSVLogger(self):
+  def test_CSVLogger(self, log_all=False):
     with self.cached_session():
       np.random.seed(1337)
       temp_dir = self.get_temp_dir()
@@ -1306,6 +1306,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
           y_train,
           batch_size=BATCH_SIZE,
           validation_data=(x_test, y_test),
+          validation_freq=1 if not log_all else 2,
           callbacks=cbks,
           epochs=2,
           verbose=0)
@@ -1320,6 +1321,9 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
 
       os.remove(filepath)
 
+  def test_CSVLogger_log_all(self):
+    self.test_CSVLogger(log_all=True)
+
   def test_stop_training_csv(self):
     # Test that using the CSVLogger callback with the TerminateOnNaN callback
     # does not result in invalid CSVs.

From f268f059e16e619cab89a347d0ca597945e7f0d2 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Fri, 5 Jun 2020 18:29:08 -0400
Subject: [PATCH 05/12] Revert "New feature. Use new param log_all in CSVLogger
 to log all elements in training even if some epochs don't contain the same
 elements."

This reverts commit 204913109700abfa7fd620bf05c4603dc7795f34.
---
 tensorflow/python/keras/callbacks.py      | 35 ++++-------------------
 tensorflow/python/keras/callbacks_test.py |  6 +---
 2 files changed, 6 insertions(+), 35 deletions(-)

diff --git a/tensorflow/python/keras/callbacks.py b/tensorflow/python/keras/callbacks.py
index 90e2b8003a7..1bca5419774 100644
--- a/tensorflow/python/keras/callbacks.py
+++ b/tensorflow/python/keras/callbacks.py
@@ -2381,18 +2381,12 @@ class CSVLogger(Callback):
       separator: String used to separate elements in the CSV file.
       append: Boolean. True: append if file exists (useful for continuing
           training). False: overwrite existing file.
-      log_all: Boolean. True: log all elements, even elements that are
-          only recorded every x epochs (ex. validation sometimes is
-          only recorded every validation_freq). False: Don't log all
-          elements, only log the elements that are present in every epoch.
   """
 
-  def __init__(self, filename, separator=',', append=False, log_all=False):
+  def __init__(self, filename, separator=',', append=False):
     self.sep = separator
     self.filename = path_to_string(filename)
     self.append = append
-    self.log_all = log_all
-    self._row_dicts = []
     self.writer = None
     self.keys = None
     self.append_header = True
@@ -2430,10 +2424,6 @@ class CSVLogger(Callback):
 
     if self.keys is None:
       self.keys = sorted(logs.keys())
-    elif self.log_all and len(self.keys) < len(logs.keys()):
-      # have to make a new writer to accommodate for the new keys
-      self.keys = sorted(logs.keys())
-      self.writer = None
 
     if self.model.stop_training:
       # We set NA so that csv parsers do not fail for this last epoch.
@@ -2452,30 +2442,15 @@ class CSVLogger(Callback):
           self.csv_file,
           fieldnames=fieldnames,
           dialect=CustomDialect)
-      # if user wants to log all, then we append_header
-      # at the end of training
-      if self.append_header and not self.log_all:
+      if self.append_header:
         self.writer.writeheader()
 
     row_dict = collections.OrderedDict({'epoch': epoch})
-    row_dict.update((key, handle_value(logs[key]))
-                    for key in self.keys if key in logs)
-    # if user wants to log all, then we write all rows to csv file
-    # at the end of training
-    if not self.log_all:
-      self.writer.writerow(row_dict)
-      self.csv_file.flush()
-    else:
-      self._row_dicts.append(row_dict)
+    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
+    self.writer.writerow(row_dict)
+    self.csv_file.flush()
 
   def on_train_end(self, logs=None):
-    if self.log_all:
-      if self.append_header:
-        self.writer.writeheader()
-      self.writer.writerows(self._row_dicts)
-      self._row_dicts = []
-      self.csv_file.flush()
-
     self.csv_file.close()
     self.writer = None
 
diff --git a/tensorflow/python/keras/callbacks_test.py b/tensorflow/python/keras/callbacks_test.py
index be86e19c7a2..28f85304688 100644
--- a/tensorflow/python/keras/callbacks_test.py
+++ b/tensorflow/python/keras/callbacks_test.py
@@ -1243,7 +1243,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
     self.assertTrue(hasattr(reduce_on_plateau, 'min_delta'))
     self.assertEqual(reduce_on_plateau.min_delta, 1e-13)
 
-  def test_CSVLogger(self, log_all=False):
+  def test_CSVLogger(self):
     with self.cached_session():
       np.random.seed(1337)
       temp_dir = self.get_temp_dir()
@@ -1306,7 +1306,6 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
           y_train,
           batch_size=BATCH_SIZE,
           validation_data=(x_test, y_test),
-          validation_freq=1 if not log_all else 2,
           callbacks=cbks,
           epochs=2,
           verbose=0)
@@ -1321,9 +1320,6 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
 
       os.remove(filepath)
 
-  def test_CSVLogger_log_all(self):
-    self.test_CSVLogger(log_all=True)
-
   def test_stop_training_csv(self):
     # Test that using the CSVLogger callback with the TerminateOnNaN callback
     # does not result in invalid CSVs.

From e9a7eec1dc6f2e2e72f468c167b93795ff60544b Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Thu, 7 May 2020 05:54:17 -0400
Subject: [PATCH 06/12] We need to bring in the classes from
 advanced_activations if there are no custom objects specified. When no custom
 objects are specified, our module_objects/globals() in
 activations.deserialize() won't contain any advanced_activations.

---
 tensorflow/python/keras/activations.py               | 12 +++++++++++-
 tensorflow/python/keras/activations_test.py          |  7 +++++++
 .../python/keras/layers/advanced_activations.py      |  2 ++
 3 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index fe0bf5977f9..28d6f18dcf8 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -26,6 +26,7 @@ from tensorflow.python.ops import math_ops
 from tensorflow.python.ops import nn
 from tensorflow.python.util import dispatch
 from tensorflow.python.util.tf_export import keras_export
+from tensorflow.python.keras.layers import advanced_activations
 
 # b/123041942
 # In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras
@@ -525,9 +526,18 @@ def deserialize(name, custom_objects=None):
       ValueError: `Unknown activation function` if the input string does not
       denote any defined Tensorflow activation function.
   """
+  globs = globals()
+
+  # only replace missing activations, when there are no custom objects
+  if custom_objects is None:
+    advanced_activations_globs = advanced_activations.get_globals()
+    for key,val in advanced_activations_globs.items():
+      if key not in globs:
+        globs[key] = val
+
   return deserialize_keras_object(
       name,
-      module_objects=globals(),
+      module_objects=globs,
       custom_objects=custom_objects,
       printable_module_name='activation function')
 
diff --git a/tensorflow/python/keras/activations_test.py b/tensorflow/python/keras/activations_test.py
index ddd3863a3f6..e2bdec0dd45 100644
--- a/tensorflow/python/keras/activations_test.py
+++ b/tensorflow/python/keras/activations_test.py
@@ -65,12 +65,19 @@ class KerasActivationsTest(test.TestCase, parameterized.TestCase):
     activation = advanced_activations.LeakyReLU(alpha=0.1)
     layer = core.Dense(3, activation=activation)
     config = serialization.serialize(layer)
+    # with custom objects
     deserialized_layer = serialization.deserialize(
         config, custom_objects={'LeakyReLU': activation})
     self.assertEqual(deserialized_layer.__class__.__name__,
                      layer.__class__.__name__)
     self.assertEqual(deserialized_layer.activation.__class__.__name__,
                      activation.__class__.__name__)
+    # without custom objects
+    deserialized_layer = serialization.deserialize(config)
+    self.assertEqual(deserialized_layer.__class__.__name__,
+                     layer.__class__.__name__)
+    self.assertEqual(deserialized_layer.activation.__class__.__name__,
+                     activation.__class__.__name__)
 
   def test_softmax(self):
     x = backend.placeholder(ndim=2)
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index e4323b45dc4..e9ce23654fd 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -29,6 +29,8 @@ from tensorflow.python.keras.utils import tf_utils
 from tensorflow.python.ops import math_ops
 from tensorflow.python.util.tf_export import keras_export
 
+def get_globals():
+  return globals()
 
 @keras_export('keras.layers.LeakyReLU')
 class LeakyReLU(Layer):

From f9ff67c03adf5f0a7b9e0455f26486902f9d4e8a Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Wed, 3 Jun 2020 14:01:06 -0400
Subject: [PATCH 07/12] no need for if statement since custom object dict is
 checked before module objects

---
 tensorflow/python/keras/activations.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index 28d6f18dcf8..32e54f8059f 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -528,12 +528,11 @@ def deserialize(name, custom_objects=None):
   """
   globs = globals()
 
-  # only replace missing activations, when there are no custom objects
-  if custom_objects is None:
-    advanced_activations_globs = advanced_activations.get_globals()
-    for key,val in advanced_activations_globs.items():
-      if key not in globs:
-        globs[key] = val
+  # only replace missing activations
+  advanced_activations_globs = advanced_activations.get_globals()
+  for key,val in advanced_activations_globs.items():
+    if key not in globs:
+      globs[key] = val
 
   return deserialize_keras_object(
       name,

From 7b8d3d5894e95b0774ca8615b8b542824a973448 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Wed, 3 Jun 2020 16:33:38 -0400
Subject: [PATCH 08/12] fixing pylint issues

---
 tensorflow/python/keras/activations.py                 | 2 +-
 tensorflow/python/keras/layers/advanced_activations.py | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/activations.py b/tensorflow/python/keras/activations.py
index 32e54f8059f..37e119b24ca 100644
--- a/tensorflow/python/keras/activations.py
+++ b/tensorflow/python/keras/activations.py
@@ -530,7 +530,7 @@ def deserialize(name, custom_objects=None):
 
   # only replace missing activations
   advanced_activations_globs = advanced_activations.get_globals()
-  for key,val in advanced_activations_globs.items():
+  for key, val in advanced_activations_globs.items():
     if key not in globs:
       globs[key] = val
 
diff --git a/tensorflow/python/keras/layers/advanced_activations.py b/tensorflow/python/keras/layers/advanced_activations.py
index e9ce23654fd..456b6758dc6 100644
--- a/tensorflow/python/keras/layers/advanced_activations.py
+++ b/tensorflow/python/keras/layers/advanced_activations.py
@@ -29,9 +29,11 @@ from tensorflow.python.keras.utils import tf_utils
 from tensorflow.python.ops import math_ops
 from tensorflow.python.util.tf_export import keras_export
 
+
 def get_globals():
   return globals()
 
+
 @keras_export('keras.layers.LeakyReLU')
 class LeakyReLU(Layer):
   """Leaky version of a Rectified Linear Unit.

From 5f631dd558f84eb3c15912be4ca7558a89ac856d Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Fri, 5 Jun 2020 18:20:16 -0400
Subject: [PATCH 09/12] New feature. Use new param log_all in CSVLogger to log
 all elements in training even if some epochs don't contain the same elements.

---
 tensorflow/python/keras/callbacks.py      | 35 +++++++++++++++++++----
 tensorflow/python/keras/callbacks_test.py |  6 +++-
 2 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/tensorflow/python/keras/callbacks.py b/tensorflow/python/keras/callbacks.py
index 3469ccb68ef..08e54323b08 100644
--- a/tensorflow/python/keras/callbacks.py
+++ b/tensorflow/python/keras/callbacks.py
@@ -2486,12 +2486,18 @@ class CSVLogger(Callback):
       separator: String used to separate elements in the CSV file.
       append: Boolean. True: append if file exists (useful for continuing
           training). False: overwrite existing file.
+      log_all: Boolean. True: log all elements, even elements that are
+          only recorded every x epochs (ex. validation sometimes is
+          only recorded every validation_freq). False: Don't log all
+          elements, only log the elements that are present in every epoch.
   """
 
-  def __init__(self, filename, separator=',', append=False):
+  def __init__(self, filename, separator=',', append=False, log_all=False):
     self.sep = separator
     self.filename = path_to_string(filename)
     self.append = append
+    self.log_all = log_all
+    self._row_dicts = []
     self.writer = None
     self.keys = None
     self.append_header = True
@@ -2529,6 +2535,10 @@ class CSVLogger(Callback):
 
     if self.keys is None:
       self.keys = sorted(logs.keys())
+    elif self.log_all and len(self.keys) < len(logs.keys()):
+      # have to make a new writer to accommodate for the new keys
+      self.keys = sorted(logs.keys())
+      self.writer = None
 
     if self.model.stop_training:
       # We set NA so that csv parsers do not fail for this last epoch.
@@ -2547,15 +2557,30 @@ class CSVLogger(Callback):
           self.csv_file,
           fieldnames=fieldnames,
           dialect=CustomDialect)
-      if self.append_header:
+      # if user wants to log all, then we append_header
+      # at the end of training
+      if self.append_header and not self.log_all:
         self.writer.writeheader()
 
     row_dict = collections.OrderedDict({'epoch': epoch})
-    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
-    self.writer.writerow(row_dict)
-    self.csv_file.flush()
+    row_dict.update((key, handle_value(logs[key]))
+                    for key in self.keys if key in logs)
+    # if user wants to log all, then we write all rows to csv file
+    # at the end of training
+    if not self.log_all:
+      self.writer.writerow(row_dict)
+      self.csv_file.flush()
+    else:
+      self._row_dicts.append(row_dict)
 
   def on_train_end(self, logs=None):
+    if self.log_all:
+      if self.append_header:
+        self.writer.writeheader()
+      self.writer.writerows(self._row_dicts)
+      self._row_dicts = []
+      self.csv_file.flush()
+
     self.csv_file.close()
     self.writer = None
 
diff --git a/tensorflow/python/keras/callbacks_test.py b/tensorflow/python/keras/callbacks_test.py
index 9fd8bf86609..933ce15fe8f 100644
--- a/tensorflow/python/keras/callbacks_test.py
+++ b/tensorflow/python/keras/callbacks_test.py
@@ -1301,7 +1301,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
     self.assertTrue(hasattr(reduce_on_plateau, 'min_delta'))
     self.assertEqual(reduce_on_plateau.min_delta, 1e-13)
 
-  def test_CSVLogger(self):
+  def test_CSVLogger(self, log_all=False):
     with self.cached_session():
       np.random.seed(1337)
       temp_dir = self.get_temp_dir()
@@ -1364,6 +1364,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
           y_train,
           batch_size=BATCH_SIZE,
           validation_data=(x_test, y_test),
+          validation_freq=1 if not log_all else 2,
           callbacks=cbks,
           epochs=2,
           verbose=0)
@@ -1378,6 +1379,9 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
 
       os.remove(filepath)
 
+  def test_CSVLogger_log_all(self):
+    self.test_CSVLogger(log_all=True)
+
   def test_stop_training_csv(self):
     # Test that using the CSVLogger callback with the TerminateOnNaN callback
     # does not result in invalid CSVs.

From b4b65a40882907a4655d65d82567463ee50f2177 Mon Sep 17 00:00:00 2001
From: PiyushDatta <piyushdattaca@gmail.com>
Date: Fri, 5 Jun 2020 18:29:08 -0400
Subject: [PATCH 10/12] Revert "New feature. Use new param log_all in CSVLogger
 to log all elements in training even if some epochs don't contain the same
 elements."

This reverts commit 204913109700abfa7fd620bf05c4603dc7795f34.
---
 tensorflow/python/keras/callbacks.py      | 35 ++++-------------------
 tensorflow/python/keras/callbacks_test.py |  6 +---
 2 files changed, 6 insertions(+), 35 deletions(-)

diff --git a/tensorflow/python/keras/callbacks.py b/tensorflow/python/keras/callbacks.py
index 08e54323b08..3469ccb68ef 100644
--- a/tensorflow/python/keras/callbacks.py
+++ b/tensorflow/python/keras/callbacks.py
@@ -2486,18 +2486,12 @@ class CSVLogger(Callback):
       separator: String used to separate elements in the CSV file.
       append: Boolean. True: append if file exists (useful for continuing
           training). False: overwrite existing file.
-      log_all: Boolean. True: log all elements, even elements that are
-          only recorded every x epochs (ex. validation sometimes is
-          only recorded every validation_freq). False: Don't log all
-          elements, only log the elements that are present in every epoch.
   """
 
-  def __init__(self, filename, separator=',', append=False, log_all=False):
+  def __init__(self, filename, separator=',', append=False):
     self.sep = separator
     self.filename = path_to_string(filename)
     self.append = append
-    self.log_all = log_all
-    self._row_dicts = []
     self.writer = None
     self.keys = None
     self.append_header = True
@@ -2535,10 +2529,6 @@ class CSVLogger(Callback):
 
     if self.keys is None:
       self.keys = sorted(logs.keys())
-    elif self.log_all and len(self.keys) < len(logs.keys()):
-      # have to make a new writer to accommodate for the new keys
-      self.keys = sorted(logs.keys())
-      self.writer = None
 
     if self.model.stop_training:
       # We set NA so that csv parsers do not fail for this last epoch.
@@ -2557,30 +2547,15 @@ class CSVLogger(Callback):
           self.csv_file,
           fieldnames=fieldnames,
           dialect=CustomDialect)
-      # if user wants to log all, then we append_header
-      # at the end of training
-      if self.append_header and not self.log_all:
+      if self.append_header:
         self.writer.writeheader()
 
     row_dict = collections.OrderedDict({'epoch': epoch})
-    row_dict.update((key, handle_value(logs[key]))
-                    for key in self.keys if key in logs)
-    # if user wants to log all, then we write all rows to csv file
-    # at the end of training
-    if not self.log_all:
-      self.writer.writerow(row_dict)
-      self.csv_file.flush()
-    else:
-      self._row_dicts.append(row_dict)
+    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
+    self.writer.writerow(row_dict)
+    self.csv_file.flush()
 
   def on_train_end(self, logs=None):
-    if self.log_all:
-      if self.append_header:
-        self.writer.writeheader()
-      self.writer.writerows(self._row_dicts)
-      self._row_dicts = []
-      self.csv_file.flush()
-
     self.csv_file.close()
     self.writer = None
 
diff --git a/tensorflow/python/keras/callbacks_test.py b/tensorflow/python/keras/callbacks_test.py
index 933ce15fe8f..9fd8bf86609 100644
--- a/tensorflow/python/keras/callbacks_test.py
+++ b/tensorflow/python/keras/callbacks_test.py
@@ -1301,7 +1301,7 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
     self.assertTrue(hasattr(reduce_on_plateau, 'min_delta'))
     self.assertEqual(reduce_on_plateau.min_delta, 1e-13)
 
-  def test_CSVLogger(self, log_all=False):
+  def test_CSVLogger(self):
     with self.cached_session():
       np.random.seed(1337)
       temp_dir = self.get_temp_dir()
@@ -1364,7 +1364,6 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
           y_train,
           batch_size=BATCH_SIZE,
           validation_data=(x_test, y_test),
-          validation_freq=1 if not log_all else 2,
           callbacks=cbks,
           epochs=2,
           verbose=0)
@@ -1379,9 +1378,6 @@ class KerasCallbacksTest(keras_parameterized.TestCase):
 
       os.remove(filepath)
 
-  def test_CSVLogger_log_all(self):
-    self.test_CSVLogger(log_all=True)
-
   def test_stop_training_csv(self):
     # Test that using the CSVLogger callback with the TerminateOnNaN callback
     # does not result in invalid CSVs.

From 41f57af899a69a82a037d0689745694a77ac50a9 Mon Sep 17 00:00:00 2001
From: piyushdatta <piyushdattaca@gmail.com>
Date: Thu, 20 Aug 2020 20:55:28 -0400
Subject: [PATCH 11/12] Add advanced_activations to activation lib
 dependencies

---
 tensorflow/python/keras/BUILD | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
index d8eff0f2260..1ece307f142 100755
--- a/tensorflow/python/keras/BUILD
+++ b/tensorflow/python/keras/BUILD
@@ -119,6 +119,7 @@ py_library(
     deps = [
         ":backend",
         "//tensorflow/python/keras/utils:engine_utils",
+        "//tensorflow/python/keras/layers:advanced_activations",
     ],
 )
 

From c16f8793e3768faba3edef6317e1369214a4c28a Mon Sep 17 00:00:00 2001
From: piyushdatta <piyushdattaca@gmail.com>
Date: Wed, 14 Oct 2020 21:53:12 -0400
Subject: [PATCH 12/12] swap these lines to keep in alphabetical order

---
 tensorflow/python/keras/BUILD | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
index 1ece307f142..7c156cad3fb 100755
--- a/tensorflow/python/keras/BUILD
+++ b/tensorflow/python/keras/BUILD
@@ -118,8 +118,8 @@ py_library(
     srcs_version = "PY2AND3",
     deps = [
         ":backend",
-        "//tensorflow/python/keras/utils:engine_utils",
         "//tensorflow/python/keras/layers:advanced_activations",
+        "//tensorflow/python/keras/utils:engine_utils",
     ],
 )