Exposes in TF 1.x some experimental learning rate schedules that were added for TF 2.0.

PiperOrigin-RevId: 231867881
Authored by A. Unique TensorFlower on 2019-01-31 15:37:14 -08:00; committed by TensorFlower Gardener
parent aebb09b43c
commit 7b2ded24ac
7 changed files with 73 additions and 13 deletions
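For context, a minimal usage sketch (not part of this commit's diff): once the `v1=[]` restriction is dropped, these schedules can be constructed via `tf.keras.experimental` in TF 1.x and passed to the Keras (optimizer_v2) optimizers; which TF build first includes the change is an assumption here.

import tensorflow as tf

# Cosine decay with warm restarts (SGDR-style): the first decay period lasts
# 1000 steps, and each subsequent period is t_mul times longer.
schedule = tf.keras.experimental.CosineDecayRestarts(
    initial_learning_rate=0.1,
    first_decay_steps=1000,
    t_mul=2.0,   # default per the golden argspec below
    m_mul=1.0,   # restart at the full initial learning rate
    alpha=0.0)   # decay to zero before each restart

# LearningRateSchedule objects can be passed directly as the learning rate
# of the Keras (optimizer_v2) optimizers.
optimizer = tf.keras.optimizers.SGD(learning_rate=schedule)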


@@ -631,8 +631,7 @@ class CosineDecay(LearningRateSchedule):
     }
-@keras_export("keras.experimental.CosineDecayRestarts",
-              v1=[])
+@keras_export("keras.experimental.CosineDecayRestarts")
 class CosineDecayRestarts(LearningRateSchedule):
   """A LearningRateSchedule that uses a cosine decay schedule with restarts."""
@@ -761,8 +760,7 @@ class CosineDecayRestarts(LearningRateSchedule):
     }
-@keras_export("keras.experimental.LinearCosineDecay",
-              v1=[])
+@keras_export("keras.experimental.LinearCosineDecay")
 class LinearCosineDecay(LearningRateSchedule):
   """A LearningRateSchedule that uses a linear cosine decay schedule."""
@@ -879,8 +877,7 @@ class LinearCosineDecay(LearningRateSchedule):
     }
-@keras_export("keras.experimental.NoisyLinearCosineDecay",
-              v1=[])
+@keras_export("keras.experimental.NoisyLinearCosineDecay")
 class NoisyLinearCosineDecay(LearningRateSchedule):
   """A LearningRateSchedule that uses a noisy linear cosine decay schedule."""


@@ -0,0 +1,18 @@
+path: "tensorflow.keras.experimental.CosineDecayRestarts"
+tf_class {
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.CosineDecayRestarts\'>"
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.LearningRateSchedule\'>"
+  is_instance: "<type \'object\'>"
+  member_method {
+    name: "__init__"
+    argspec: "args=[\'self\', \'initial_learning_rate\', \'first_decay_steps\', \'t_mul\', \'m_mul\', \'alpha\', \'name\'], varargs=None, keywords=None, defaults=[\'2.0\', \'1.0\', \'0.0\', \'None\'], "
+  }
+  member_method {
+    name: "from_config"
+    argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "get_config"
+    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+  }
+}
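The `get_config`/`from_config` pair listed above gives these schedules the usual Keras serialization round trip; a brief hedged sketch:

import tensorflow as tf

schedule = tf.keras.experimental.CosineDecayRestarts(
    initial_learning_rate=0.1, first_decay_steps=1000)

config = schedule.get_config()  # plain dict of the constructor arguments
restored = tf.keras.experimental.CosineDecayRestarts.from_config(config)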


@@ -0,0 +1,18 @@
+path: "tensorflow.keras.experimental.LinearCosineDecay"
+tf_class {
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.LinearCosineDecay\'>"
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.LearningRateSchedule\'>"
+  is_instance: "<type \'object\'>"
+  member_method {
+    name: "__init__"
+    argspec: "args=[\'self\', \'initial_learning_rate\', \'decay_steps\', \'num_periods\', \'alpha\', \'beta\', \'name\'], varargs=None, keywords=None, defaults=[\'0.5\', \'0.0\', \'0.001\', \'None\'], "
+  }
+  member_method {
+    name: "from_config"
+    argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "get_config"
+    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+  }
+}
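Keyed to the argspec above (defaults `num_periods=0.5`, `alpha=0.0`, `beta=0.001`), a hedged sketch of constructing and evaluating the schedule; under TF 1.x graph building the call returns a tensor that still needs a session to evaluate.

import tensorflow as tf

schedule = tf.keras.experimental.LinearCosineDecay(
    initial_learning_rate=0.1,
    decay_steps=10000,
    num_periods=0.5,   # half a cosine cycle over decay_steps
    alpha=0.0,
    beta=0.001)        # small constant added to the decayed fraction

lr_at_5000 = schedule(5000)  # learning-rate tensor for step 5000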


@@ -0,0 +1,18 @@
+path: "tensorflow.keras.experimental.NoisyLinearCosineDecay"
+tf_class {
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.NoisyLinearCosineDecay\'>"
+  is_instance: "<class \'tensorflow.python.keras.optimizer_v2.learning_rate_schedule.LearningRateSchedule\'>"
+  is_instance: "<type \'object\'>"
+  member_method {
+    name: "__init__"
+    argspec: "args=[\'self\', \'initial_learning_rate\', \'decay_steps\', \'initial_variance\', \'variance_decay\', \'num_periods\', \'alpha\', \'beta\', \'name\'], varargs=None, keywords=None, defaults=[\'1.0\', \'0.55\', \'0.5\', \'0.0\', \'0.001\', \'None\'], "
+  }
+  member_method {
+    name: "from_config"
+    argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "get_config"
+    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+  }
+}
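The noisy variant adds the two leading parameters visible in the argspec above; roughly, Gaussian noise whose variance shrinks like `initial_variance / (1 + step) ** variance_decay` is added to the linear decay term. A hedged construction sketch using the documented defaults:

import tensorflow as tf

schedule = tf.keras.experimental.NoisyLinearCosineDecay(
    initial_learning_rate=0.1,
    decay_steps=10000,
    initial_variance=1.0,   # default per the argspec
    variance_decay=0.55,    # default per the argspec
    num_periods=0.5,
    alpha=0.0,
    beta=0.001)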


@@ -4,6 +4,18 @@ tf_module {
     name: "CosineDecay"
     mtype: "<type \'type\'>"
   }
+  member {
+    name: "CosineDecayRestarts"
+    mtype: "<type \'type\'>"
+  }
+  member {
+    name: "LinearCosineDecay"
+    mtype: "<type \'type\'>"
+  }
+  member {
+    name: "NoisyLinearCosineDecay"
+    mtype: "<type \'type\'>"
+  }
   member {
     name: "PeepholeLSTMCell"
     mtype: "<type \'type\'>"


@@ -827,12 +827,8 @@ class TFAPIChangeSpec(ast_edits.APIChangeSpec):
     decay_function_comment = (
         ast_edits.INFO,
-        "<function name> has been changed to return a callable instead "
-        "of a tensor when graph building, but its functionality remains "
-        "unchanged during eager execution (returns a callable like "
-        "before). The converter cannot detect and fix this reliably, so "
-        "this usage has been converted to compat.v1 (even though it may already"
-        " be correct).\n"
+        "To use learning rate decay schedules with TensorFlow 2.0, switch to "
+        "the schedules in `tf.keras.optimizers.schedules`.\n"
     )
     assert_return_type_comment = (
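The rewritten converter message points users at the schedule classes rather than a compat.v1 rewrite. A hand-written sketch of what that migration looks like in practice (the particular decay function chosen here is an illustration, not something the converter emits):

import tensorflow as tf

# TF 1.x style: tf.compat.v1.train.exponential_decay(learning_rate, global_step,
# decay_steps, decay_rate) returns a decayed learning-rate tensor.

# TF 2.0 style: a schedule object passed straight to a Keras optimizer.
lr = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=0.1, decay_steps=1000, decay_rate=0.96)
optimizer = tf.keras.optimizers.SGD(learning_rate=lr)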


@@ -409,7 +409,8 @@ bazel-bin/tensorflow/tools/compatibility/update/generate_v2_reorders_map
       text = "%s(a, b)\n" % decay
       _, report, unused_errors, _ = self._upgrade(text)
-      self.assertIn("%s has been changed to return a callable" % decay, report)
+      self.assertIn("switch to the schedules in "
+                    "`tf.keras.optimizers.schedules`", report)
   def testMetrics(self):
     metrics = [