Support optimizer function in Dnn/Linear/.. Estimators.

Change: 126354629
Mustafa Ispir 2016-06-30 15:03:20 -08:00 committed by TensorFlower Gardener
parent 01fb3ef365
commit 69abcd7ec2
2 changed files with 48 additions and 15 deletions
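With this change, the optimizer arguments on these estimators (e.g. linear_optimizer and dnn_optimizer on DNNLinearCombinedClassifier) accept a zero-argument function returning a tf.train.Optimizer, in addition to the existing string-name and optimizer-instance forms. A minimal usage sketch based on the test added below (the feature column setup here is illustrative, not part of the commit):

    import tensorflow as tf

    def make_optimizer():
      # Called lazily inside the estimator's graph, so any ops the
      # optimizer needs (e.g. learning-rate schedules) attach there.
      return tf.train.AdagradOptimizer(learning_rate=0.1)

    feature_columns = [
        tf.contrib.layers.real_valued_column('feature', dimension=4)]
    classifier = tf.contrib.learn.DNNLinearCombinedClassifier(
        linear_feature_columns=feature_columns,
        linear_optimizer=make_optimizer,
        dnn_feature_columns=feature_columns,
        dnn_hidden_units=[3, 3],
        dnn_optimizer=make_optimizer)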
tensorflow/contrib/learn/python/learn/estimators


@@ -130,6 +130,14 @@ class _ComposableModel(object):
     return []
 
+  def _get_optimizer(self):
+    if (self._optimizer is None or isinstance(self._optimizer,
+                                              six.string_types)):
+      self._optimizer = self._get_default_optimizer(self._optimizer)
+    elif callable(self._optimizer):
+      self._optimizer = self._optimizer()
+    return self._optimizer
+
   def _get_default_optimizer(self, optimizer_name=None):
     raise NotImplementedError
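The resolution logic is easiest to read outside the class. A standalone mirror of the new dispatch, for illustration only (resolve_optimizer is not part of the API):

    import six

    def resolve_optimizer(optimizer, get_default):
      # None or a string name: fall back to the subclass default.
      if optimizer is None or isinstance(optimizer, six.string_types):
        return get_default(optimizer)
      # A function: call it to build the optimizer lazily.
      if callable(optimizer):
        return optimizer()
      # Otherwise assume it is already an optimizer instance.
      return optimizer

One subtlety: an optimizer class (as opposed to an instance) is itself callable, so passing a class would invoke it with no arguments here.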
@@ -173,14 +181,12 @@ class _LinearComposableModel(_ComposableModel):
                              name="linear")
     return logits
 
-  def _get_optimizer(self):
-    if self._optimizer is None:
-      self._optimizer = "Ftrl"
-    if isinstance(self._optimizer, six.string_types):
-      default_learning_rate = 1. / math.sqrt(len(self._get_feature_columns()))
-      self._optimizer = layers.OPTIMIZER_CLS_NAMES[self._optimizer](
-          learning_rate=default_learning_rate)
-    return self._optimizer
+  def _get_default_optimizer(self, optimizer_name=None):
+    if optimizer_name is None:
+      optimizer_name = "Ftrl"
+    default_learning_rate = 1. / math.sqrt(len(self._get_feature_columns()))
+    return layers.OPTIMIZER_CLS_NAMES[optimizer_name](
+        learning_rate=default_learning_rate)
 
 
 class _DNNComposableModel(_ComposableModel):
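The default for the linear model is unchanged, just relocated: FTRL with learning rate 1/sqrt(number of feature columns), constructed via the layers.OPTIMIZER_CLS_NAMES lookup. For example, a model with four feature columns gets a default learning rate of 1/sqrt(4) = 0.5.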
@@ -269,13 +275,10 @@ class _DNNComposableModel(_ComposableModel):
     self._add_hidden_layer_summary(logits, "dnn_logits")
     return logits
 
-  def _get_optimizer(self):
-    if self._optimizer is None:
-      self._optimizer = "Adagrad"
-    if isinstance(self._optimizer, six.string_types):
-      self._optimizer = layers.OPTIMIZER_CLS_NAMES[self._optimizer](
-          learning_rate=0.05)
-    return self._optimizer
+  def _get_default_optimizer(self, optimizer_name=None):
+    if optimizer_name is None:
+      optimizer_name = "Adagrad"
+    return layers.OPTIMIZER_CLS_NAMES[optimizer_name](learning_rate=0.05)
 
 
 # TODO(ispir): Increase test coverage
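The DNN default is likewise factored out: Adagrad with a fixed learning rate of 0.05. Each subclass now supplies only its default in _get_default_optimizer, while the string/function/instance dispatch lives once in the base class.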

tensorflow/contrib/learn/python/learn/estimators

@@ -262,6 +262,36 @@ class DNNLinearCombinedClassifierTest(tf.test.TestCase):
     scores = classifier.evaluate(input_fn=_iris_input_logistic_fn, steps=100)
     self.assertGreater(scores['accuracy'], 0.9)
 
+  def testCustomOptimizerByFunction(self):
+    """Tests binary classification using a function as the optimizer."""
+    iris = _prepare_iris_data_for_logistic_regression()
+    cont_features = [
+        tf.contrib.layers.real_valued_column('feature', dimension=4)
+    ]
+    bucketized_features = [
+        tf.contrib.layers.bucketized_column(
+            cont_features[0], _get_quantile_based_buckets(iris.data, 10))
+    ]
+
+    def _optimizer_exp_decay():
+      global_step = tf.contrib.framework.get_global_step()
+      learning_rate = tf.train.exponential_decay(learning_rate=0.1,
+                                                 global_step=global_step,
+                                                 decay_steps=100,
+                                                 decay_rate=0.001)
+      return tf.train.AdagradOptimizer(learning_rate=learning_rate)
+
+    classifier = tf.contrib.learn.DNNLinearCombinedClassifier(
+        linear_feature_columns=bucketized_features,
+        linear_optimizer=_optimizer_exp_decay,
+        dnn_feature_columns=cont_features,
+        dnn_hidden_units=[3, 3],
+        dnn_optimizer=_optimizer_exp_decay)
+    classifier.fit(input_fn=_iris_input_logistic_fn, steps=100)
+    scores = classifier.evaluate(input_fn=_iris_input_logistic_fn, steps=100)
+    self.assertGreater(scores['accuracy'], 0.9)
+
   def testPredict(self):
     """Tests weight column in evaluation."""
 
     def _input_fn_train():
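The function form is what makes this test possible: tf.train.exponential_decay creates ops that must live in the estimator's graph and read its global step, so the optimizer cannot be built at estimator-construction time. Passing the zero-argument _optimizer_exp_decay defers construction until the model function runs, at which point tf.contrib.framework.get_global_step() finds the step counter of the correct graph.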