Add tf.tables_initializer as a replacement for tf.initialize_all_tables, and
update callers in tensorflow. This deprecates tf.initialize_all_tables.
Change: 144575599

commit 9654cf9e2d
parent e72067896c
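
At every call site the change is a straight rename: the op that initializes all lookup tables in the current graph is now created by `tf.tables_initializer()` (`data_flow_ops.tables_initializer()` internally), and `tf.initialize_all_tables()` survives only as a deprecated alias for it. Below is a minimal sketch of the migration in the TF 1.x graph/session style used throughout this diff; the mapping values are illustrative, not taken from this commit:

```python
import tensorflow as tf

# Any initializable lookup table works the same way; this mirrors the
# contrib.lookup docstring samples further down (the mapping is made up).
mapping_strings = tf.constant(["emerson", "lake", "palmer"])
table = tf.contrib.lookup.string_to_index_table_from_tensor(
    mapping=mapping_strings, num_oov_buckets=1)
ids = table.lookup(tf.constant(["emerson", "lake", "and", "palmer"]))

with tf.Session():
  # Old, now deprecated:
  #   tf.initialize_all_tables().run()
  tf.tables_initializer().run()  # new name for the same op
  print(ids.eval())  # ==> [0, 1, 3, 2]; 3 is the out-of-vocabulary bucket
```

The rest of the commit is this rename applied mechanically across the feature-column tests, estimator and export paths, and the lookup-op docstrings.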
@@ -229,7 +229,7 @@ class TransformerTest(test.TestCase):
     self.assertEqual(len(output), 1)
     self.assertIn(keys_sparse, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertEqual(output[keys_sparse].values.dtype, dtypes.int64)
       self.assertAllEqual(output[keys_sparse].values.eval(), [1, 2, 0])
       self.assertAllEqual(output[keys_sparse].indices.eval(),
@@ -247,7 +247,7 @@ class TransformerTest(test.TestCase):
     output = feature_column_ops._Transformer(features).transform(keys_sparse)

     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       # While the input is a dense Tensor, the output should be a SparseTensor.
       self.assertIsInstance(output, sparse_tensor.SparseTensor)
       self.assertEqual(output.dtype, dtypes.int64)
@@ -316,7 +316,7 @@ class TransformerTest(test.TestCase):
     self.assertIn(weighted_ids, output)

     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(output[weighted_ids][0].dense_shape.eval(),
                           ids_tensor.dense_shape.eval())
       self.assertAllEqual(output[weighted_ids][0].indices.eval(),
@@ -346,7 +346,7 @@ class TransformerTest(test.TestCase):
     self.assertEqual(len(output), 1)
     self.assertIn(vocab_sparse, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertEqual(output[vocab_sparse].values.dtype, dtypes.int64)
       self.assertAllEqual(output[vocab_sparse].values.eval(), [1, 2, 0])
       self.assertAllEqual(output[vocab_sparse].indices.eval(),
@@ -368,7 +368,7 @@ class TransformerTest(test.TestCase):
     self.assertEqual(len(output), 1)
     self.assertIn(vocab_sparse, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertEqual(output[vocab_sparse].values.dtype, dtypes.int64)
       self.assertAllEqual(output[vocab_sparse].values.eval(), [1, 2, 0, 1])
       self.assertAllEqual(output[vocab_sparse].indices.eval(),
@@ -392,7 +392,7 @@ class TransformerTest(test.TestCase):
     self.assertEqual(len(output), 1)
     self.assertIn(vocab_sparse, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertEqual(output[vocab_sparse].values.dtype, dtypes.int64)
       self.assertAllEqual(output[vocab_sparse].values.eval(), [1, 2, 0])
       self.assertAllEqual(output[vocab_sparse].indices.eval(),
@@ -414,7 +414,7 @@ class TransformerTest(test.TestCase):
     self.assertEqual(len(output), 1)
     self.assertIn(vocab_sparse, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertEqual(output[vocab_sparse].values.dtype, dtypes.int64)
       self.assertAllEqual(output[vocab_sparse].values.eval(), [1, 2, 0, 1])
       self.assertAllEqual(output[vocab_sparse].indices.eval(),
@@ -584,7 +584,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
     ])
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(output.eval().shape, [3, 3 + 4 + 10])

   def testRealValuedColumn(self):
@@ -681,7 +681,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
                                                            [one_hot_column])
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual([[0, 0, 10., 0], [0, 20., 0, 0], [30., 0, 40., 0]],
                           output.eval())

@@ -699,7 +699,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):

     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual([[0, 0, 1, 0], [0, 1, 0, 0], [1, 0, 0, 0]],
                           output.eval())

@@ -717,7 +717,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):

     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual([[0, 0, 1, 0], [0, 1, 0, 0], [1, 0, 1, 0]],
                           output.eval())

@@ -751,7 +751,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
                                                            [one_hot_sparse])
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual([3, 10], output.eval().shape)

   def testEmbeddingColumnSucceedsForDNN(self):
@@ -857,7 +857,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
                                                            [embeded_sparse])
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(output.eval().shape, [2, 10])

   def testEmbeddingColumnWithCrossedColumnSucceedsForDNN(self):
@@ -908,7 +908,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
       with self.assertRaisesRegexp(
           ValueError,
           "Error creating input layer for column: ids_weighted_by_weights"):
-        data_flow_ops.initialize_all_tables().run()
+        data_flow_ops.tables_initializer().run()
         feature_column_ops.input_from_feature_columns(features, [weighted_ids])

   def testCrossedColumnFailsForDNN(self):
@@ -1015,7 +1015,7 @@ class CreateInputLayersForDNNsTest(test.TestCase):
                                                            [embeded_sparse])
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       # score: (sum of weights)
       self.assertAllEqual(output.eval(), [[10.], [50.], [0.]])

@@ -1208,7 +1208,7 @@ class SequenceInputFromFeatureColumnTest(test.TestCase):

     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       model_input = sess.run(model_input_tensor)

     expected_input_shape = np.array([4, 3, 4])
@@ -1242,7 +1242,7 @@ class SequenceInputFromFeatureColumnTest(test.TestCase):

     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       model_input = sess.run(model_input_tensor)

     expected_input_shape = np.array([4, 3, hash_buckets])
@@ -1272,7 +1272,7 @@ class SequenceInputFromFeatureColumnTest(test.TestCase):

     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       model_input = sess.run(model_input_tensor)

     self.assertAllEqual(expected_input_shape, model_input.shape)
@@ -1302,7 +1302,7 @@ class SequenceInputFromFeatureColumnTest(test.TestCase):
                                                 embedding_weights)
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       model_input, gradients = sess.run([model_input_tensor, gradient_tensor])

     expected_input_shape = [4, 3, embedding_dimension]
@@ -1369,7 +1369,7 @@ class SequenceInputFromFeatureColumnTest(test.TestCase):

     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       model_input = sess.run(model_input_tensor)

     expected_input_shape = [
@@ -1437,7 +1437,7 @@ class WeightedSumTest(test.TestCase):
         features, [weighted_ids], num_outputs=5)
     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(logits.eval().shape, [2, 5])

   def testWeightedSparseColumnWithDenseInputTensor(self):
@@ -1453,7 +1453,7 @@ class WeightedSumTest(test.TestCase):

     with self.test_session():
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(logits.eval().shape, [2, 5])

   def testCrossedColumn(self):
@@ -1507,7 +1507,7 @@ class WeightedSumTest(test.TestCase):
             features, [movies], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.initialize_all_variables().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[movies][0]
       self.assertEqual(weights.get_shape(), (3, 1))
@@ -1582,7 +1582,7 @@ class WeightedSumTest(test.TestCase):
             features, [age, language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertAllClose(output.eval(), [[0.], [0.]])

@@ -1622,7 +1622,7 @@ class WeightedSumTest(test.TestCase):
     self.assertEqual(len(variables), 1)
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertAllClose(output.eval(), [[0.], [0.]])

@@ -1686,7 +1686,7 @@ class WeightedSumTest(test.TestCase):
             features, [weighted_language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertAllClose(output.eval(), [[0.], [0.]])

@@ -1714,7 +1714,7 @@ class WeightedSumTest(test.TestCase):
             features, [language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       # score: 0.1 + language_weight['hindi'] + language_weight['english']
       sess.run(bias.assign([0.1]))
@@ -1737,7 +1737,7 @@ class WeightedSumTest(test.TestCase):
             features, [movies], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[movies][0]
       self.assertEqual(weights.get_shape(), (15, 1))
@@ -1771,7 +1771,7 @@ class WeightedSumTest(test.TestCase):
             features, [country_language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[country_language][0]
       sess.run(weights.assign(weights + 0.4))
@@ -1795,7 +1795,7 @@ class WeightedSumTest(test.TestCase):
             features, [language_language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[language_language][0]
       sess.run(weights.assign(weights + 0.4))
@@ -1828,7 +1828,7 @@ class WeightedSumTest(test.TestCase):
             features, [country_language], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[country_language][0]
       sess.run(weights.assign(weights + 0.4))
@@ -1869,7 +1869,7 @@ class WeightedSumTest(test.TestCase):
             scope=scope))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertEqual(2, len(column_to_variable[country]))
       self.assertEqual(3, len(column_to_variable[language]))
@@ -1906,7 +1906,7 @@ class WeightedSumTest(test.TestCase):
             features, [country, age, incomes], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       incomes_weights = column_to_variable[incomes][0]
       sess.run(incomes_weights.assign([[0.1], [0.2], [0.3]]))
@@ -1943,7 +1943,7 @@ class WeightedSumTest(test.TestCase):
             features, [country, age, height, incomes], num_outputs=5))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       height_weights = column_to_variable[height][0]
       sess.run(
@@ -1973,7 +1973,7 @@ class WeightedSumTest(test.TestCase):
             features, [bucket], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       sess.run(column_to_variable[bucket][0].assign([[0.1], [0.2], [0.3],
                                                      [0.4]]))
@@ -2001,7 +2001,7 @@ class WeightedSumTest(test.TestCase):
             features, [bucket, country], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       # dimension = 2, bucket_size = 4, num_classes = 1
       sess.run(column_to_variable[bucket][0].assign(
@@ -2030,7 +2030,7 @@ class WeightedSumTest(test.TestCase):
             features, [bucket, country], num_outputs=5))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       # dimension = 2, bucket_size = 4, num_classes = 5
       sess.run(column_to_variable[bucket][0].assign(
@@ -2066,7 +2066,7 @@ class WeightedSumTest(test.TestCase):
             features, [country_price], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[country_price][0]
       sess.run(weights.assign(weights + 0.4))
@@ -2105,7 +2105,7 @@ class WeightedSumTest(test.TestCase):
             features, [country_language_price], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[country_language_price][0]
       sess.run(weights.assign(weights + 0.4))
@@ -2129,7 +2129,7 @@ class WeightedSumTest(test.TestCase):
             features, [product], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       product_weights = column_to_variable[product][0]
       sess.run(product_weights.assign([[0.1], [0.2], [0.3], [0.4], [0.5]]))
       self.assertAllClose(output.eval(), [[0.1], [0.5], [0.3]])
@@ -2144,7 +2144,7 @@ class WeightedSumTest(test.TestCase):
             features, [product], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       product_weights = column_to_variable[product][0]
       sess.run(product_weights.assign([[0.1], [0.2], [0.3], [0.4], [0.5]]))
       self.assertAllClose(output.eval(), [[0.1], [0.5], [0.3]])
@@ -2159,7 +2159,7 @@ class WeightedSumTest(test.TestCase):
             features, [product], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       product_weights = column_to_variable[product][0]
       sess.run(product_weights.assign([[0.1], [0.2], [0.3], [0.4], [0.5]]))
       self.assertAllClose(output.eval(), [[0.6], [0.7]])
@@ -2180,7 +2180,7 @@ class WeightedSumTest(test.TestCase):
             features, [product], num_outputs=1))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       product_weights = column_to_variable[product][0]
       sess.run(product_weights.assign([[0.1], [0.2], [0.3], [0.4], [0.5]]))
       self.assertAllClose(output.eval(), [[0.1], [0.5], [0.3]])
@@ -2192,7 +2192,7 @@ class WeightedSumTest(test.TestCase):
         features, [feature_column.real_valued_column("age")], num_outputs=3)
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       sess.run(bias.assign([0.1, 0.2, 0.3]))
       self.assertAllClose(output.eval(), [[0.1, 0.2, 0.3], [0.1, 0.2, 0.3],
                                           [0.1, 0.2, 0.3], [0.1, 0.2, 0.3]])
@@ -2206,7 +2206,7 @@ class WeightedSumTest(test.TestCase):
             features, [column], num_outputs=3))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       weights = column_to_variable[column][0]
       self.assertEqual(weights.get_shape(), (1, 3))
       sess.run(weights.assign([[0.01, 0.03, 0.05]]))
@@ -2230,7 +2230,7 @@ class WeightedSumTest(test.TestCase):
             features, [column], num_outputs=3))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       weights = column_to_variable[column][0]
       self.assertEqual(weights.get_shape(), (5, 3))
       sess.run(
@@ -2256,7 +2256,7 @@ class WeightedSumTest(test.TestCase):
             features, [column], num_outputs=3))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[column][0]
       self.assertEqual(weights.get_shape(), (5, 3))
@@ -2296,7 +2296,7 @@ class WeightedSumTest(test.TestCase):
             features, [column], num_outputs=3))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[column][0]
       self.assertEqual(weights.get_shape(), (5, 3))
@@ -2325,7 +2325,7 @@ class WeightedSumTest(test.TestCase):
             features, [column], num_outputs=3))
     with self.test_session() as sess:
       variables_lib.global_variables_initializer().run()
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       weights = column_to_variable[column][0]
       self.assertEqual(weights.get_shape(), (5, 3))
@@ -2390,7 +2390,7 @@ class ParseExampleTest(test.TestCase):
     self.assertIn(bucket, output)
     self.assertIn(wire_cast, output)
     with self.test_session():
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(output[bucket].eval(), [[2, 3, 0]])
       self.assertAllEqual(output[wire_cast].indices.eval(), [[0, 0], [0, 1]])
       self.assertAllEqual(output[wire_cast].values.eval(), [2, 0])
@@ -160,7 +160,7 @@ class DynamicRnnEstimatorTest(test.TestCase):
                                                   self.context_feature_columns)
     with self.test_session() as sess:
       sess.run(variables.global_variables_initializer())
-      sess.run(data_flow_ops.initialize_all_tables())
+      sess.run(data_flow_ops.tables_initializer())
       sequence_input_val = sess.run(sequence_input)
     expected_shape = np.array([
         3,  # expected batch size
@@ -181,7 +181,7 @@ class DynamicRnnEstimatorTest(test.TestCase):
     # Obtain values of activations and final state.
     with session.Session() as sess:
       sess.run(variables.global_variables_initializer())
-      sess.run(data_flow_ops.initialize_all_tables())
+      sess.run(data_flow_ops.tables_initializer())
       activations, final_state = sess.run([activations_t, final_state_t])

     expected_activations_shape = np.array([3, 2, self.NUM_LABEL_COLUMNS])
@@ -1283,7 +1283,7 @@ class Estimator(BaseEstimator):

     with tf_session.Session('') as session:
       variables.initialize_local_variables()
-      data_flow_ops.initialize_all_tables()
+      data_flow_ops.tables_initializer()
       saver_for_restore = saver.Saver(
           variables.global_variables(),
           sharded=True)
@@ -1291,7 +1291,7 @@ class Estimator(BaseEstimator):

       init_op = control_flow_ops.group(
           variables.local_variables_initializer(),
-          data_flow_ops.initialize_all_tables())
+          data_flow_ops.tables_initializer())

       # Perform the export
       builder = saved_model_builder.SavedModelBuilder(export_dir)
@@ -634,7 +634,7 @@ def _get_local_init_op():
                                             ops.GraphKeys.LOCAL_INIT_OP)
   if local_init_op is None:
     op_list = [variables.local_variables_initializer(),
-               data_flow_ops.initialize_all_tables()]
+               data_flow_ops.tables_initializer()]
     if op_list:
       local_init_op = control_flow_ops.group(*op_list)
       ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, local_init_op)
@@ -881,7 +881,7 @@ def run_feeds_iter(output_dict, feed_dicts, restore_checkpoint_path=None):
     else:
       session.run(variables.global_variables_initializer())
       session.run(variables.local_variables_initializer())
-      session.run(data_flow_ops.initialize_all_tables())
+      session.run(data_flow_ops.tables_initializer())
     coord = coordinator.Coordinator()
     threads = None
     try:
@@ -66,13 +66,13 @@ def _export_graph(graph, saver, checkpoint_path, export_dir,
   with graph.as_default():
     with tf_session.Session('') as session:
       variables.local_variables_initializer()
-      data_flow_ops.initialize_all_tables()
+      data_flow_ops.tables_initializer()
       saver.restore(session, checkpoint_path)

       export = exporter.Exporter(saver)
       export.init(init_op=control_flow_ops.group(
           variables.local_variables_initializer(),
-          data_flow_ops.initialize_all_tables()),
+          data_flow_ops.tables_initializer()),
                   default_graph_signature=default_graph_signature,
                   named_graph_signatures=named_graph_signatures,
                   assets_collection=ops.get_collection(
@@ -795,7 +795,7 @@ def string_to_index_table_from_file(vocabulary_file=None,
   The bucket ID range is `[vocabulary size, vocabulary size + num_oov_buckets]`.

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` or `table.init.run()` once.
+  `tf.tables_initializer.run()` or `table.init.run()` once.

   Sample Usages:

@@ -813,7 +813,7 @@ def string_to_index_table_from_file(vocabulary_file=None,
       vocabulary_file="test.txt", num_oov_buckets=1)
   ids = table.lookup(features)
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   ids.eval() ==> [0, 1, 3, 2]  # where 3 is the out-of-vocabulary bucket
   ```
@@ -893,7 +893,7 @@ def string_to_index_table_from_tensor(mapping,
   The bucket ID range is `[mapping size, mapping size + num_oov_buckets]`.

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` or `table.init.run()` once.
+  `tf.tables_initializer.run()` or `table.init.run()` once.

   Elements in `mapping` cannot have duplicates, otherwise when executing the
   table initializer op, it will throw a `FailedPreconditionError`.
@@ -907,7 +907,7 @@ def string_to_index_table_from_tensor(mapping,
   features = tf.constant(["emerson", "lake", "and", "palmer"])
   ids = table.lookup(features)
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   ids.eval() ==> [0, 1, 4, 2]
   ```
@@ -975,7 +975,7 @@ def string_to_index(tensor, mapping, default_value=-1, name=None):
   will throw a FailedPreconditionError.

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` once.
+  `tf.tables_initializer.run()` once.

   For example:

@@ -985,7 +985,7 @@ def string_to_index(tensor, mapping, default_value=-1, name=None):
   ids = tf.contrib.lookup.string_to_index(
       feats, mapping=mapping_strings, default_value=-1)
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   ids.eval() ==> [0, 1, -1, 2]
   ```
@@ -1022,7 +1022,7 @@ def index_to_string_table_from_file(vocabulary_file,
   (an out-of-vocabulary entry) is assigned the `default_value`

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` or `table.init.run()` once.
+  `tf.tables_initializer.run()` or `table.init.run()` once.

   Sample Usages:

@@ -1040,7 +1040,7 @@ def index_to_string_table_from_file(vocabulary_file,
       vocabulary_file="test.txt", default_value="UNKNOWN")
   values = table.lookup(indices)
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   values.eval() ==> ["lake", "UNKNOWN"]
   ```
@@ -1096,7 +1096,7 @@ def index_to_string_table_from_tensor(mapping, default_value="UNK", name=None):
   (an out-of-vocabulary entry) is assigned the `default_value`

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` or `table.init.run()` once.
+  `tf.tables_initializer.run()` or `table.init.run()` once.

   Elements in `mapping` cannot have duplicates, otherwise when executing the
   table initializer op, it will throw a `FailedPreconditionError`.
@@ -1110,7 +1110,7 @@ def index_to_string_table_from_tensor(mapping, default_value="UNK", name=None):
       mapping_string, default_value="UNKNOWN")
   values = table.lookup(indices)
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   values.eval() ==> ["lake", "UNKNOWN"]
   ```
@@ -1159,7 +1159,7 @@ def index_to_string(tensor, mapping, default_value="UNK", name=None):
   (an out-of-vocabulary entry) is assigned the `default_value`

   The underlying table must be initialized by calling
-  `tf.initialize_all_tables.run()` once.
+  `tf.tables_initializer.run()` once.

   For example:

@@ -1169,7 +1169,7 @@ def index_to_string(tensor, mapping, default_value="UNK", name=None):
   values = tf.contrib.lookup.index_to_string(
       indices, mapping=mapping_string, default_value="UNKNOWN")
   ...
-  tf.initialize_all_tables().run()
+  tf.tables_initializer().run()

   values.eval() ==> ["lake", "UNKNOWN"]
   ```
@@ -125,7 +125,7 @@ class HashTableOpTest(test.TestCase):
       table3 = lookup_ops.HashTable(
           lookup_ops.KeyValueTensorInitializer(keys, values), default_val)

-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(3, table1.size().eval())
       self.assertAllEqual(3, table2.size().eval())
       self.assertAllEqual(3, table3.size().eval())
@@ -1148,7 +1148,7 @@ class StringToIndexTableFromFile(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, 3), ids.eval())

   def test_string_to_index_table_from_file_with_default_value(self):
@@ -1160,7 +1160,7 @@ class StringToIndexTableFromFile(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, default_value), ids.eval())

   def test_string_to_index_table_from_file_with_oov_buckets(self):
@@ -1172,7 +1172,7 @@ class StringToIndexTableFromFile(test.TestCase):
           constant_op.constant(["salad", "surgery", "tarkus", "toccata"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual(
           (
               1,  # From vocabulary file.
@@ -1195,7 +1195,7 @@ class StringToIndexTableFromFile(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, -1, -1), ids.eval())
       self.assertEqual(2, table.size().eval())

@@ -1222,7 +1222,7 @@ class StringToIndexTableFromFile(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, -1), ids.eval())
       self.assertEqual(3, table.size().eval())

@@ -1255,7 +1255,7 @@ class StringToIndexTableFromTensor(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, 3), ids.eval())

   def test_string_to_index_table_from_tensor_with_default_value(self):
@@ -1266,7 +1266,7 @@ class StringToIndexTableFromTensor(test.TestCase):
       ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

       self.assertRaises(errors_impl.OpError, ids.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, default_value), ids.eval())

   def test_string_to_index_table_from_tensor_with_only_oov_buckets(self):
@@ -1301,7 +1301,7 @@ class StringToIndexTest(test.TestCase):
       indices = lookup_ops.string_to_index(feats, mapping=mapping_strings)

       self.assertRaises(errors_impl.OpError, indices.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertAllEqual((1, 2, -1), indices.eval())

@@ -1312,7 +1312,7 @@ class StringToIndexTest(test.TestCase):
       indices = lookup_ops.string_to_index(feats, mapping=mapping_strings)

       self.assertRaises(errors_impl.OpError,
-                        data_flow_ops.initialize_all_tables().run)
+                        data_flow_ops.tables_initializer().run)

   def test_string_to_index_with_default_value(self):
     default_value = -42
@@ -1323,7 +1323,7 @@ class StringToIndexTest(test.TestCase):
           feats, mapping=mapping_strings, default_value=default_value)
       self.assertRaises(errors_impl.OpError, indices.eval)

-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((1, 2, default_value), indices.eval())


@@ -1342,7 +1342,7 @@ class IndexToStringTableFromFileTest(test.TestCase):
           vocabulary_file=vocabulary_file)
       features = table.lookup(constant_op.constant([0, 1, 2, 3], dtypes.int64))
       self.assertRaises(errors_impl.OpError, features.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                           features.eval())

@@ -1354,7 +1354,7 @@ class IndexToStringTableFromFileTest(test.TestCase):
           vocabulary_file=vocabulary_file, default_value=default_value)
       features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
       self.assertRaises(errors_impl.OpError, features.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"salad", b"surgery", default_value),
                           features.eval())

@@ -1368,7 +1368,7 @@ class IndexToStringTableFromFileTest(test.TestCase):
           default_value=default_value)
       features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
       self.assertRaises(errors_impl.OpError, features.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"salad", default_value, default_value),
                           features.eval())

@@ -1380,7 +1380,7 @@ class IndexToStringTableFromFileTest(test.TestCase):
       features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))

       self.assertRaises(errors_impl.OpError, features.eval)
-      init = data_flow_ops.initialize_all_tables()
+      init = data_flow_ops.tables_initializer()
       self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
                               "Invalid vocab_size", init.run)

@@ -1392,7 +1392,7 @@ class IndexToStringTableFromFileTest(test.TestCase):
       features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))

       self.assertRaises(errors_impl.OpError, features.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"salad", b"surgery", b"UNK"), features.eval())


@@ -1407,7 +1407,7 @@ class IndexToStringTableFromTensorTest(test.TestCase):
       indices = constant_op.constant([0, 1, 2, 3], dtypes.int64)
       features = table.lookup(indices)
       self.assertRaises(errors_impl.OpError, features.eval)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()

       self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                           features.eval())
@@ -1419,7 +1419,7 @@ class IndexToStringTableFromTensorTest(test.TestCase):
           mapping=mapping_strings)
       indices = constant_op.constant([0, 1, 4], dtypes.int64)
       features = table.lookup(indices)
-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"hello", b"hello", b"UNK"), features.eval())

   def test_index_to_string_with_default_value(self):
@@ -1432,7 +1432,7 @@ class IndexToStringTableFromTensorTest(test.TestCase):
       features = table.lookup(indices)
       self.assertRaises(errors_impl.OpError, features.eval)

-      data_flow_ops.initialize_all_tables().run()
+      data_flow_ops.tables_initializer().run()
       self.assertAllEqual((b"salad", b"surgery", default_value),
                           features.eval())

@ -1446,7 +1446,7 @@ class IndexToStringTest(test.TestCase):
|
|||||||
feats = lookup_ops.index_to_string(indices, mapping=mapping_strings)
|
feats = lookup_ops.index_to_string(indices, mapping=mapping_strings)
|
||||||
|
|
||||||
self.assertRaises(errors_impl.OpError, feats.eval)
|
self.assertRaises(errors_impl.OpError, feats.eval)
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
|
|
||||||
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
|
self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
|
||||||
feats.eval())
|
feats.eval())
|
||||||
@ -1456,11 +1456,11 @@ class IndexToStringTest(test.TestCase):
|
|||||||
mapping_strings = constant_op.constant(["hello", "hello"])
|
mapping_strings = constant_op.constant(["hello", "hello"])
|
||||||
indices = constant_op.constant([0, 1, 4], dtypes.int64)
|
indices = constant_op.constant([0, 1, 4], dtypes.int64)
|
||||||
feats = lookup_ops.index_to_string(indices, mapping=mapping_strings)
|
feats = lookup_ops.index_to_string(indices, mapping=mapping_strings)
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
self.assertAllEqual((b"hello", b"hello", b"UNK"), feats.eval())
|
self.assertAllEqual((b"hello", b"hello", b"UNK"), feats.eval())
|
||||||
|
|
||||||
self.assertRaises(errors_impl.OpError,
|
self.assertRaises(errors_impl.OpError,
|
||||||
data_flow_ops.initialize_all_tables().run)
|
data_flow_ops.tables_initializer().run)
|
||||||
|
|
||||||
def test_index_to_string_with_default_value(self):
|
def test_index_to_string_with_default_value(self):
|
||||||
default_value = b"NONE"
|
default_value = b"NONE"
|
||||||
@ -1471,7 +1471,7 @@ class IndexToStringTest(test.TestCase):
|
|||||||
indices, mapping=mapping_strings, default_value=default_value)
|
indices, mapping=mapping_strings, default_value=default_value)
|
||||||
self.assertRaises(errors_impl.OpError, feats.eval)
|
self.assertRaises(errors_impl.OpError, feats.eval)
|
||||||
|
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
self.assertAllEqual((b"salad", b"surgery", default_value), feats.eval())
|
self.assertAllEqual((b"salad", b"surgery", default_value), feats.eval())
|
||||||
|
|
||||||
|
|
||||||
@ -1615,7 +1615,7 @@ class InitializeTableFromFileOpTest(test.TestCase):
|
|||||||
default_value,
|
default_value,
|
||||||
shared_name=shared_name)
|
shared_name=shared_name)
|
||||||
|
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
|
|
||||||
input_string = constant_op.constant(["brain", "salad", "tank"])
|
input_string = constant_op.constant(["brain", "salad", "tank"])
|
||||||
|
|
||||||
@ -1847,7 +1847,7 @@ class IdTableWithHashBucketsTest(test.TestCase):
|
|||||||
hasher_spec=lookup_ops.StrongHashSpec((1, 2)),
|
hasher_spec=lookup_ops.StrongHashSpec((1, 2)),
|
||||||
name="table2")
|
name="table2")
|
||||||
|
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
|
|
||||||
input_string = constant_op.constant(
|
input_string = constant_op.constant(
|
||||||
["fruit", "brain", "salad", "surgery", "UNK"])
|
["fruit", "brain", "salad", "surgery", "UNK"])
|
||||||
@ -1933,7 +1933,7 @@ class IdTableWithHashBucketsTest(test.TestCase):
|
|||||||
default_value2),
|
default_value2),
|
||||||
oov_buckets)
|
oov_buckets)
|
||||||
|
|
||||||
data_flow_ops.initialize_all_tables().run()
|
data_flow_ops.tables_initializer().run()
|
||||||
|
|
||||||
input_string_1 = constant_op.constant(
|
input_string_1 = constant_op.constant(
|
||||||
["brain", "salad", "surgery", "UNK"])
|
["brain", "salad", "surgery", "UNK"])
|
||||||
|
@@ -627,7 +627,7 @@ def train(train_op,
     init_feed_dict: A feed dictionary to use when executing the `init_op`.
     local_init_op: The local initialization operation. If left to its default
       value, then the session is initialized by calling
-      `tf.local_variables_initializer()` and `tf.initialize_all_tables()`.
+      `tf.local_variables_initializer()` and `tf.tables_initializer()`.
     init_fn: An optional callable to be executed after `init_op` is called. The
       callable must accept one argument, the session being initialized.
     ready_op: Operation to check if the model is ready to use. If left to its
@@ -697,7 +697,7 @@ def train(train_op,
   if local_init_op == _USE_DEFAULT:
     local_init_op = control_flow_ops.group(
         tf_variables.local_variables_initializer(),
-        data_flow_ops.initialize_all_tables())
+        data_flow_ops.tables_initializer())
 
   if sync_optimizer is not None and isinstance(
       sync_optimizer, sync_replicas_optimizer.SyncReplicasOptimizer):
 
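For callers of train() that leave local_init_op at its default, the grouped op built above is equivalent to the following sketch using the public tf.* aliases (the standalone session usage is illustrative, not part of this change):

import tensorflow as tf

# Equivalent of train()'s default local_init_op after this change:
# local variables plus any lookup tables registered in the graph.
local_init_op = tf.group(tf.local_variables_initializer(),
                         tf.tables_initializer())

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(local_init_op)  # table part is a NoOp if the graph has no tables
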
@@ -169,6 +169,7 @@ def all_libraries(module_to_name, members, documented):
               "Inputs and Readers",
               exclude_symbols=["LookupTableBase", "HashTable",
                                "initialize_all_tables",
+                               "tables_initializer",
                                "parse_single_sequence_example",
                                "string_to_hash_bucket"],
               prefix=PREFIX_TEXT),
 
@@ -39,6 +39,7 @@ from tensorflow.python.ops import math_ops
 # pylint: disable=wildcard-import
 from tensorflow.python.ops.gen_data_flow_ops import *
 # pylint: enable=wildcard-import
+from tensorflow.python.util.deprecation import deprecated
 
 
 def _as_type_list(dtypes):
@@ -1053,9 +1054,23 @@ class Barrier(object):
         self._barrier_ref, name=name)
 
 
+@deprecated("2017-03-02", "Use `tf.tables_initializer` instead.")
 def initialize_all_tables(name="init_all_tables"):
   """Returns an Op that initializes all tables of the default graph.
 
+  Args:
+    name: Optional name for the initialization op.
+
+  Returns:
+    An Op that initializes all tables.  Note that if there are
+    not tables the returned Op is a NoOp.
+  """
+  return tables_initializer(name)
+
+
+def tables_initializer(name="init_all_tables"):
+  """Returns an Op that initializes all tables of the default graph.
+
   Args:
     name: Optional name for the initialization op.
 
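The deprecated symbol is kept as a thin forwarder so existing code keeps working while warning users toward the new name. A minimal standalone sketch of this forwarding-decorator pattern (TensorFlow's real deprecated decorator in tensorflow.python.util.deprecation also validates the date format and can warn only once; this sketch omits that):

import functools
import warnings

def deprecated(date, instructions):
  """Minimal stand-in for the decorator imported above."""
  def decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      warnings.warn("%s (deprecated after %s): %s"
                    % (func.__name__, date, instructions),
                    DeprecationWarning, stacklevel=2)
      return func(*args, **kwargs)
    return wrapper
  return decorator
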
@@ -106,6 +106,7 @@ automatically by the optimizers in most cases.
 ### Read-only Lookup Tables
 
 @@initialize_all_tables
+@@tables_initializer
 
 
 ## Exporting and Importing Meta Graphs
 
@@ -39,7 +39,7 @@ def main_op():
   """
   init = variables.global_variables_initializer()
   init_local = variables.local_variables_initializer()
-  init_tables = tf_data_flow_ops.initialize_all_tables()
+  init_tables = tf_data_flow_ops.tables_initializer()
   return control_flow_ops.group(init, init_local, init_tables)
 
 
@@ -237,7 +237,7 @@ class Scaffold(object):
   @staticmethod
   def _default_local_init_op():
     return control_flow_ops.group(variables.local_variables_initializer(),
-                                  data_flow_ops.initialize_all_tables())
+                                  data_flow_ops.tables_initializer())
 
 
 def MonitoredTrainingSession(master='',  # pylint: disable=invalid-name
 
@@ -440,7 +440,7 @@ class Supervisor(object):
                                           ops.GraphKeys.LOCAL_INIT_OP)
       if local_init_op is None:
         op_list = [variables.local_variables_initializer(),
-                   data_flow_ops.initialize_all_tables()]
+                   data_flow_ops.tables_initializer()]
         if op_list:
           local_init_op = control_flow_ops.group(*op_list)
         ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, local_init_op)
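Taken together, migration for user code is a rename at the call site. A hedged end-to-end sketch (the tf.contrib.lookup table construction is illustrative and not part of this commit):

import tensorflow as tf

# Hypothetical graph with one lookup table.
table = tf.contrib.lookup.index_table_from_tensor(
    mapping=tf.constant(["brain", "salad", "surgery"]))
ids = table.lookup(tf.constant(["salad", "surgery", "tank"]))

with tf.Session() as sess:
  # Before this change: sess.run(tf.initialize_all_tables())  # now deprecated
  sess.run(tf.tables_initializer())
  print(sess.run(ids))  # [1 2 -1]; "tank" falls back to default_value=-1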