Wrap global_variables_initializer with self.evaluate()

In addition, fix a few eval() calls as well as remove some
@test_util.run_v1_only annotations.

PiperOrigin-RevId: 225180248
This commit is contained in:
Gaurav Jain 2018-12-12 07:21:17 -08:00 committed by TensorFlower Gardener
parent 1068d77396
commit bf16a7511a
23 changed files with 322 additions and 344 deletions

View File

@ -230,7 +230,7 @@ class DistributeCoordinatorTestBase(test.TestCase):
with ops.device("/job:worker/task:0"):
result = math_ops.add_n(xs)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
result_value = sess.run(result)
self.assertEqual(result_value, expected)
if result_value == expected:
@ -278,7 +278,7 @@ class DistributeCoordinatorTestBase(test.TestCase):
train_op = control_flow_ops.group([x_add, y_sub])
if context.is_chief:
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Synchronize workers after initialization.
if context.has_barrier:

View File

@ -579,7 +579,7 @@ class FunctionTest(test.TestCase, parameterized.TestCase):
return self.v * 2
o = HasAVar()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
call = def_function.function(o.call)
op = call()
self.assertAllEqual(self.evaluate(op), 2.0)

View File

@ -39,7 +39,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testBasic(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
with acd.AutomaticControlDependencies() as c:
v.assign(v + 1)
v.assign(2 * v)
@ -51,7 +51,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondMustRun(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
@ -73,7 +73,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondMustRunSeparateRead(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
@ -97,7 +97,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondNested(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
q = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
@ -132,7 +132,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondOneBranch(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
@ -153,7 +153,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondOneBranchUpdateBefore(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
v.assign(v * 2)
@ -175,7 +175,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testCondOneBranchUpdateAfter(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
p = array_ops.placeholder(dtype=dtypes.bool)
with acd.AutomaticControlDependencies() as c:
@ -211,7 +211,7 @@ class AutomaticControlDependenciesTest(test.TestCase):
def testDecorator(self):
with context.graph_mode(), self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
@acd.automatic_control_dependencies
def f():

View File

@ -105,7 +105,6 @@ class GenerateVocabRemappingTest(test.TestCase):
self.assertAllEqual(expected_num_present, self.evaluate(num_present))
@test_util.run_v1_only('b/120545219')
class LoadAndRemapMatrixTest(test.TestCase):
"""Tests for the load_and_remap_matrix() op."""
@ -126,7 +125,7 @@ class LoadAndRemapMatrixTest(test.TestCase):
save = saver.Saver([matrix])
with self.cached_session() as sess:
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.bundle_file = os.path.join(test.get_temp_dir(), 'bundle_checkpoint')
save.save(sess, self.bundle_file)
@ -231,6 +230,7 @@ class LoadAndRemapMatrixTest(test.TestCase):
np.reshape(initializing_values, (num_rows, num_cols)),
self.evaluate(remapped_matrix))
@test_util.run_v1_only('b/120545219')
def test_load_and_remap_invalid_remapping(self):
"""Tests that errors are raised when an ID maps to multiple new IDs.
@ -262,6 +262,7 @@ class LoadAndRemapMatrixTest(test.TestCase):
with self.cached_session(), self.assertRaises(errors.UnimplementedError):
self.evaluate(remapped_matrix)
@test_util.run_v1_only('b/120545219')
def test_load_and_remap_incorrect_initializing_values(self):
"""Tests that errors are raised with incorrect number of init values."""
remapped_matrix = gen_checkpoint_ops.load_and_remap_matrix(
@ -313,7 +314,7 @@ class LoadAndRemapMatrixWithMaxRowsTest(test.TestCase):
with self.cached_session() as sess:
ckpt_path = os.path.join(test.get_temp_dir(), 'temp_ckpt')
save = saver.Saver([matrix])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
save.save(sess, ckpt_path)
num_rows, num_cols = np_value.shape

View File

@ -408,7 +408,7 @@ class ConditionalAccumulatorTest(test.TestCase):
set_global_step_op = q.set_global_step(new_global_step)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
for _ in range(3):
set_global_step_op.run()
self.evaluate(inc_global_step)

View File

@ -140,7 +140,7 @@ class ControlFlowTest(test.TestCase):
v2 = control_flow_ops.with_dependencies([op], v)
self.assertTrue(isinstance(v2, ops.Tensor))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(9, self.evaluate(v2))
@test_util.run_v1_only("b/120545219")
@ -154,7 +154,7 @@ class ControlFlowTest(test.TestCase):
op = state_ops.assign(enter_v, enter_nine)
v2 = control_flow_ops.with_dependencies([op], enter_v)
v3 = control_flow_ops.exit(v2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(9, self.evaluate(v3))
@test_util.run_v1_only("b/120545219")
@ -165,7 +165,7 @@ class ControlFlowTest(test.TestCase):
p = constant_op.constant(True)
v1 = control_flow_ops._SwitchRefOrTensor(v._ref(), p) # pylint: disable=protected-access
v2 = state_ops.assign(v1[1], 9)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(9, self.evaluate(v2))
def testEnterMulExit(self):
@ -205,8 +205,8 @@ class ControlFlowTest(test.TestCase):
switch_op = control_flow_ops.switch(data, pred)
merge_op = control_flow_ops.merge(switch_op)[0]
val = merge_op.values.eval()
ind = merge_op.indices.eval()
val = merge_op.values
ind = merge_op.indices
self.assertAllEqual(np.arange(1, 7), val)
self.assertAllEqual(np.arange(0, 12, 2), ind)
@ -418,8 +418,8 @@ class ControlFlowTest(test.TestCase):
fn2 = lambda: ops.IndexedSlices(math_ops.subtract(x.values, 1), indices)
r = control_flow_ops.cond(pred, fn1, fn2)
val = r.values.eval()
ind = r.indices.eval()
val = r.values
ind = r.indices
self.assertAllEqual(11, val)
self.assertAllEqual(0, ind)
@ -437,8 +437,8 @@ class ControlFlowTest(test.TestCase):
fn2 = lambda: sparse_tensor.SparseTensor(
indices, x.values - 1, dense_shape=shape)
r = control_flow_ops.cond(pred, fn1, fn2)
self.assertAllEqual([3.0, 5.0], r.values.eval())
self.assertAllEqual([[1], [4]], r.indices.eval())
self.assertAllEqual([3.0, 5.0], r.values)
self.assertAllEqual([[1], [4]], r.indices)
self.assertAllEqual(r.values.get_shape(), (2,))
@test_util.run_v1_only("b/120545219")
@ -446,7 +446,7 @@ class ControlFlowTest(test.TestCase):
with self.cached_session():
rv = resource_variable_ops.ResourceVariable(True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
t = ops.convert_to_tensor(1.0)
def case():
@ -454,7 +454,8 @@ class ControlFlowTest(test.TestCase):
with ops.control_dependencies([assign]):
return array_ops.identity(t)
self.assertEqual(1.0, control_flow_ops.cond(rv, case, lambda: t).eval())
self.assertEqual(
1.0, self.evaluate(control_flow_ops.cond(rv, case, lambda: t)))
@test_util.run_v1_only("b/120545219")
def testCondWithTensorArrayGrad(self):
@ -483,8 +484,8 @@ class ControlFlowTest(test.TestCase):
fn2 = lambda: ops.IndexedSlices(math_ops.subtract(x.values, 1), i_64)
r = control_flow_ops.cond(pred, fn1, fn2)
val = r.values.eval()
ind = r.indices.eval()
val = r.values
ind = r.indices
self.assertAllEqual(11, val)
self.assertAllEqual(0, ind)
self.assertTrue(ind.dtype == np.int64)
@ -565,8 +566,8 @@ class ControlFlowTest(test.TestCase):
if not context.executing_eagerly():
with self.cached_session():
variables.global_variables_initializer().run()
result = f().eval()
self.evaluate(variables.global_variables_initializer())
result = self.evaluate(f())
self.assertEqual(True, result)
# Only second cond result was fetched, so v1 assign shouldn't run.
self.assertEqual(7, self.evaluate(v1))
@ -605,7 +606,7 @@ class ControlFlowTest(test.TestCase):
fn2 = lambda: v1
r = control_flow_ops.cond(pred, fn1, fn2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
result = self.evaluate(r)
self.assertAllEqual(np.array([7]), result)
@ -895,7 +896,7 @@ class ControlFlowTest(test.TestCase):
fn2 = lambda: array_ops.gather(v1, [1, 1])
r = control_flow_ops.cond(pred, fn1, fn2)
grad = gradients_impl.gradients(r, [v1])[0]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Should just be [1, 1], but possibly a sparse representation
gv, gi = sess.run([grad.values, grad.indices], feed_dict={c: 1})
dense_gv = [
@ -942,11 +943,11 @@ class ControlFlowTest(test.TestCase):
if not context.executing_eagerly():
with self.cached_session():
with self.captureWritesToStream(sys.stderr) as printed:
self.assertEqual(build_cond().eval(), 10)
self.assertEqual(build_cond(), 10)
self.assertEqual(printed.contents(), "C\n")
with self.captureWritesToStream(sys.stderr) as printed:
self.assertEqual(build_nested_cond().eval(), 10)
self.assertEqual(build_nested_cond(), 10)
self.assertEqual(printed.contents(), "C\n")
# In defuns, all prints should execute in program order.
@ -996,11 +997,11 @@ class ControlFlowTest(test.TestCase):
if not context.executing_eagerly():
with self.cached_session():
with self.captureWritesToStream(sys.stderr) as printed:
self.assertEqual(build_while()[0].eval(), 2)
self.assertEqual(build_while()[0], 2)
self.assertEqual(printed.contents(), "D\nD\n")
with self.captureWritesToStream(sys.stderr) as printed:
self.assertEqual(build_nested_while()[0].eval(), 2)
self.assertEqual(build_nested_while()[0], 2)
self.assertEqual(printed.contents(), "D\nD\n")
# In defuns, all prints should execute in program order.
@ -1049,8 +1050,8 @@ class ControlFlowTest(test.TestCase):
result = control_flow_ops.while_loop(cond=lambda i: i < 2,
body=body_fn, loop_vars=[1])
self.assertAllEqual(result.eval(), 2)
self.assertAllEqual(v.eval(), 1.0)
self.assertAllEqual(result, 2)
self.assertAllEqual(v.read_value(), 1.0)
@test_util.disable_control_flow_v2("b/79881896 (control deps)")
@test_util.run_v1_only("b/120545219")
@ -1067,7 +1068,7 @@ class ControlFlowTest(test.TestCase):
result = control_flow_ops.while_loop(cond=lambda i: i < 5,
body=body_fn, loop_vars=[0])
self.evaluate(result)
self.assertAllEqual(v.eval(), 1.0)
self.assertAllEqual(self.evaluate(v), 1.0)
@test_util.disable_control_flow_v2("b/113324949 (RefVariable)")
@test_util.run_v1_only("b/120545219")
@ -1085,7 +1086,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(c, b, [i, x], parallel_iterations=5)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(r[0].dtype, dtypes.int32)
self.assertEqual(r[1].dtype, dtypes.int32_ref)
@ -1333,7 +1334,7 @@ class ControlFlowTest(test.TestCase):
d = ops.convert_to_tensor(100)
r = control_flow_ops.while_loop(lambda i, m, c, o: math_ops.less(i, d),
compute, [i, m, c, o])
result = r[3].eval()
result = r[3]
self.assertAllEqual(10100, result)
@test_util.run_deprecated_v1
@ -1355,7 +1356,7 @@ class ControlFlowTest(test.TestCase):
s = array_ops.size(x)
r = control_flow_ops.while_loop(lambda i, m, c, o: math_ops.less(i, s),
compute, [i, m, c, o])
result = r[3].eval()
result = r[3]
self.assertAllEqual(42, result)
@test_util.run_v1_only("b/120545219")
@ -1380,7 +1381,7 @@ class ControlFlowTest(test.TestCase):
tensor_shape.unknown_shape(),
tensor_shape.unknown_shape()
])
result = r[2].eval()
result = r[2]
self.assertAllEqual(np.array([0, 1, 2, 3, 4, 5, 6]), result)
@test_util.disable_control_flow_v2("b/116338794 (buffer_reuse)")
@ -1634,7 +1635,7 @@ class ControlFlowTest(test.TestCase):
res = control_flow_ops.while_loop(
condition, body, [n, r], parallel_iterations=1)
self.assertAllEqual(12, res[1].eval())
self.assertAllEqual(12, res[1])
@test_util.run_deprecated_v1
def testWhileWithControl_2(self):
@ -1721,7 +1722,7 @@ class ControlFlowTest(test.TestCase):
return i + 1
r = control_flow_ops.while_loop(loop_condition, loop_body, (i0,))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(4, self.evaluate(r))
self.assertAllClose(65536.0, self.evaluate(v))
@ -1747,7 +1748,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.cond(
constant_op.constant(False), lambda: constant_op.constant(1.0),
false_branch)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(6.0, self.evaluate(r))
self.assertEqual(99, self.evaluate(v))
@ -1890,7 +1891,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(
loop_iterator, loop_body, [n], parallel_iterations=1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(3, self.evaluate(r))
result = self.evaluate(select)
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result)
@ -1916,7 +1917,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(
loop_iterator, loop_body, [n], parallel_iterations=1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(3, self.evaluate(r))
result1 = self.evaluate(select1)
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result1)
@ -1942,8 +1943,8 @@ class ControlFlowTest(test.TestCase):
loop_iterator,
loop_body, [n, array_ops.identity(select)],
parallel_iterations=1)
variables.global_variables_initializer().run()
result = r[1].eval()
self.evaluate(variables.global_variables_initializer())
result = r[1]
self.assertAllClose(np.array([10.0, 10.0, 10.0]), result)
@test_util.disable_control_flow_v2("b/113324949 (RefVariable)")
@ -1952,7 +1953,7 @@ class ControlFlowTest(test.TestCase):
with self.cached_session():
var_a = variables.Variable(0, name="a")
var_b = variables.Variable(0, name="b")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
c = constant_op.constant(0, name="c")
asn1 = state_ops.assign_add(var_a, 1, name="a_add")
@ -1982,7 +1983,7 @@ class ControlFlowTest(test.TestCase):
# Create some variables.
var_a = variables.Variable(0, name="a")
var_b = variables.Variable(0, name="b")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Change condition to check var_b
def pred(_):
@ -2014,7 +2015,7 @@ class ControlFlowTest(test.TestCase):
var_a = variables.Variable(0, name="a")
var_b = variables.Variable(0, name="b")
c = constant_op.constant(0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Loop condition
def pred(i):
@ -2054,7 +2055,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(c, b, [i], parallel_iterations=1)
self.assertEqual([10], self.evaluate(r))
for i in xrange(10):
self.assertEqual([i], q.dequeue().eval())
self.assertEqual([i], self.evaluate(q.dequeue()))
@test_util.run_v1_only("b/120545219")
def testWhileTimeOut(self):
@ -2272,8 +2273,8 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(c, b, [v], parallel_iterations=1)
r = gradients_impl.gradients(r, a)
variables.global_variables_initializer().run()
self.assertAllClose(216.0, r[0].eval())
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(216.0, r[0])
@test_util.run_deprecated_v1
def testWhileGrad_ResourceVariable(self):
@ -2285,8 +2286,8 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(c, b, [v], parallel_iterations=1)
g = gradients_impl.gradients(r, a)
variables.global_variables_initializer().run()
self.assertAllClose(216.0, g[0].eval())
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(216.0, g[0])
@test_util.run_v1_only("b/120545219")
def testWhileGradInCond(self):
@ -2463,13 +2464,13 @@ class ControlFlowTest(test.TestCase):
rx, ry = control_flow_ops.while_loop(c, b, [x, y], parallel_iterations=1)
r = gradients_impl.gradients([rx, ry], x)
self.assertAllClose(304.0, r[0].eval())
self.assertAllClose(304.0, r[0])
r = gradients_impl.gradients([rx, ry], y)
self.assertAllClose(124.0, r[0].eval())
self.assertAllClose(124.0, r[0])
r = gradients_impl.gradients([rx], x)
self.assertAllClose(295.0, r[0].eval())
self.assertAllClose(295.0, r[0])
r = gradients_impl.gradients([rx], y)
self.assertAllClose(120.0, r[0].eval())
self.assertAllClose(120.0, r[0])
@test_util.run_deprecated_v1
def testWhileGrad_Dependency(self):
@ -2487,9 +2488,9 @@ class ControlFlowTest(test.TestCase):
ri, rx = control_flow_ops.while_loop(c, b, [i, x], parallel_iterations=1)
r = gradients_impl.gradients([ri, rx], x)
self.assertAllClose(1024.0, r[0].eval())
self.assertAllClose(1024.0, r[0])
r = gradients_impl.gradients([rx], x)
self.assertAllClose(1024.0, r[0].eval())
self.assertAllClose(1024.0, r[0])
@test_util.disable_control_flow_v2("b/116355153 (back_prop flag)")
@test_util.run_v1_only("b/120545219")
@ -2501,7 +2502,7 @@ class ControlFlowTest(test.TestCase):
r = control_flow_ops.while_loop(c, b, [v], back_prop=False)
r = math_ops.add(r, v)
r = gradients_impl.gradients(r, v)
self.assertAllClose(1.0, r[0].eval())
self.assertAllClose(1.0, r[0])
@test_util.disable_control_flow_v2("b/113324949 (RefVariable)")
@test_util.run_v1_only("b/120545219")
@ -2522,7 +2523,7 @@ class ControlFlowTest(test.TestCase):
cond=cond, body=body, loop_vars=loop_vars)
cost = math_ops.reduce_sum(tensors[2])
grad = gradients_impl.gradients(cost, [variable])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(np.ones([2, 3]), sess.run(grad[0]))
@test_util.run_deprecated_v1
@ -2562,7 +2563,7 @@ class ControlFlowTest(test.TestCase):
_, rx = control_flow_ops.while_loop(c, b, [i, rx], parallel_iterations=1)
r = gradients_impl.gradients([rx], x)
self.assertAllClose(1024.0, r[0].eval())
self.assertAllClose(1024.0, r[0])
@test_util.run_v1_only("b/120545219")
def testWhileGrad_ParallelTwoLoops(self):
@ -2582,7 +2583,7 @@ class ControlFlowTest(test.TestCase):
rx = math_ops.add(r1, r2)
r = gradients_impl.gradients([rx], x)
self.assertAllClose(64.0, r[0].eval())
self.assertAllClose(64.0, r[0])
@test_util.run_v1_only("b/120545219")
def testWhileGrad_OneOutputWithControlDependencyOnSecond(self):
@ -2697,7 +2698,7 @@ class ControlFlowTest(test.TestCase):
train_op = optimizer.minimize(math_ops.reduce_mean(math_ops.square(res)))
self.evaluate(variables.global_variables_initializer())
self.evaluate(train_op)
self.assertAllClose(2.999, self.evaluate(var))
self.assertAllClose(2.999, var.read_value())
def _testWhileCondGrad_Simple(self, use_gpu):
with self.cached_session(use_gpu=use_gpu):
@ -2784,7 +2785,7 @@ class ControlFlowTest(test.TestCase):
grad_ys = [variables.VariableV1(73)._ref()] # pylint: disable=protected-access
grad = gradients_impl.gradients([r[1]], [x], grad_ys=grad_ys)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(r[0].dtype, dtypes.int32)
self.assertEqual(r[1].dtype, dtypes.float32_ref)
@ -3093,7 +3094,7 @@ class ControlFlowTest(test.TestCase):
grads = linalg_ops.norm(gradients_impl.gradients(r, vars_)[0])
z = math_ops.add(r, array_ops.stop_gradient(math_ops.reduce_sum(grads)))
result = gradients_impl.gradients(z, vars_)[0]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(5.0, self.evaluate(result))
@test_util.run_v1_only("b/120545219")
@ -3145,14 +3146,14 @@ class ControlFlowTest(test.TestCase):
x < y: f1,
x > z: f2
}, default=f3, exclusive=True)
self.assertAllEqual(r1.eval(), 17)
self.assertAllEqual(r1, 17)
r2 = control_flow_ops.case([(y > z, f1), (y > x, f2)], default=f3)
self.assertAllEqual(r2.eval(), 23)
self.assertAllEqual(r2, 23)
# Duplicate events can happen, first one is selected
r3 = control_flow_ops.case([(x < y, f1), (x < y, f2)], default=f3)
self.assertAllEqual(r3.eval(), 17)
self.assertAllEqual(r3, 17)
# Duplicate events cause an error if exclusive = True
r4 = control_flow_ops.case(
@ -3162,7 +3163,7 @@ class ControlFlowTest(test.TestCase):
# Check that the default is called if none of the others are
r5 = control_flow_ops.case({x > y: f1}, default=f3)
self.assertAllEqual(r5.eval(), -1)
self.assertAllEqual(r5, -1)
ran_once = [False, False, False]
@ -3181,7 +3182,7 @@ class ControlFlowTest(test.TestCase):
[(x < y, break_run_twice(0)), (x > y, break_run_twice(1))],
default=lambda: constant_op.constant(2))
self.assertAllEqual(r6.eval(), 0)
self.assertAllEqual(r6, 0)
@test_util.run_v1_only("b/120545219")
def testCaseSideEffects(self):
@ -3204,17 +3205,17 @@ class ControlFlowTest(test.TestCase):
r2 = control_flow_ops.case(
((x > y, a), (x > y, b)), default=c, exclusive=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate([v0, v1, v2]), [-1] * 3)
self.assertEqual(2, self.evaluate(r2))
self.assertAllEqual(self.evaluate([v0, v1, v2]), [-1, -1, 2])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate([v0, v1, v2]), [-1] * 3)
self.assertEqual(1, self.evaluate(r1))
self.assertAllEqual(self.evaluate([v0, v1, v2]), [-1, 1, -1])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate([v0, v1, v2]), [-1] * 3)
self.assertEqual(0, self.evaluate(r0))
self.assertAllEqual(self.evaluate([v0, v1, v2]), [0, -1, -1])
@ -3237,7 +3238,7 @@ class ControlFlowTest(test.TestCase):
i = control_flow_ops.cond(p, a, b)
self.assertTrue(isinstance(i, ops.Tensor))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(0, self.evaluate(v))
@ -3495,7 +3496,7 @@ class ControlFlowTest(test.TestCase):
lambda i, v: [i + 1, script_ops.py_func(func, [v], [dtypes.float32])[0]],
[constant_op.constant(0), constant_op.constant(2.0, dtypes.float32)],
[tensor_shape.unknown_shape(), tensor_shape.unknown_shape()])
self.assertEqual(r[1].eval(), 65536.0)
self.assertEqual(self.evaluate(r[1]), 65536.0)
@test_util.run_v1_only("b/120545219")
def testWhileFuncBasic(self):
@ -3512,8 +3513,8 @@ class ControlFlowTest(test.TestCase):
[tensor_shape.unknown_shape(),
tensor_shape.unknown_shape()])
grad = gradients_impl.gradients(r, x)[0]
self.assertEqual(r[1].eval(), 65536.0)
self.assertEqual(grad.eval(), 524288.0)
self.assertEqual(self.evaluate(r[1]), 65536.0)
self.assertEqual(self.evaluate(grad), 524288.0)
# while_v2 does not have stacks.
if not control_flow_util.ENABLE_CONTROL_FLOW_V2:
self.assertEqual(
@ -3877,7 +3878,7 @@ class WhileOpBenchmark(test.Benchmark):
with session.Session() as sess, ops.device(default_device):
# Get the initial id i, input x, and kernel.
i, x, kernel = self._getInitVariables()
self.evaluate(variables.global_variables_initializer())
variables.global_variables_initializer().run()
if static_unroll:
for _ in xrange(steps):

View File

@ -20,7 +20,6 @@ from __future__ import print_function
import numpy as np
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
@ -33,7 +32,6 @@ class AssignOpTest(test.TestCase):
# NOTE(mrry): We exclude these tests from the TSAN TAP target, because they
# contain benign and deliberate data races when multiple threads update
# the same parameters without a lock.
@test_util.run_v1_only("b/120545219")
def testParallelUpdateWithoutLocking(self):
with self.cached_session() as sess:
ones_t = array_ops.fill([1024, 1024], 1.0)
@ -42,7 +40,7 @@ class AssignOpTest(test.TestCase):
state_ops.assign_add(
p, ones_t, use_locking=False) for _ in range(20)
]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
def run_add(add_op):
self.evaluate(add_op)
@ -61,7 +59,6 @@ class AssignOpTest(test.TestCase):
self.assertTrue((vals >= ones).all())
self.assertTrue((vals <= ones * 20).all())
@test_util.run_v1_only("b/120545219")
def testParallelAssignWithoutLocking(self):
with self.cached_session() as sess:
ones_t = array_ops.fill([1024, 1024], float(1))
@ -70,7 +67,7 @@ class AssignOpTest(test.TestCase):
state_ops.assign(p, math_ops.multiply(ones_t, float(i)), False)
for i in range(1, 21)
]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
def run_assign(assign_op):
self.evaluate(assign_op)
@ -94,7 +91,6 @@ class AssignOpTest(test.TestCase):
# contain non-benign but known data races between the variable assignment and
# returning the output tensors. This issue will be resolved with the new
# resource variables.
@test_util.run_v1_only("b/120545219")
def testParallelUpdateWithLocking(self):
with self.cached_session() as sess:
zeros_t = array_ops.fill([1024, 1024], 0.0)
@ -104,7 +100,7 @@ class AssignOpTest(test.TestCase):
state_ops.assign_add(
p, ones_t, use_locking=True) for _ in range(20)
]
p.initializer.run()
self.evaluate(p.initializer)
def run_add(add_op):
self.evaluate(add_op)
@ -122,7 +118,6 @@ class AssignOpTest(test.TestCase):
ones = np.ones((1024, 1024)).astype(np.float32)
self.assertAllEqual(vals, ones * 20)
@test_util.run_v1_only("b/120545219")
def testParallelAssignWithLocking(self):
with self.cached_session() as sess:
zeros_t = array_ops.fill([1024, 1024], 0.0)
@ -133,7 +128,7 @@ class AssignOpTest(test.TestCase):
p, math_ops.multiply(ones_t, float(i)), use_locking=True)
for i in range(1, 21)
]
p.initializer.run()
self.evaluate(p.initializer)
def run_assign(assign_op):
self.evaluate(assign_op)

View File

@ -466,7 +466,7 @@ class FunctionalOpsTest(test.TestCase):
loss = l0 + array_ops.stop_gradient(l1)
grad = gradients_impl.gradients(ys=[loss], xs=[a, b])
with self.test_session(use_gpu=True) as sess:
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.evaluate(grad)
@test_util.run_in_graph_and_eager_modes

View File

@ -323,26 +323,24 @@ class PartitionedVariablesTestCase(test.TestCase):
for i in xrange(len(expected_specs)):
self.assertEquals(expected_specs[i], slices[i]._save_slice_info.spec)
@test_util.run_deprecated_v1
def testVecConstantInit(self):
with self.cached_session():
rnd_par = constant_op.constant([1, 2, 3, 4])
vs = partitioned_variables.create_partitioned_variables([4], [4], rnd_par)
variables.global_variables_initializer().run()
val = array_ops.concat(vs, 0).eval()
self.evaluate(variables.global_variables_initializer())
val = array_ops.concat(vs, 0)
rnd = self.evaluate(rnd_par)
self.assertAllClose(rnd, val)
self.assertEqual([dtypes.int32] * 4, [v.dtype.base_dtype for v in vs])
self._TestSaveSpec(vs, ["4 0,1", "4 1,1", "4 2,1", "4 3,1"])
@test_util.run_deprecated_v1
def testConstantInit(self):
with self.cached_session():
rnd_par = constant_op.constant([[1, 2, 3, 4], [5, 6, 7, 8]])
vs = partitioned_variables.create_partitioned_variables([2, 4], [1, 2],
rnd_par)
variables.global_variables_initializer().run()
val = array_ops.concat(vs, 1).eval()
self.evaluate(variables.global_variables_initializer())
val = array_ops.concat(vs, 1)
rnd = self.evaluate(rnd_par)
self.assertAllClose(rnd, val)
self.assertEqual([dtypes.int32] * 2, [v.dtype.base_dtype for v in vs])
@ -356,7 +354,7 @@ class PartitionedVariablesTestCase(test.TestCase):
rnd_par)
vs2 = partitioned_variables.create_partitioned_variables([2, 4], [1, 2],
rnd_par)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
var1_name = vs1[0]._save_slice_info.full_name
var2_name = vs2[0]._save_slice_info.full_name
self.assertEqual("hi/PartitionedVariable", var1_name)
@ -376,7 +374,7 @@ class PartitionedVariablesTestCase(test.TestCase):
vs, reuse=True, use_resource=use_resource):
vs2 = partitioned_variables.create_partitioned_variables(
[2, 4], [1, 2], rnd_par, dtype=dtypes.int32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
var1_name = vs1[0]._save_slice_info.full_name
var2_name = vs2[0]._save_slice_info.full_name
self.assertEqual("hola/PartitionedVariable", var1_name)
@ -393,7 +391,7 @@ class PartitionedVariablesTestCase(test.TestCase):
rnd_par)
vs2 = partitioned_variables.create_partitioned_variables([2, 4], [1, 2],
rnd_par)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
var1_name = vs1[0]._save_slice_info.full_name
var2_name = vs2[0]._save_slice_info.full_name
# Currently, the name scope 'ola' has no effect.
@ -408,18 +406,16 @@ class PartitionedVariablesTestCase(test.TestCase):
def testName(self):
self._testNameHelper(use_resource=False)
@test_util.run_deprecated_v1
def testResourceName(self):
self._testNameHelper(use_resource=True)
@test_util.run_v1_only("b/120545219")
def testRandomInitValue(self):
with self.cached_session():
rnd = variables.Variable(random_ops.random_uniform([200, 40]))
vs = partitioned_variables.create_partitioned_variables(
rnd.get_shape(), [1, 10], rnd.initialized_value())
variables.global_variables_initializer().run()
val = array_ops.concat(vs, 1).eval()
self.evaluate(variables.global_variables_initializer())
val = array_ops.concat(vs, 1)
rnd = self.evaluate(rnd)
self.assertAllClose(rnd, val)
self.assertEqual([dtypes.float32] * 10, [v.dtype.base_dtype for v in vs])
@ -430,7 +426,6 @@ class PartitionedVariablesTestCase(test.TestCase):
"200 40 0,200:36,4"
])
@test_util.run_v1_only("b/120545219")
def testRandomInitUnevenPartitions(self):
with self.cached_session():
rnd = variables.Variable(
@ -440,7 +435,7 @@ class PartitionedVariablesTestCase(test.TestCase):
rnd.get_shape(), [1, i], rnd.initialized_value())
for i in xrange(1, 10)
]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
rnd_val = self.evaluate(rnd)
# Only check the slice save specs for the first 5 tf.
save_specs = [
@ -462,33 +457,31 @@ class PartitionedVariablesTestCase(test.TestCase):
]
]
for i, vs in enumerate(var_lists):
var_val = array_ops.concat(vs, 1).eval()
var_val = array_ops.concat(vs, 1)
self.assertAllClose(rnd_val, var_val)
self.assertEqual([dtypes.float64] * len(vs),
[v.dtype.base_dtype for v in vs])
if i < len(save_specs):
self._TestSaveSpec(vs, save_specs[i])
@test_util.run_v1_only("b/120545219")
def testDegenerate(self):
with self.cached_session():
rnd = variables.Variable(random_ops.random_uniform([10, 43]))
vs = partitioned_variables.create_partitioned_variables(
rnd.get_shape(), [1, 1], rnd.initialized_value())
variables.global_variables_initializer().run()
val = array_ops.concat(vs, 0).eval()
self.evaluate(variables.global_variables_initializer())
val = array_ops.concat(vs, 0)
rnd = self.evaluate(rnd)
self.assertAllClose(rnd, val)
self._TestSaveSpec(vs, ["10 43 0,10:0,43"])
@test_util.run_v1_only("b/120545219")
def testSliceSizeOne(self):
with self.cached_session():
rnd = variables.Variable(random_ops.random_uniform([10, 43]))
vs = partitioned_variables.create_partitioned_variables(
rnd.get_shape(), [10, 1], rnd.initialized_value())
variables.global_variables_initializer().run()
val = array_ops.concat(vs, 0).eval()
self.evaluate(variables.global_variables_initializer())
val = array_ops.concat(vs, 0)
rnd = self.evaluate(rnd)
self.assertAllClose(rnd, val)
self._TestSaveSpec(vs, [
@ -497,7 +490,6 @@ class PartitionedVariablesTestCase(test.TestCase):
"10 43 6,1:0,43", "10 43 7,1:0,43", "10 43 8,1:0,43", "10 43 9,1:0,43"
])
@test_util.run_deprecated_v1
def testIotaInitializer(self):
self.assertAllClose([0., 1., 2., 3.], _IotaInitializer([4]))
self.assertAllClose([[0., 1.], [0., 10.], [0., 100.], [0., 1000.]],
@ -505,11 +497,11 @@ class PartitionedVariablesTestCase(test.TestCase):
with self.cached_session():
vs = partitioned_variables.create_partitioned_variables([13, 5], [3, 1],
_IotaInitializer)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
slice0 = _IotaInitializer([5, 5])
slice1 = _IotaInitializer([4, 5])
slice2 = _IotaInitializer([4, 5])
val = array_ops.concat(vs, 0).eval()
val = array_ops.concat(vs, 0)
self.assertAllClose(slice0 + slice1 + slice2, val)
self._TestSaveSpec(vs, ["13 5 0,5:0,5", "13 5 5,4:0,5", "13 5 9,4:0,5"])
@ -520,7 +512,7 @@ class PartitionedVariablesTestCase(test.TestCase):
with self.cached_session():
var0, var1 = partitioned_variables.create_partitioned_variables(
[20, 12], [1, 2], init_ops.random_uniform_initializer())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
val0, val1 = self.evaluate(var0).flatten(), self.evaluate(var1).flatten()
self.assertTrue(np.linalg.norm(val0 - val1) > 1e-6)
# Negative test that proves that slices have the same values if
@ -528,7 +520,7 @@ class PartitionedVariablesTestCase(test.TestCase):
with self.cached_session():
var0, var1 = partitioned_variables.create_partitioned_variables(
[20, 12], [1, 2], init_ops.random_uniform_initializer(seed=201))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
val0, val1 = self.evaluate(var0).flatten(), self.evaluate(var1).flatten()
self.assertAllClose(val0, val1)
@ -607,8 +599,8 @@ class PartitionedVariablesTestCase(test.TestCase):
self.assertTrue(
c.op in concat_control_inputs,
"var_x._concat() should get control dependencies from its scope.")
variables.global_variables_initializer().run()
self.assertAllClose(value.eval(), var_x.as_tensor().eval())
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(value, var_x.as_tensor())
def testMetaGraphSaveLoad(self):
save_prefix = os.path.join(self.get_temp_dir(), "ckpt")
@ -623,7 +615,7 @@ class PartitionedVariablesTestCase(test.TestCase):
v0_part = v0._get_partitions()
self.assertEqual(len(v0_list), 5)
self.assertAllEqual(v0_part, (5, 1))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
save_graph.get_collection_ref("partvar").append(v0)
saver = saver_lib.Saver()

View File

@ -689,7 +689,7 @@ class ResourceVariableOpsTest(test_util.TensorFlowTestCase):
def testToFromProto(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertEquals(2, math_ops.add(w, 1).eval())
@ -793,11 +793,11 @@ class ResourceVariableOpsTest(test_util.TensorFlowTestCase):
with self.assertRaises(ValueError):
_ = w.value().op.get_attr("_class")
@test_util.run_v1_only("b/120545219")
@test_util.run_deprecated_v1
def testSharedName(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(300.0, name="var4")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var4",

View File

@ -425,7 +425,6 @@ class TensorArrayTest(test.TestCase):
self.assertAllEqual(t_g_ta_0, t_g_ta_1)
self.assertAllEqual([[4.0, 5.0]], d_r1_0)
@test_util.run_v1_only("b/120545219")
def testTensorArrayWriteWrongIndexOrDataTypeFails(self):
with self.session(use_gpu=True):
ta = _make_ta(3, "foo", dtype=dtypes.float32)
@ -459,7 +458,6 @@ class TensorArrayTest(test.TestCase):
with self.assertRaisesOpError(error_msg):
self.evaluate(ta.write(3, 3.0).flow)
@test_util.run_v1_only("b/120545219")
def testTensorArrayReadWrongIndexOrDataTypeFails(self):
with self.session(use_gpu=True):
ta = _make_ta(3, "foo", dtype=dtypes.float32)
@ -505,7 +503,6 @@ class TensorArrayTest(test.TestCase):
"it has already been written to."):
self.evaluate(ta.write(2, 3.0).write(2, 3.0).flow)
@test_util.run_v1_only("b/120545219")
def testTensorArrayConcatIncompatibleShapesFails(self):
with self.session(use_gpu=True):
ta = tensor_array_ops.TensorArray(
@ -537,7 +534,6 @@ class TensorArrayTest(test.TestCase):
with self.assertRaisesOpError("shape"):
self.evaluate(w3.concat())
@test_util.run_v1_only("b/120545219")
def testTensorArraySplitIncompatibleShapesFails(self):
with self.session(use_gpu=True):
in_eager_mode = context.executing_eagerly()
@ -959,7 +955,7 @@ class TensorArrayTest(test.TestCase):
v0_grad = gradients_impl.gradients([vout], [v0], [grad_val])[0]
state0_grad = gradients_impl.gradients([vout], [state0], [grad_val])[0]
var_grad = gradients_impl.gradients([vout], [var], [grad_val])[0]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
state0_t, var_t, v0_t, vout_t, v0_grad_t, var_grad_t, state0_grad_t = (
self.evaluate(
@ -1578,7 +1574,7 @@ class TensorArrayTest(test.TestCase):
self.assertEqual(tensor_shape.scalar(), read1.get_shape())
if not context.executing_eagerly():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
read0_v, read1_v, size0_v, size1_v = self.evaluate((read0, read1, size0,
size1))

View File

@ -66,7 +66,7 @@ class VariablesTestCase(test.TestCase):
with self.assertRaisesOpError("Attempting to use uninitialized value"):
self.evaluate(var1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(0.0, self.evaluate(var0))
self.assertAllClose(1.1, self.evaluate(var1))
@ -96,11 +96,11 @@ class VariablesTestCase(test.TestCase):
self.assertEqual([3, 6], depdep.get_shape())
self.assertEqual([3, 6], depdep.shape)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(rnd.eval(), self.evaluate(dep))
self.assertAllClose(rnd.eval() + self.evaluate(dep) + 2.0,
self.evaluate(depdep))
self.assertAllClose(self.evaluate(rnd), self.evaluate(dep))
self.assertAllClose(
self.evaluate(rnd) + self.evaluate(dep) + 2.0, self.evaluate(depdep))
def testIterable(self):
with self.assertRaisesRegexp(TypeError, "not iterable"):
@ -117,7 +117,7 @@ class VariablesTestCase(test.TestCase):
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(0.0, self.evaluate(var))
self.assertAllClose(1.0, self.evaluate(plus_one))
@ -136,7 +136,7 @@ class VariablesTestCase(test.TestCase):
plus_one = var.assign_add(1.0)
minus_one = var.assign_sub(2.0)
four = var.assign(4.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(0.0, self.evaluate(var))
self.evaluate(plus_one)
@ -166,7 +166,7 @@ class VariablesTestCase(test.TestCase):
var = variables.Variable(zero)
count_up_to = var.count_up_to(3)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(0, self.evaluate(var))
self.assertEqual(0, self.evaluate(count_up_to))
@ -264,10 +264,10 @@ class VariablesTestCase(test.TestCase):
with self.cached_session():
var_x = variables.Variable(2.0)
var_y = variables.Variable(3.0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(2.0, self.evaluate(var_x))
self.assertAllClose(3.0, self.evaluate(var_y))
self.assertAllClose(5.0, math_ops.add(var_x, var_y).eval())
self.assertAllClose(5.0, self.evaluate(math_ops.add(var_x, var_y)))
@test_util.run_deprecated_v1
def testZeroSizeVarSameAsConst(self):
@ -277,9 +277,9 @@ class VariablesTestCase(test.TestCase):
variable_mul = math_ops.matmul(zero_size_const, zero_size_var)
const_mul = math_ops.matmul(
zero_size_const, zero_size_const, transpose_b=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variable_output = self.evaluate(variable_mul)
self.assertAllClose(const_mul.eval(), variable_output)
self.assertAllClose(self.evaluate(const_mul), variable_output)
self.assertAllClose([[0., 0.], [0., 0.]], variable_output)
@test_util.run_deprecated_v1
@ -372,7 +372,7 @@ class VariablesTestCase(test.TestCase):
matmul = var_m.__matmul__([[10.0], [20.0]])
rmatmul = var_m.__rmatmul__([[10.0], [20.0]])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose([2.0], self.evaluate(add))
self.assertAllClose([3.0], self.evaluate(radd))
self.assertAllClose([1.0], self.evaluate(sub))
@ -409,7 +409,7 @@ class VariablesTestCase(test.TestCase):
def testSession(self):
with self.cached_session() as sess:
var = variables.Variable([1, 12])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose([1, 12], self.evaluate(var))
@test_util.run_v1_only("b/120545219")
@ -431,7 +431,7 @@ class VariablesTestCase(test.TestCase):
v1 = variables.Variable(initializer, dtype=dtypes.float32)
self.assertEqual(shape, v1.get_shape())
self.assertEqual(shape, v1.shape)
self.assertAllClose(value, v1.initial_value.eval())
self.assertAllClose(value, self.evaluate(v1.initial_value))
with self.assertRaises(errors_impl.FailedPreconditionError):
self.evaluate(v1)
@ -439,11 +439,11 @@ class VariablesTestCase(test.TestCase):
math_ops.negative(v1.initialized_value()), dtype=dtypes.float32)
self.assertEqual(v1.get_shape(), v2.get_shape())
self.assertEqual(v1.shape, v2.shape)
self.assertAllClose(np.negative(value), v2.initial_value.eval())
self.assertAllClose(np.negative(value), self.evaluate(v2.initial_value))
with self.assertRaises(errors_impl.FailedPreconditionError):
self.evaluate(v2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(np.negative(value), self.evaluate(v2))
def testConstraintArg(self):
@ -465,10 +465,10 @@ class VariablesTestCase(test.TestCase):
a = variables.Variable([1, 2, 3], dtype=dtypes.float32)
b = variables.Variable(a.initialized_value() + 2)
c = variables.Variable(b.initialized_value() + 2)
variables.global_variables_initializer().run()
self.assertAllEqual(a.eval(), [1, 2, 3])
self.assertAllEqual(b.eval(), [3, 4, 5])
self.assertAllEqual(c.eval(), [5, 6, 7])
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(self.evaluate(a), [1, 2, 3])
self.assertAllEqual(self.evaluate(b), [3, 4, 5])
self.assertAllEqual(self.evaluate(c), [5, 6, 7])
@test_util.run_deprecated_v1
def testInitializerFunctionDevicePlacement(self):
@ -503,7 +503,7 @@ class VariablesTestCase(test.TestCase):
# initialized_value should not rerun the initializer_op if the variable
# has already been initialized elsewhere.
self.evaluate(v.assign(1.0))
self.assertEqual(1.0, v.initialized_value().eval())
self.assertEqual(1.0, self.evaluate(v.initialized_value()))
v_def.ClearField("initial_value_name")
with ops.Graph().as_default(), self.cached_session() as sess:
@ -537,7 +537,7 @@ class VariablesTestCase(test.TestCase):
def testLoad(self):
with self.cached_session():
var = variables.Variable(np.zeros((5, 5), np.float32))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
var.load(np.ones((5, 5), np.float32))
self.assertAllClose(np.ones((5, 5), np.float32), self.evaluate(var))
@ -573,7 +573,7 @@ class IsInitializedTest(test.TestCase):
_ = v, w
uninited = variables.report_uninitialized_variables()
self.assertAllEqual(np.array([b"v", b"w"]), self.evaluate(uninited))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(0, self.evaluate(uninited).size)
@test_util.run_v1_only("b/120545219")
@ -601,20 +601,20 @@ class IsInitializedTest(test.TestCase):
b = variables.Variable(array_ops.ones([2, 2]))
objective = math_ops.reduce_sum(b + math_ops.matmul(
a, a, transpose_a=True))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
do_opt = gradient_descent.GradientDescentOptimizer(0.1).minimize(
objective)
self.evaluate([do_opt])
self.assertAllClose([[0.9, 0.9], [0.9, 0.9]], self.evaluate(b))
@test_util.run_v1_only("b/120545219")
class ObsoleteIsInitializedTest(test.TestCase):
def testNoVars(self):
with ops.Graph().as_default():
self.assertEqual(None, variables.assert_variables_initialized())
@test_util.run_v1_only("b/120545219")
def testVariables(self):
with ops.Graph().as_default(), self.cached_session() as sess:
v = variables.VariableV1([1, 2])
@ -623,10 +623,9 @@ class ObsoleteIsInitializedTest(test.TestCase):
inited = variables.assert_variables_initialized()
with self.assertRaisesOpError("Attempting to use uninitialized value"):
self.evaluate(inited)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.evaluate(inited)
@test_util.run_v1_only("b/120545219")
def testVariableList(self):
with ops.Graph().as_default(), self.cached_session() as sess:
v = variables.VariableV1([1, 2])
@ -766,36 +765,36 @@ class PartitionedVariableTest(test.TestCase):
assign_list = pv_1.assign([c_0, c_1])
assign_part_value = pv_1.assign_add(assign_ones)
assign_part_var = pv_1.assign_sub(pv_0)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual([1.0], plus_delta[0].eval())
self.assertEqual([1.0], self.evaluate(plus_delta[0]))
self.assertEqual([1.0], self.evaluate(v0))
self.assertEqual([3.0], plus_delta[1].eval())
self.assertEqual([3.0], self.evaluate(plus_delta[1]))
self.assertEqual([3.0], self.evaluate(v1))
self.assertEqual([-2.0], minus_delta[0].eval())
self.assertEqual([-2.0], self.evaluate(minus_delta[0]))
self.assertEqual([-2.0], self.evaluate(v0))
self.assertEqual([-1.0], minus_delta[1].eval())
self.assertEqual([-1.0], self.evaluate(minus_delta[1]))
self.assertEqual([-1.0], self.evaluate(v1))
self.assertEqual([1.0], assign_ones[0].eval())
self.assertEqual([1.0], self.evaluate(assign_ones[0]))
self.assertEqual([1.0], self.evaluate(v0))
self.assertEqual([1.0], assign_ones[1].eval())
self.assertEqual([1.0], self.evaluate(assign_ones[1]))
self.assertEqual([1.0], self.evaluate(v1))
self.assertEqual([2.0], assign_list[0].eval())
self.assertEqual([2.0], self.evaluate(assign_list[0]))
self.assertEqual([2.0], self.evaluate(v2))
self.assertEqual([3.0], assign_list[1].eval())
self.assertEqual([3.0], self.evaluate(assign_list[1]))
self.assertEqual([3.0], self.evaluate(v3))
self.assertEqual([3.0], assign_part_value[0].eval())
self.assertEqual([3.0], self.evaluate(assign_part_value[0]))
self.assertEqual([3.0], self.evaluate(v2))
self.assertEqual([4.0], assign_part_value[1].eval())
self.assertEqual([4.0], self.evaluate(assign_part_value[1]))
self.assertEqual([4.0], self.evaluate(v3))
self.assertEqual([2.0], assign_part_var[0].eval())
self.assertEqual([2.0], self.evaluate(assign_part_var[0]))
self.assertEqual([2.0], self.evaluate(v2))
self.assertEqual([3.0], assign_part_var[1].eval())
self.assertEqual([3.0], self.evaluate(assign_part_var[1]))
self.assertEqual([3.0], self.evaluate(v3))

View File

@ -565,7 +565,7 @@ class DataTypesTest(test_util.TensorFlowTestCase):
strict=strict)
with self.cached_session() as sess:
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
true_feed_dict = {condition: True}
true_feed_dict.update(feed_dict)
result_cond, result_case = sess.run([output_cond, output_case],

View File

@ -1027,7 +1027,7 @@ class CustomGradientTest(test_util.TensorFlowTestCase):
conditional, lambda: alpha * 2, lambda: alpha * 3)
g, = gradients_impl.gradients(output, alpha)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(g.eval(), [2.0])
self.assertAllEqual(g.eval(feed_dict={conditional: False}), [3.0])

View File

@ -1084,7 +1084,7 @@ class SavedModelTest(SavedModelTestBase):
# CheckpointedOp is a key-value table that can be saved across sessions.
# The table register itself in SAVEABLE_OBJECTS collection.
v1 = saver_test_utils.CheckpointedOp(name="v1")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
v1.insert("k1", 3.0).run()
# Once the table is restored, we can access it through this reference.
ops.add_to_collection("table_ref", v1.table_ref)

View File

@ -106,7 +106,7 @@ class AdagradOptimizerTest(test.TestCase):
pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x)
loss = pred * pred
sgd_op = adagrad.AdagradOptimizer(1.0).minimize(loss)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllCloseAccordingToType([[1.0, 2.0], [3.0, 4.0]],
self.evaluate(var0))
@ -129,7 +129,7 @@ class AdagradOptimizerTest(test.TestCase):
constant_op.constant(3.0), initial_accumulator_value=0.1)
ada_update = ada_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], self.evaluate(var0))
self.assertAllClose([3.0, 4.0], self.evaluate(var1))
@ -163,7 +163,7 @@ class AdagradOptimizerTest(test.TestCase):
ada_opt = adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1)
ada_update = ada_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values
self.assertAllClose([[1.0], [2.0]], self.evaluate(var0))
self.assertAllClose([[3.0], [4.0]], self.evaluate(var1))
@ -198,7 +198,7 @@ class AdagradOptimizerTest(test.TestCase):
[(grad_repeated_index, repeated_index_update_var)])
aggregated_update = adagrad.AdagradOptimizer(3.0).apply_gradients(
[(grad_aggregated, aggregated_update_var)])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(aggregated_update_var.eval(),
self.evaluate(repeated_index_update_var))
for _ in range(3):
@ -223,7 +223,7 @@ class AdagradOptimizerTest(test.TestCase):
2.0).minimize(loss_repeated)
update_op_aggregated = adagrad.AdagradOptimizer(
2.0).minimize(loss_aggregated)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(
self.evaluate(var_repeated), self.evaluate(var_aggregated))
for _ in range(3):
@ -289,7 +289,7 @@ class AdagradOptimizerTest(test.TestCase):
self.assertEquals(slot0.get_shape(), var0.get_shape())
slot1 = ada_opt.get_slot(var1, "accumulator")
self.assertEquals(slot1.get_shape(), var1.get_shape())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Fetch params to validate initial values.
self.assertAllClose([1.0, 2.0], self.evaluate(var0))

View File

@ -154,7 +154,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_matrix,
remapped_matrix.as_tensor().eval())
@ -188,7 +188,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_matrix,
remapped_matrix.as_tensor().eval())
@ -226,7 +226,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_matrix,
remapped_matrix.as_tensor().eval())
@ -262,7 +262,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_matrix,
remapped_matrix.as_tensor().eval())
@ -296,7 +296,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_embeddings,
remapped_embeddings.as_tensor().eval())
@ -342,7 +342,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_embeddings,
remapped_embeddings.as_tensor().eval())
@ -380,7 +380,7 @@ class LoadAndRemapWrappersTest(test.TestCase):
partitioner=partitioned_variables.fixed_size_partitioner(2))
with self.cached_session():
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose(expected_remapped_embeddings,
remapped_embeddings.as_tensor().eval())

View File

@ -58,7 +58,7 @@ class MatchFilenamesOnceTest(test_lib.TestCase):
question = inp.match_filenames_once(
os.path.join(self.get_temp_dir(), "match_filenames.?"))
one = inp.match_filenames_once(additional[1])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
self.assertItemsEqual(
map(compat.as_bytes, filenames), self.evaluate(star))
@ -84,7 +84,7 @@ class LimitEpochsTest(test_lib.TestCase):
with self.cached_session():
love_me = constant_op.constant("Love Me")
love_me_two_times = inp.limit_epochs(love_me, num_epochs=2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
self.assertEqual(b"Love Me", self.evaluate(love_me_two_times))
self.assertEqual(b"Love Me", self.evaluate(love_me_two_times))
@ -105,7 +105,7 @@ class InputProducerTest(test_lib.TestCase):
input_tensor, num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(len(input_tensor) * num_epochs)
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -132,7 +132,7 @@ class InputProducerTest(test_lib.TestCase):
input_tensor, element_shape=[4], num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(len(input_value) * num_epochs)
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -163,7 +163,7 @@ class StringInputProducerTest(test_lib.TestCase):
strings, num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(len(strings) * num_epochs)
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -186,7 +186,7 @@ class StringInputProducerTest(test_lib.TestCase):
strings, num_epochs=num_epochs, shuffle=True, seed=271828)
dequeue_many = queue.dequeue_many(len(strings))
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -234,7 +234,7 @@ class StringInputProducerTest(test_lib.TestCase):
constant_op.constant(
[], dtype=dtypes.string))
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners(coord=coord)
with self.assertRaises(errors_impl.OutOfRangeError):
@ -284,7 +284,7 @@ class RangeInputProducerTest(test_lib.TestCase):
range_size, num_epochs=num_epochs, shuffle=False)
dequeue_many = queue.dequeue_many(range_size * num_epochs)
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -307,7 +307,7 @@ class RangeInputProducerTest(test_lib.TestCase):
range_size, num_epochs=num_epochs, shuffle=True, seed=314159)
dequeue_many = queue.dequeue_many(range_size)
dequeue = queue.dequeue()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -358,7 +358,7 @@ class SliceInputProducerTest(test_lib.TestCase):
source_ints = [2, 3, 5, 7]
slices = inp.slice_input_producer(
[source_strings, source_ints], num_epochs=num_epochs, shuffle=False)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -386,7 +386,7 @@ class SliceInputProducerTest(test_lib.TestCase):
num_epochs=num_epochs,
shuffle=True,
seed=161803)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -487,7 +487,7 @@ class BatchTest(test_lib.TestCase):
batched = inp.batch(
[counter, sparse_counter, "string"], batch_size=batch_size)
batched_fetch = batched
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -555,7 +555,7 @@ class BatchTest(test_lib.TestCase):
counter = examples.count_up_to(num_batches * batch_size)
string = array_ops.tile(["string"],
math_ops.to_int32(array_ops.stack([counter])))
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
batched = inp.batch(
[counter, string], batch_size=batch_size, dynamic_pad=True)
@ -590,7 +590,7 @@ class BatchTest(test_lib.TestCase):
dense_shape=[1])
pre_batched = inp.batch([counter, sparse_counter, "string"], batch_size=2)
batched = inp.batch(pre_batched, enqueue_many=True, batch_size=batch_size)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -629,7 +629,7 @@ class BatchTest(test_lib.TestCase):
[counter, sparse_counter, "string"],
batch_size=batch_size,
num_threads=4)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -672,7 +672,7 @@ class BatchTest(test_lib.TestCase):
[counter, sparse_counter, "string"],
batch_size=batch_size,
allow_smaller_final_batch=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -730,7 +730,7 @@ class BatchTest(test_lib.TestCase):
batch_size=batch_size,
num_threads=4,
allow_smaller_final_batch=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1058,7 +1058,7 @@ class BatchJoinTest(test_lib.TestCase):
batched_fetch[1].dense_shape.get_shape().as_list())
self.assertAllEqual((batch_size,), batched_fetch[2].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1157,7 +1157,7 @@ class BatchJoinTest(test_lib.TestCase):
self.assertAllEqual((batch_size,), batched[0].get_shape().as_list())
self.assertAllEqual((batch_size, None), batched[1].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1244,7 +1244,7 @@ class BatchJoinTest(test_lib.TestCase):
self.assertAllEqual((2,), batched[1].dense_shape.get_shape().as_list())
self.assertAllEqual((None,), batched[2].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1339,7 +1339,7 @@ class BatchJoinTest(test_lib.TestCase):
self.assertAllEqual((None,), batched[0].get_shape().as_list())
self.assertAllEqual((None, None), batched[1].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1644,7 +1644,7 @@ class ShuffleBatchTest(test_lib.TestCase):
min_after_dequeue=16,
seed=141421)
batched_fetch = batched
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1702,7 +1702,7 @@ class ShuffleBatchTest(test_lib.TestCase):
seed=141421,
allow_smaller_final_batch=True)
batched_fetch = batched
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1756,7 +1756,7 @@ class ShuffleBatchTest(test_lib.TestCase):
min_after_dequeue=16,
seed=173205,
num_threads=4)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -1807,7 +1807,7 @@ class ShuffleBatchTest(test_lib.TestCase):
seed=173205,
num_threads=4,
allow_smaller_final_batch=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -2070,7 +2070,7 @@ class ShuffleBatchJoinTest(test_lib.TestCase):
batched_fetch[1].dense_shape.get_shape().as_list())
self.assertAllEqual((batch_size,), batched_fetch[2].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()
@ -2165,7 +2165,7 @@ class ShuffleBatchJoinTest(test_lib.TestCase):
self.assertAllEqual((2,), batched[1].dense_shape.get_shape().as_list())
self.assertAllEqual((None,), batched[2].get_shape().as_list())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
variables.local_variables_initializer().run()
threads = queue_runner_impl.start_queue_runners()

View File

@ -43,7 +43,7 @@ class MovingAveragesTest(test.TestCase):
decay = 0.25
assign = moving_averages.assign_moving_average(
var, val, decay, zero_debias=False)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose([10.0, 11.0], self.evaluate(var))
assign.op.run()
self.assertAllClose(
@ -57,7 +57,7 @@ class MovingAveragesTest(test.TestCase):
val = constant_op.constant([1.0, 2.0], dtypes.float32)
decay = 0.25
assign = moving_averages.assign_moving_average(var, val, decay)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllClose([0.0, 0.0], self.evaluate(var))
assign.op.run()
self.assertAllClose(
@ -98,7 +98,7 @@ class MovingAveragesTest(test.TestCase):
val = array_ops.placeholder(dtypes.float32, [])
wma = moving_averages.weighted_moving_average(val, decay, weight)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Get the first weighted moving average.
val_1 = 3.0
@ -125,7 +125,7 @@ class MovingAveragesTest(test.TestCase):
val = array_ops.placeholder(dtypes.bfloat16, [])
wma = moving_averages.weighted_moving_average(val, decay, weight)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Get the first weighted moving average.
val_1 = 3.0
@ -164,7 +164,7 @@ class ExponentialMovingAverageTest(test.TestCase):
thirties = _Repeat(30.0, dim)
var0 = variables.Variable(tens, name="v0")
var1 = variables.Variable(thirties, name="v1")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Note that tensor2 is not a Variable but just a plain Tensor resulting
# from the sum operation.
tensor2 = var0 + var1
@ -178,7 +178,7 @@ class ExponentialMovingAverageTest(test.TestCase):
self.assertFalse(avg0 in variables.trainable_variables())
self.assertFalse(avg1 in variables.trainable_variables())
self.assertFalse(avg2 in variables.trainable_variables())
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("v0/ExponentialMovingAverage:0", avg0.name)
self.assertEqual("v1/ExponentialMovingAverage:0", avg1.name)

View File

@ -49,7 +49,7 @@ class QueueRunnerTest(test.TestCase):
var = variables.VariableV1(zero64)
count_up_to = var.count_up_to(3)
queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
threads = qr.create_threads(sess)
self.assertEqual(sorted(t.name for t in threads),
@ -77,7 +77,7 @@ class QueueRunnerTest(test.TestCase):
self.assertEqual(sorted(t.name for t in threads),
["QueueRunnerThread-fifo_queue-CountUpTo:0",
"QueueRunnerThread-fifo_queue-CountUpTo_1:0"])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
for t in threads:
t.start()
for t in threads:
@ -93,7 +93,7 @@ class QueueRunnerTest(test.TestCase):
qr = queue_runner_impl.QueueRunner(queue, [_MockOp("i fail"),
_MockOp("so fail")])
threads = qr.create_threads(sess)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
for t in threads:
t.start()
for t in threads:
@ -140,7 +140,7 @@ class QueueRunnerTest(test.TestCase):
var = variables.VariableV1(zero64)
count_up_to = var.count_up_to(3)
queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
# As the coordinator to stop. The queue runner should
# finish immediately.
@ -196,7 +196,7 @@ class QueueRunnerTest(test.TestCase):
var = variables.VariableV1(zero64)
count_up_to = var.count_up_to(3)
queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
coord = coordinator.Coordinator()
qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
# NOTE that this test does not actually start the threads.
@ -212,7 +212,7 @@ class QueueRunnerTest(test.TestCase):
var = variables.VariableV1(zero64)
count_up_to = var.count_up_to(3)
queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
coord = coordinator.Coordinator()
qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
threads = []
@ -229,7 +229,7 @@ class QueueRunnerTest(test.TestCase):
var = variables.VariableV1(zero64)
count_up_to = var.count_up_to(3)
queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
qr = queue_runner_impl.QueueRunner(queue, [count_up_to,
_MockOp("bad_op")])
threads = qr.create_threads(sess, start=True)

View File

@ -124,8 +124,8 @@ class SaverTest(test.TestCase):
if not context.executing_eagerly():
self.assertEqual(
len(variables.report_uninitialized_variables().eval()), 2)
self.assertEqual(0, len(v2.keys().eval()))
self.assertEqual(0, len(v2.values().eval()))
self.assertEqual(0, len(self.evaluate(v2.keys())))
self.assertEqual(0, len(self.evaluate(v2.values())))
# Restore the saved values in the parameter nodes.
save = saver_module.Saver({"v0": v0, "v1": v1, "v2": v2.saveable})
save.restore(sess, save_path)
@ -331,10 +331,10 @@ class SaverTest(test.TestCase):
self.evaluate(init_all_op)
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(b"k1", v2.keys().eval())
self.assertEqual(30.0, v2.values().eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertEqual(b"k1", self.evaluate(v2.keys()))
self.assertEqual(30.0, self.evaluate(v2.values()))
# Save the initialized values in the file at "save_path"
val = save.save(sess, save_path1)
@ -360,16 +360,16 @@ class SaverTest(test.TestCase):
# Assert that the variables are not initialized.
self.assertEqual(
len(variables.report_uninitialized_variables().eval()), 2)
self.assertEqual(0, len(v2.keys().eval()))
self.assertEqual(0, len(v2.values().eval()))
self.assertEqual(0, len(self.evaluate(v2.keys())))
self.assertEqual(0, len(self.evaluate(v2.values())))
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path2)
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(b"k1", v2.keys().eval())
self.assertEqual(30.0, v2.values().eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertEqual(b"k1", self.evaluate(v2.keys()))
self.assertEqual(30.0, self.evaluate(v2.values()))
@test_util.run_deprecated_v1
def testFilenameTensor(self):
@ -398,7 +398,7 @@ class SaverTest(test.TestCase):
# Build a graph with 1 node, and save and restore for them.
v = variables.VariableV1(np.int64(15), name="v")
save = saver_module.Saver({"v": v}, restore_sequentially=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Save the initialized values in the file at "save_path"
val = save.save(sess, save_path)
@ -416,7 +416,7 @@ class SaverTest(test.TestCase):
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertEqual(np.int64(15), v.eval())
self.assertEqual(np.int64(15), self.evaluate(v))
def testSomeErrors(self):
with ops_lib.Graph().as_default():
@ -478,14 +478,14 @@ class SaverTest(test.TestCase):
v2 = saver_test_utils.CheckpointedOp(name="v2")
v2_init = v2.insert("k1", 30.0)
save = saver_module.Saver([v0, v1, v2.saveable])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
v2_init.run()
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(b"k1", v2.keys().eval())
self.assertEqual(30.0, v2.values().eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertEqual(b"k1", self.evaluate(v2.keys()))
self.assertEqual(30.0, self.evaluate(v2.values()))
# Save the initialized values in the file at "save_path"
val = save.save(sess, save_path)
@ -506,16 +506,16 @@ class SaverTest(test.TestCase):
with self.assertRaisesWithPredicateMatch(
errors_impl.OpError, lambda e: "uninitialized value v1" in e.message):
self.evaluate(v1)
self.assertEqual(0, len(v2.keys().eval()))
self.assertEqual(0, len(v2.values().eval()))
self.assertEqual(0, len(self.evaluate(v2.keys())))
self.assertEqual(0, len(self.evaluate(v2.values())))
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(b"k1", v2.keys().eval())
self.assertEqual(30.0, v2.values().eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
self.assertEqual(b"k1", self.evaluate(v2.keys()))
self.assertEqual(30.0, self.evaluate(v2.values()))
# Build another graph with 2 nodes, initialized
# differently, and a Restore node for them.
@ -525,20 +525,20 @@ class SaverTest(test.TestCase):
v2_2 = saver_test_utils.CheckpointedOp(name="v2")
save2 = saver_module.Saver([v0_2, v1_2, v2_2.saveable])
v2_2.insert("k1000", 3000.0).run()
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Check that the parameter nodes have been initialized.
self.assertEqual(1000.0, v0_2.eval())
self.assertEqual(2000.0, v1_2.eval())
self.assertEqual(b"k1000", v2_2.keys().eval())
self.assertEqual(3000.0, v2_2.values().eval())
self.assertEqual(1000.0, self.evaluate(v0_2))
self.assertEqual(2000.0, self.evaluate(v1_2))
self.assertEqual(b"k1000", self.evaluate(v2_2.keys()))
self.assertEqual(3000.0, self.evaluate(v2_2.values()))
# Restore the values saved earlier in the parameter nodes.
save2.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, v0_2.eval())
self.assertEqual(20.0, v1_2.eval())
self.assertEqual(b"k1", v2_2.keys().eval())
self.assertEqual(30.0, v2_2.values().eval())
self.assertEqual(10.0, self.evaluate(v0_2))
self.assertEqual(20.0, self.evaluate(v1_2))
self.assertEqual(b"k1", self.evaluate(v2_2.keys()))
self.assertEqual(30.0, self.evaluate(v2_2.values()))
def _SaveAndLoad(self, var_name, var_value, other_value, save_path):
with self.session(graph=ops_lib.Graph()) as sess:
@ -582,14 +582,14 @@ class SaverTest(test.TestCase):
with sess.graph.device(test.gpu_device_name()):
v0_1 = variables.VariableV1(123.45)
save = saver_module.Saver({"v0": v0_1})
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
save.save(sess, save_path)
with session.Session("", graph=ops_lib.Graph()) as sess:
with sess.graph.device(test.gpu_device_name()):
v0_2 = variables.VariableV1(543.21)
save = saver_module.Saver({"v0": v0_2})
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
def testSharedServerOnGPU(self):
if not test.is_gpu_available():
@ -599,14 +599,14 @@ class SaverTest(test.TestCase):
with sess.graph.device(test.gpu_device_name()):
v0_1 = variables.VariableV1(123.45)
save = saver_module.Saver({"v0": v0_1}, sharded=True, allow_empty=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
save.save(sess, save_path)
with session.Session("", graph=ops_lib.Graph()) as sess:
with sess.graph.device(test.gpu_device_name()):
v0_2 = variables.VariableV1(543.21)
save = saver_module.Saver({"v0": v0_2}, sharded=True, allow_empty=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
def testVariables(self):
save_path = os.path.join(self.get_temp_dir(), "variables")
@ -627,10 +627,10 @@ class SaverTest(test.TestCase):
# Saver with no arg, defaults to 'all variables'.
save = saver_module.Saver()
save.restore(sess, save_path)
self.assertAllClose(1.0, one.eval())
self.assertAllClose([2.0, 2.0, 2.0], twos.eval())
self.assertEqual(b"k1", v2.keys().eval())
self.assertEqual(3.0, v2.values().eval())
self.assertAllClose(1.0, self.evaluate(one))
self.assertAllClose([2.0, 2.0, 2.0], self.evaluate(twos))
self.assertEqual(b"k1", self.evaluate(v2.keys()))
self.assertEqual(3.0, self.evaluate(v2.values()))
def testVarListShouldBeEmptyInDeferredBuild(self):
with ops_lib.Graph().as_default():
@ -664,8 +664,8 @@ class SaverTest(test.TestCase):
# Saver with no arg, defaults to 'all variables'.
save = saver_module.Saver()
save.restore(sess, save_path)
self.assertAllClose(1.0, one.eval())
self.assertAllClose([2.0, 2.0, 2.0], twos.eval())
self.assertAllClose(1.0, self.evaluate(one))
self.assertAllClose([2.0, 2.0, 2.0], self.evaluate(twos))
@test_util.run_v1_only("b/120545219")
def testReshape(self):
@ -691,7 +691,8 @@ class SaverTest(test.TestCase):
var = variables.VariableV1([[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]])
save = saver_module.Saver(reshape=True)
save.restore(sess, save_path)
self.assertAllClose([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]], var.eval())
self.assertAllClose([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]],
self.evaluate(var))
@test_util.run_in_graph_and_eager_modes
def testSaveWithGlobalStep(self, pad_step_number=False):
@ -726,7 +727,6 @@ class SaverTest(test.TestCase):
def testSaveWithGlobalStepWithPadding(self):
self.testSaveWithGlobalStep(pad_step_number=True)
@test_util.run_v1_only("b/120545219")
def testSaveToNonexistingPath(self):
file_io.write_string_to_file(
os.path.join(self.get_temp_dir(), "actually_a_file"), "")
@ -753,8 +753,8 @@ class SaverTest(test.TestCase):
self.evaluate(init_all_op)
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
# Save the graph.
save.save(sess, save_path)
@ -763,13 +763,12 @@ class SaverTest(test.TestCase):
# Restore the saved values in the parameter nodes.
save.restore(sess, save_path)
# Check that the parameter nodes have been restored.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
except ValueError as exc:
error_msg_template = "Parent directory of {} doesn't exist, can't save."
self.assertEqual(error_msg_template.format(save_path), str(exc))
@test_util.run_deprecated_v1
def testSaveToURI(self):
# ParseURI functions don't work on Windows yet.
# TODO(jhseu): Remove this check when it works.
@ -789,8 +788,8 @@ class SaverTest(test.TestCase):
self.evaluate(init_all_op)
# Check that the parameter nodes have been initialized.
self.assertEqual(10.0, v0.eval())
self.assertEqual(20.0, v1.eval())
self.assertEqual(10.0, self.evaluate(v0))
self.assertEqual(20.0, self.evaluate(v1))
save.save(sess, save_path)
def testSaveRestoreAndValidateVariableDtype(self):
@ -835,7 +834,7 @@ class SaverTest(test.TestCase):
orig_vars = _model()
self.evaluate(variables.global_variables_initializer())
save = saver_module.Saver(max_to_keep=1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
save.save(sess, save_dir)
orig_vals = self.evaluate(orig_vars)
@ -882,7 +881,7 @@ class SaveRestoreShardedTest(test.TestCase):
},
write_version=self._WRITE_VERSION,
sharded=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
t0.insert("k1", 30.0).run()
t1.insert("k2", 40.0).run()
val = save.save(sess, save_path)
@ -908,15 +907,15 @@ class SaveRestoreShardedTest(test.TestCase):
},
write_version=self._WRITE_VERSION,
sharded=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
t0.insert("k11", 33.0).run()
self.assertEqual(111, v0.eval())
self.assertEqual(b"k11", t0.keys().eval())
self.assertEqual(33.0, t0.values().eval())
self.assertEqual(111, self.evaluate(v0))
self.assertEqual(b"k11", self.evaluate(t0.keys()))
self.assertEqual(33.0, self.evaluate(t0.values()))
save.restore(sess, save_path + "-00000-of-00002")
self.assertEqual(10, v0.eval())
self.assertEqual(b"k1", t0.keys().eval())
self.assertEqual(30.0, t0.values().eval())
self.assertEqual(10, self.evaluate(v0))
self.assertEqual(b"k1", self.evaluate(t0.keys()))
self.assertEqual(30.0, self.evaluate(t0.values()))
# Restore different ops from shard 1 of the saved files.
with session.Session(
@ -932,15 +931,15 @@ class SaveRestoreShardedTest(test.TestCase):
},
write_version=self._WRITE_VERSION,
sharded=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
t1.insert("k22", 44.0).run()
self.assertEqual(222, v1.eval())
self.assertEqual(b"k22", t1.keys().eval())
self.assertEqual(44.0, t1.values().eval())
self.assertEqual(222, self.evaluate(v1))
self.assertEqual(b"k22", self.evaluate(t1.keys()))
self.assertEqual(44.0, self.evaluate(t1.values()))
save.restore(sess, save_path + "-00001-of-00002")
self.assertEqual(20, v1.eval())
self.assertEqual(b"k2", t1.keys().eval())
self.assertEqual(40.0, t1.values().eval())
self.assertEqual(20, self.evaluate(v1))
self.assertEqual(b"k2", self.evaluate(t1.keys()))
self.assertEqual(40.0, self.evaluate(t1.values()))
# Now try a restore with the sharded filename.
with session.Session(
@ -961,26 +960,26 @@ class SaveRestoreShardedTest(test.TestCase):
},
write_version=self._WRITE_VERSION,
sharded=True)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
t0.insert("k11", 33.0).run()
t1.insert("k22", 44.0).run()
self.assertEqual(111, v0.eval())
self.assertEqual(222, v1.eval())
self.assertEqual(b"k11", t0.keys().eval())
self.assertEqual(33.0, t0.values().eval())
self.assertEqual(b"k22", t1.keys().eval())
self.assertEqual(44.0, t1.values().eval())
self.assertEqual(111, self.evaluate(v0))
self.assertEqual(222, self.evaluate(v1))
self.assertEqual(b"k11", self.evaluate(t0.keys()))
self.assertEqual(33.0, self.evaluate(t0.values()))
self.assertEqual(b"k22", self.evaluate(t1.keys()))
self.assertEqual(44.0, self.evaluate(t1.values()))
save_path = os.path.join(self.get_temp_dir(), "sharded_basics")
if save._write_version is saver_pb2.SaverDef.V1:
save.restore(sess, save_path + "-?????-of-?????")
else:
save.restore(sess, save_path)
self.assertEqual(10, v0.eval())
self.assertEqual(20, v1.eval())
self.assertEqual(b"k1", t0.keys().eval())
self.assertEqual(30.0, t0.values().eval())
self.assertEqual(b"k2", t1.keys().eval())
self.assertEqual(40.0, t1.values().eval())
self.assertEqual(10, self.evaluate(v0))
self.assertEqual(20, self.evaluate(v1))
self.assertEqual(b"k1", self.evaluate(t0.keys()))
self.assertEqual(30.0, self.evaluate(t0.values()))
self.assertEqual(b"k2", self.evaluate(t1.keys()))
self.assertEqual(40.0, self.evaluate(t1.values()))
if save._write_version is saver_pb2.SaverDef.V1:
self.assertEqual(
@ -1028,7 +1027,7 @@ class SaveRestoreShardedTest(test.TestCase):
else:
vs = [variables.VariableV1(rnd, name=var_name)]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
if call_saver_with_dict:
saver = saver_module.Saver({var_name: vs[0]})
else:
@ -1056,7 +1055,7 @@ class SaveRestoreShardedTest(test.TestCase):
name=var_name)
]
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
if call_saver_with_dict:
saver = saver_module.Saver({
var_name: new_vs[0]
@ -1203,7 +1202,7 @@ class MaxToKeepTest(test.TestCase):
with self.cached_session() as sess:
v = variables.VariableV1(10.0, name="v")
save = saver_module.Saver({"v": v}, max_to_keep=2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual([], save.last_checkpoints)
s1 = save.save(sess, os.path.join(save_dir, "s1"))
@ -1388,7 +1387,7 @@ class MaxToKeepTest(test.TestCase):
"v0": v0,
"v1": v1
}, sharded=True, max_to_keep=2)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual([], save.last_checkpoints)
s1 = save.save(sess, os.path.join(save_dir, "s1"))
@ -1434,14 +1433,13 @@ class MaxToKeepTest(test.TestCase):
self.assertTrue(
gfile.Exists(checkpoint_management.meta_graph_filename(s3)))
@test_util.run_deprecated_v1
def testNoMaxToKeep(self):
save_dir = self._get_test_dir("no_max_to_keep")
save_dir2 = self._get_test_dir("max_to_keep_0")
with self.cached_session() as sess:
v = variables.VariableV1(10.0, name="v")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Test max_to_keep being None.
save = saver_module.Saver({"v": v}, max_to_keep=None)
@ -1463,14 +1461,13 @@ class MaxToKeepTest(test.TestCase):
self.assertEqual([], save2.last_checkpoints)
self.assertTrue(checkpoint_management.checkpoint_exists(s2))
@test_util.run_deprecated_v1
def testNoMetaGraph(self):
save_dir = self._get_test_dir("no_meta_graph")
with self.cached_session() as sess:
v = variables.VariableV1(10.0, name="v")
save = saver_module.Saver({"v": v})
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
s1 = save.save(sess, os.path.join(save_dir, "s1"), write_meta_graph=False)
self.assertTrue(checkpoint_management.checkpoint_exists(s1))
@ -1487,7 +1484,6 @@ class KeepCheckpointEveryNHoursTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
@test.mock.patch.object(saver_module, "time")
@test_util.run_deprecated_v1
def testNonSharded(self, mock_time):
save_dir = self._get_test_dir("keep_checkpoint_every_n_hours")
@ -1607,7 +1603,6 @@ class SaveRestoreWithVariableNameMap(test.TestCase):
self.assertEqual(20.0, self.evaluate(v1))
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testNonReshapeResourceVariable(self):
self._testNonReshape(resource_variable_ops.ResourceVariable)
@ -1714,7 +1709,7 @@ class MetaGraphTest(test.TestCase):
saver1 = saver_module.Saver({"v1": v1}, name="saver1")
ops_lib.add_to_collection("savers", saver0)
ops_lib.add_to_collection("savers", saver1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Saves to different checkpoints.
saver0.save(sess, saver0_ckpt)
saver1.save(sess, saver1_ckpt)
@ -1760,7 +1755,8 @@ class MetaGraphTest(test.TestCase):
new_saver0.restore(sess, saver0_ckpt)
v0 = sess.graph.get_tensor_by_name("v0:0")
v1 = sess.graph.get_tensor_by_name("v1:0")
self.assertAllEqual([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]], v0.eval())
self.assertAllEqual([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]],
self.evaluate(v0))
self.assertEqual([3, 2], v0.get_shape())
self.assertEqual([], v1.get_shape())
with self.assertRaisesWithPredicateMatch(
@ -1770,7 +1766,7 @@ class MetaGraphTest(test.TestCase):
new_saver1 = savers[1]
new_saver1.restore(sess, saver1_ckpt)
v1 = sess.graph.get_tensor_by_name("v1:0")
self.assertEqual(11.0, v1.eval())
self.assertEqual(11.0, self.evaluate(v1))
@test_util.run_v1_only("b/120545219")
def testMultiSaverCollection(self):
@ -1794,7 +1790,7 @@ class MetaGraphTest(test.TestCase):
saver1 = saver_module.Saver({"v1": v1}, name="saver1")
ops_lib.add_to_collection("savers", saver0)
ops_lib.add_to_collection("savers", saver1)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Saves to different checkpoints.
saver0.save(sess, saver0_ckpt)
@ -1878,7 +1874,7 @@ class MetaGraphTest(test.TestCase):
# The names are different and will work.
slice_saver = saver_module.Saver({"first": v1, "second": v2})
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
# Exports to meta_graph
meta_graph_def = slice_saver.export_meta_graph(filename)
@ -2093,7 +2089,6 @@ class MetaGraphTest(test.TestCase):
return i + 1, x + r
self._testWhileLoopAndGradientSerDes(body)
@test_util.run_deprecated_v1
def testNestedControlFlowSerDes(self):
# Test while loop in a cond in a while loop.
# pylint: disable=g-long-lambda
@ -2745,7 +2740,7 @@ class ScopedGraphTest(test.TestCase):
graph.add_to_collection(ops_lib.GraphKeys.SAVERS, saver2)
with self.session(graph=graph) as sess:
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
saver1.save(sess, saver1_ckpt, write_state=False)
saver2.save(sess, saver2_ckpt, write_state=False)
@ -2762,7 +2757,7 @@ class ScopedGraphTest(test.TestCase):
with self.session(graph=graph1) as sess:
saver_list1[0].restore(sess, saver1_ckpt)
self.assertEqual(1.0, var_dict1["variable1:0"].eval())
self.assertEqual(1.0, self.evaluate(var_dict1["variable1:0"]))
graph2 = ops_lib.Graph()
var_dict2 = meta_graph.copy_scoped_meta_graph(
@ -2777,7 +2772,7 @@ class ScopedGraphTest(test.TestCase):
with self.session(graph=graph2) as sess:
saver_list2[0].restore(sess, saver2_ckpt)
self.assertEqual(2.0, var_dict2["variable2:0"].eval())
self.assertEqual(2.0, self.evaluate(var_dict2["variable2:0"]))
class _OwnsAVariableSimple(checkpointable_base.CheckpointableBase):
@ -3010,7 +3005,6 @@ class CheckpointableCompatibilityTests(test.TestCase):
"a mismatch between the current graph and the graph"):
a_saver.restore(sess=sess, save_path=save_path)
@test_util.run_v1_only("b/120545219")
def testLoadFromObjectBasedGraph(self):
checkpoint_directory = self.get_temp_dir()
checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")

View File

@ -38,7 +38,7 @@ class SlotCreatorTest(test.TestCase):
v = variables.Variable([1.0, 2.5], name="var")
slot = slot_creator.create_slot(v, v.initialized_value(), name="slot")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
@ -51,7 +51,7 @@ class SlotCreatorTest(test.TestCase):
v = constant_op.constant([1.0, 2.5], name="const")
slot = slot_creator.create_slot(v, v * 2, name="slot")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
@ -66,7 +66,7 @@ class SlotCreatorTest(test.TestCase):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
@ -88,7 +88,7 @@ class SlotCreatorTest(test.TestCase):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("var/slot", slot.op.name)
self.assertEqual([2], array_ops.shape(slot).eval())
@ -102,7 +102,7 @@ class SlotCreatorTest(test.TestCase):
with ops.control_dependencies(None):
slot = slot_creator.create_zeros_slot(v, name="slot")
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], slot.get_shape().as_list())
@ -118,7 +118,7 @@ class SlotCreatorTest(test.TestCase):
slot = slot_creator.create_zeros_slot(
v, name="slot", dtype=dtypes.float64)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertEqual("const/slot", slot.op.name)
self.assertEqual([2], array_ops.shape(slot).eval())

View File

@ -53,7 +53,7 @@ class TrainingOpsTest(TensorFlowTestCase):
self.setUp()
with self.session(use_gpu=use_gpu):
var = variables.VariableV1(x)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(x, self.evaluate(var))
apply_sgd = training_ops.apply_gradient_descent(var, alpha, delta)
out = self.evaluate(apply_sgd)
@ -74,7 +74,7 @@ class TrainingOpsTest(TensorFlowTestCase):
with self.session(use_gpu=use_gpu):
var = variables.VariableV1(x)
accum = variables.VariableV1(y)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(x, self.evaluate(var))
apply_adagrad = training_ops.apply_adagrad(var, accum, lr, grad)
@ -99,7 +99,7 @@ class TrainingOpsTest(TensorFlowTestCase):
var = variables.VariableV1(x)
accum = variables.VariableV1(y)
linear = variables.VariableV1(z)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(x, self.evaluate(var))
apply_ftrl = training_ops.apply_ftrl(var, accum, linear, grad, lr, l1, l2,
@ -156,7 +156,7 @@ class TrainingOpsTest(TensorFlowTestCase):
with self.session(use_gpu=False):
var = variables.VariableV1(x)
accum = variables.VariableV1(y)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(x, self.evaluate(var))
sparse_apply_adagrad = training_ops.sparse_apply_adagrad(
@ -187,7 +187,7 @@ class TrainingOpsTest(TensorFlowTestCase):
var = variables.VariableV1(x)
accum = variables.VariableV1(y)
linear = variables.VariableV1(z)
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(x, self.evaluate(var))
sparse_apply_ftrl = training_ops.sparse_apply_ftrl(
@ -285,7 +285,7 @@ class TrainingOpsTest(TensorFlowTestCase):
beta2_power_t = variables.VariableV1(beta2_power)
lr_t = constant_op.constant(lr, self._toType(var.dtype), [])
epsilon_t = constant_op.constant(epsilon, self._toType(var.dtype), [])
variables.global_variables_initializer().run()
self.evaluate(variables.global_variables_initializer())
self.assertAllCloseAccordingToType(var, self.evaluate(var_t))
new_var, _, _ = self._adamUpdateNumpy(var, grad, t, m, v, lr, beta1,