Automated rollback of change 142698388
Change: 142716019
parent 21d8bb6dba
commit d8703c3e52
@@ -55,7 +55,7 @@ class AvgPool2DTest(tf.test.TestCase):
     output = tf.contrib.layers.avg_pool2d(images, [3, 3],
                                           outputs_collections='outputs')
     output_collected = tf.get_collection('outputs')[0]
-    self.assertEqual(output_collected.aliases, ['AvgPool2D'])
+    self.assertEqual(output_collected.alias, 'AvgPool2D')
     self.assertEqual(output_collected, output)
 
   def testCreateSquareAvgPool(self):

@@ -123,7 +123,7 @@ class PoolTest(tf.test.TestCase):
                                      pooling_type='AVG',
                                      outputs_collections='outputs')
     output_collected = tf.get_collection('outputs')[0]
-    self.assertEqual(output_collected.aliases, ['avg_pool'])
+    self.assertEqual(output_collected.alias, 'avg_pool')
     self.assertEqual(output_collected, output)
 
   def testCreateSquareAvgPool(self):

@@ -350,7 +350,7 @@ class ConvolutionTest(tf.test.TestCase):
                                      outputs_collections='outputs',
                                      scope='Conv')
     output_collected = tf.get_collection('outputs')[0]
-    self.assertEqual(output_collected.aliases, ['fe/Conv'])
+    self.assertEqual(output_collected.alias, 'fe/Conv')
     self.assertEqual(output_collected, conv)
 
   def testCreateConvWithoutActivation(self):

@@ -1218,7 +1218,7 @@ class DropoutTest(tf.test.TestCase):
     images = tf.random_uniform((5, height, width, 3), seed=1)
     output = tf.contrib.layers.dropout(images, outputs_collections='outputs')
     c_output = tf.get_collection('outputs')[0]
-    self.assertEqual(c_output.aliases, ['Dropout'])
+    self.assertEqual(c_output.alias, 'Dropout')
     self.assertEqual(c_output, output)
 
   def testDropout(self):

@@ -1295,7 +1295,7 @@ class FlattenTest(tf.test.TestCase):
     images = np.random.uniform(size=(5, height, width, 3))
     output = tf.contrib.layers.flatten(images, outputs_collections='outputs')
     c_output = tf.get_collection('outputs')[0]
-    self.assertEqual(c_output.aliases, ['Flatten'])
+    self.assertEqual(c_output.alias, 'Flatten')
     self.assertEqual(c_output, output)
 
   def testFlatten4D(self):

@@ -1434,7 +1434,7 @@ class FCTest(tf.test.TestCase):
                                            outputs_collections='outputs',
                                            scope='fc')
     output_collected = tf.get_collection('outputs')[0]
-    self.assertEqual(output_collected.aliases, ['fe/fc'])
+    self.assertEqual(output_collected.alias, 'fe/fc')
     self.assertEqual(output_collected, fc)
 
   def testCreateFcCreatesWeightsAndBiasesVars(self):

@@ -2575,7 +2575,7 @@ class MaxPool2DTest(tf.test.TestCase):
     output = tf.contrib.layers.max_pool2d(images, [3, 3],
                                           outputs_collections='outputs')
     output_collected = tf.get_collection('outputs')[0]
-    self.assertEqual(output_collected.aliases, ['MaxPool2D'])
+    self.assertEqual(output_collected.alias, 'MaxPool2D')
     self.assertEqual(output_collected, output)
 
   def testCreateSquareMaxPool(self):

@@ -2634,7 +2634,7 @@ class OneHotEncodingTest(tf.test.TestCase):
     output = tf.contrib.layers.one_hot_encoding(labels, num_classes=3,
                                                 outputs_collections='outputs')
     c_output = tf.get_collection('outputs')[0]
-    self.assertEqual(c_output.aliases, ['OneHotEncoding'])
+    self.assertEqual(c_output.alias, 'OneHotEncoding')
    self.assertEqual(c_output, output)
 
   def testOneHotEncoding(self):
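Every test hunk above makes the same one-line change: a layer built with outputs_collections='outputs' registers its output tensor in that collection, and after this rollback the collected tensor carries a single string alias attribute instead of an aliases list. A minimal, illustrative sketch of that pattern, assuming the TF 1.x tf.contrib.layers API exercised by these tests:

import tensorflow as tf

# Any contrib layer that accepts outputs_collections adds its output tensor
# to the named collection.
images = tf.random_uniform((5, 8, 8, 3), seed=1)
output = tf.contrib.layers.max_pool2d(images, [3, 3],
                                      outputs_collections='outputs')

# The collected tensor is the layer output itself, tagged with one alias string.
collected = tf.get_collection('outputs')[0]
assert collected is output
assert collected.alias == 'MaxPool2D'  # post-rollback: a string, not ['MaxPool2D']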
@@ -45,93 +45,71 @@ def collect_named_outputs(collections, alias, outputs):
   It is useful to collect end-points or tags for summaries. Example of usage:
 
   logits = collect_named_outputs('end_points', 'inception_v3/logits', logits)
-  assert 'inception_v3/logits' in logits.aliases
+  assert logits.alias == 'inception_v3/logits'
 
   Args:
     collections: A collection or list of collections. If None skip collection.
-    alias: String to append to the list of aliases of outputs, for example,
-      'inception_v3/conv1'.
+    alias: String, alias to name the outputs, ex. 'inception_v3/conv1'
     outputs: Tensor, an output tensor to collect
 
   Returns:
     The outputs Tensor to allow inline call.
   """
-  append_tensor_alias(outputs, alias)
+  # Remove ending '/' if present.
+  if alias[-1] == '/':
+    alias = alias[:-1]
+  outputs.alias = alias
   if collections:
     ops.add_to_collections(collections, outputs)
   return outputs
 
 
-def append_tensor_alias(tensor, alias):
-  """Append an alias to the list of aliases of the tensor.
-
-  Args:
-    tensor: A `Tensor`.
-    alias: String, to add to the list of aliases of the tensor.
-
-  Returns:
-    The tensor with a new alias appended to its list of aliases.
-  """
-  # Remove ending '/' if present.
-  if alias[-1] == '/':
-    alias = alias[:-1]
-  if hasattr(tensor, 'aliases'):
-    tensor.aliases.append(alias)
-  else:
-    tensor.aliases = [alias]
-  return tensor
-
-
-def gather_tensors_aliases(tensors):
+def gather_tensors_alias(tensors):
   """Given a list of tensors, gather their aliases.
 
   If the tensor does not have an alias it would default to its name.
 
   Args:
     tensors: A list of `Tensors`.
 
   Returns:
-    A list of strings with the aliases of all tensors.
+    A list of strings with the alias of each tensor.
   """
-  aliases = []
-  for tensor in tensors:
-    aliases += get_tensor_aliases(tensor)
-  return aliases
+  return [get_tensor_alias(tensor) for tensor in tensors]
 
 
-def get_tensor_aliases(tensor):
-  """Get a list with the aliases of the input tensor.
+def get_tensor_alias(tensor):
+  """Given a tensor gather its alias, its op.name or its name.
 
-  If the tensor does not have any alias, it would default to its its op.name or
-  its name.
+  If the tensor does not have an alias it would default to its name.
 
   Args:
     tensor: A `Tensor`.
 
   Returns:
-    A list of strings with the aliases of the tensor.
+    A string with the alias of the tensor.
   """
-  if hasattr(tensor, 'aliases'):
-    aliases = tensor.aliases
+  if hasattr(tensor, 'alias'):
+    alias = tensor.alias
   else:
     if tensor.name[-2:] == ':0':
       # Use op.name for tensor ending in :0
-      aliases = [tensor.op.name]
+      alias = tensor.op.name
     else:
-      aliases = [tensor.name]
-  return aliases
+      alias = tensor.name
+  return alias
 
 
 def convert_collection_to_dict(collection):
-  """Returns an OrderedDict of Tensors with their aliases as keys.
+  """Returns an OrderedDict of Tensors using get_tensor_alias as key.
 
   Args:
     collection: A collection.
 
   Returns:
-    An OrderedDict of {alias: tensor}
+    An OrderedDict of {get_tensor_alias(tensor): tensor}
   """
-  return OrderedDict((alias, tensor)
-                     for tensor in ops.get_collection(collection)
-                     for alias in get_tensor_aliases(tensor))
+  return OrderedDict((get_tensor_alias(t), t) for t in ops.get_collection(collection))
 
 
 def constant_value(value_or_tensor_or_var, dtype=None):
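The restored helpers in the hunk above form a single-alias pipeline. The sketch below is illustrative only: it assumes the post-rollback API shown above and the contrib utils module that the tests below refer to as `utils` (the full import path used here is an assumption, not stated in this diff):

import tensorflow as tf
from tensorflow.contrib.layers.python.layers import utils  # assumed path for `utils`

t1 = tf.constant(1.0, name='t1')
t2 = tf.constant(2.0, name='t2')

# collect_named_outputs strips a trailing '/', sets outputs.alias, and adds the
# tensor to the given collection(s).
utils.collect_named_outputs('end_points', 'net/conv1/', t1)  # t1.alias == 'net/conv1'
tf.add_to_collection('end_points', t2)                       # no alias set on t2

# get_tensor_alias falls back to op.name (or name) when no alias is present,
# so gather_tensors_alias returns one string per tensor.
aliases = utils.gather_tensors_alias(tf.get_collection('end_points'))
# aliases == ['net/conv1', 't2']

# convert_collection_to_dict keys each collected tensor by get_tensor_alias(tensor).
end_points = utils.convert_collection_to_dict('end_points')
# OrderedDict([('net/conv1', t1), ('t2', t2)])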
@@ -194,18 +194,8 @@ class CollectNamedOutputsTest(tf.test.TestCase):
     t2 = tf.constant(2.0, name='t2')
     utils.collect_named_outputs('end_points', 'a1', t1)
     utils.collect_named_outputs('end_points', 'a2', t2)
-    self.assertEqual(t1.aliases, ['a1'])
-    self.assertEqual(t2.aliases, ['a2'])
-
-  def test_multiple_aliases(self):
-    t1 = tf.constant(1.0, name='t1')
-    t2 = tf.constant(2.0, name='t2')
-    utils.collect_named_outputs('end_points', 'a11', t1)
-    utils.collect_named_outputs('end_points', 'a12', t1)
-    utils.collect_named_outputs('end_points', 'a21', t2)
-    utils.collect_named_outputs('end_points', 'a22', t2)
-    self.assertEqual(t1.aliases, ['a11', 'a12'])
-    self.assertEqual(t2.aliases, ['a21', 'a22'])
+    self.assertEqual(t1.alias, 'a1')
+    self.assertEqual(t2.alias, 'a2')
 
   def test_gather_aliases(self):
     t1 = tf.constant(1.0, name='t1')

@@ -214,18 +204,8 @@ class CollectNamedOutputsTest(tf.test.TestCase):
     utils.collect_named_outputs('end_points', 'a1', t1)
     utils.collect_named_outputs('end_points', 'a2', t2)
     tf.add_to_collection('end_points', t3)
-    aliases = utils.gather_tensors_aliases(tf.get_collection('end_points'))
-    self.assertEqual(aliases, ['a1', 'a2', 't3'])
-
-  def test_convert_collection_to_dict(self):
-    t1 = tf.constant(1.0, name='t1')
-    t2 = tf.constant(2.0, name='t2')
-    utils.collect_named_outputs('end_points', 'a1', t1)
-    utils.collect_named_outputs('end_points', 'a21', t2)
-    utils.collect_named_outputs('end_points', 'a22', t2)
-    end_points = utils.convert_collection_to_dict('end_points')
-    self.assertEqual(end_points.keys(), ['a1', 'a21', 'a22'])
-    self.assertEqual(end_points.values(), [t1, t2, t2])
+    aliases = utils.gather_tensors_alias(tf.get_collection('end_points'))
+    self.assertListEqual(aliases, ['a1', 'a2', 't3'])
 
 
 class NPositiveIntegersTest(tf.test.TestCase):
@@ -169,25 +169,19 @@ class ResnetUtilsTest(tf.test.TestCase):
     _, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
     expected = [
         'tiny/block1/unit_1/bottleneck_v1/shortcut',
-        'tiny/block1/unit_1/bottleneck_v1/shortcut/BatchNorm',
         'tiny/block1/unit_1/bottleneck_v1/conv1',
         'tiny/block1/unit_1/bottleneck_v1/conv2',
         'tiny/block1/unit_1/bottleneck_v1/conv3',
-        'tiny/block1/unit_1/bottleneck_v1/conv3/BatchNorm',
         'tiny/block1/unit_2/bottleneck_v1/conv1',
         'tiny/block1/unit_2/bottleneck_v1/conv2',
         'tiny/block1/unit_2/bottleneck_v1/conv3',
-        'tiny/block1/unit_2/bottleneck_v1/conv3/BatchNorm',
         'tiny/block2/unit_1/bottleneck_v1/shortcut',
-        'tiny/block2/unit_1/bottleneck_v1/shortcut/BatchNorm',
         'tiny/block2/unit_1/bottleneck_v1/conv1',
         'tiny/block2/unit_1/bottleneck_v1/conv2',
         'tiny/block2/unit_1/bottleneck_v1/conv3',
-        'tiny/block2/unit_1/bottleneck_v1/conv3/BatchNorm',
         'tiny/block2/unit_2/bottleneck_v1/conv1',
         'tiny/block2/unit_2/bottleneck_v1/conv2',
-        'tiny/block2/unit_2/bottleneck_v1/conv3',
-        'tiny/block2/unit_2/bottleneck_v1/conv3/BatchNorm']
+        'tiny/block2/unit_2/bottleneck_v1/conv3']
     self.assertItemsEqual(expected, end_points)
 
   def _stack_blocks_nondense(self, net, blocks):