var = list() --> var = []

This commit is contained in:
Lukas Geiger 2019-02-11 23:26:47 +01:00
parent f370495c73
commit d3c1e397f6
10 changed files with 12 additions and 13 deletions
tensorflow
contrib
distributions/python/ops/bijectors
kinesis/python/kernel_tests
examples
python

View File

@@ -266,7 +266,7 @@ class BatchNormalization(bijector.Bijector):
else:
# At training-time, ildj is computed from the mean and log-variance across
# the current minibatch.
_, v = nn.moments(y, axes=reduction_axes, keep_dims=True)
_, v = nn.moments(y, axes=reduction_axes, keepdims=True)
log_variance = math_ops.log(v + self.batchnorm.epsilon)
# `gamma` and `log Var(y)` reductions over event_dims.

View File

@@ -110,7 +110,7 @@ class KinesisDatasetTest(test.TestCase):
init_batch_op = iterator.make_initializer(batch_dataset)
get_next = iterator.get_next()
data = list()
data = []
with self.cached_session() as sess:
# Basic test: read from shard 0 of stream 2.
sess.run(

View File

@@ -84,7 +84,7 @@ def word2vec_basic(log_dir):
dictionary = dict()
for word, _ in count:
dictionary[word] = len(dictionary)
data = list()
data = []
unk_count = 0
for word in words:
index = dictionary.get(word, 0)

View File

@@ -254,7 +254,7 @@
" dictionary = dict()\n",
" for word, _ in count:\n",
" dictionary[word] = len(dictionary)\n",
" data = list()\n",
" data = []\n",
" unk_count = 0\n",
" for word in words:\n",
" if word in dictionary:\n",

View File

@@ -554,7 +554,7 @@
" return output_gate * tf.tanh(state), state\n",
"\n",
" # Input data.\n",
" train_data = list()\n",
" train_data = []\n",
" for _ in range(num_unrollings + 1):\n",
" train_data.append(\n",
" tf.placeholder(tf.float32, shape=[batch_size,vocabulary_size]))\n",
@@ -562,7 +562,7 @@
" train_labels = train_data[1:] # labels are inputs shifted by one time step.\n",
"\n",
" # Unrolled LSTM loop.\n",
" outputs = list()\n",
" outputs = []\n",
" output = saved_output\n",
" state = saved_state\n",
" for i in train_inputs:\n",

View File

@@ -582,8 +582,8 @@ class Function(object):
concrete_functions.extend(
self._stateless_fn._function_cache.all_values())
# pylint: enable=protected-access
deduplicated_concrete_functions = list()
seen_signatures = list()
deduplicated_concrete_functions = []
seen_signatures = []
# We are using a list so that:
# - the returned collection is deterministic, and
# - we can use a custom equality operator (is_same_structure).

View File

@@ -1247,7 +1247,7 @@ class FIFOQueueTest(test.TestCase):
def testSelectQueue(self):
with self.cached_session():
num_queues = 10
qlist = list()
qlist = []
for _ in xrange(num_queues):
qlist.append(data_flow_ops.FIFOQueue(10, dtypes_lib.float32))
# Enqueue/Dequeue into a dynamically selected queue

View File

@@ -1360,7 +1360,7 @@ class PaddingFIFOQueueTest(test.TestCase):
def testSelectQueue(self):
with self.cached_session():
num_queues = 10
qlist = list()
qlist = []
for _ in xrange(num_queues):
qlist.append(
data_flow_ops.PaddingFIFOQueue(10, dtypes_lib.float32, ((),)))

View File

@@ -1197,7 +1197,7 @@ class RandomShuffleQueueTest(test.TestCase):
def testSelectQueue(self):
with self.cached_session():
num_queues = 10
qlist = list()
qlist = []
for _ in xrange(num_queues):
qlist.append(
data_flow_ops.RandomShuffleQueue(10, 0, dtypes_lib.float32))

View File

@@ -669,8 +669,7 @@ class TPUEmbedding(object):
Arguments for `enqueue_tpu_embedding_sparse_tensor_batch()`.
"""
sample_idcs, embedding_idcs, aggregation_weights, table_ids = (
list(), list(), list(), list())
sample_idcs, embedding_idcs, aggregation_weights, table_ids = [], [], [], []
for table_id, table in enumerate(self._table_to_features_dict):
features = self._table_to_features_dict[table]
for feature in features: