Fix several DeprecationWarning: invalid escape sequence
Signed-off-by: Mickaël Schoentgen <contact@tiger-222.fr>
parent 41c30afbfb
commit 75f12a5020
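Every hunk below applies the same one-character fix: a string literal containing backslash sequences that Python does not define (\(, \w, \., \d, \[, \P, ...) gains an r prefix, so the backslashes stay literal and CPython no longer emits "DeprecationWarning: invalid escape sequence" when compiling the module. A standalone sketch of the behaviour (not part of the commit; the helper name and demo strings are illustrative only):

import warnings

def escape_warnings(source):
  # compile() reports invalid escape sequences through the warnings
  # machinery, so they can be captured without importing a module.
  with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(source, "<demo>", "exec")
  return [str(w.message) for w in caught]

# '\(' and '\)' are not recognized escapes: Python 3.6+ warns at compile
# time, and the warning is slated to become an error in a later release.
print(escape_warnings(r"p = 'x\(\) must be generator'"))   # one warning per bad escape

# The raw-string form compiles cleanly, and the regex engine still sees
# exactly the same characters: backslash, parenthesis, and so on.
print(escape_warnings(r"p = r'x\(\) must be generator'"))  # []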
@@ -34,7 +34,7 @@ def sparse_multiclass_hinge_loss(
     scope=None,
     loss_collection=ops.GraphKeys.LOSSES,
     reduction=losses.Reduction.SUM_BY_NONZERO_WEIGHTS):
-  """Adds Ops for computing the multiclass hinge loss.
+  r"""Adds Ops for computing the multiclass hinge loss.
 
   The implementation is based on the following paper:
   On the Algorithmic Implementation of Multiclass Kernel-based Vector Machines
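In this hunk, and in the Adam-optimizer docstrings further down, the fix is applied to documentation text rather than a regex: an r""" prefix only changes anything when the docstring contains backslashes (for example LaTeX-style math), which is presumably why these docstrings needed it. A minimal, made-up illustration:

def toy_hinge_loss():
  r"""Toy docstring with LaTeX-style backslashes (illustrative only).

  Without the leading r, the \max and \sum in a formula such as
  \max(0, 1 - \sum_c y_c) would be parsed as invalid escape sequences
  and warn; the raw docstring keeps them verbatim for doc renderers.
  """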
@@ -174,7 +174,7 @@ class GeneratorIoTest(test.TestCase):
       return np.arange(32, 36)
 
     with self.cached_session():
-      with self.assertRaisesRegexp(TypeError, 'x\(\) must be generator'):
+      with self.assertRaisesRegexp(TypeError, r'x\(\) must be generator'):
         failing_input_fn = generator_io.generator_input_fn(
             generator, batch_size=2, shuffle=False, num_epochs=1)
         failing_input_fn()
@@ -185,7 +185,7 @@ class GeneratorIoTest(test.TestCase):
       yield np.arange(32, 36)
 
     with self.cached_session():
-      with self.assertRaisesRegexp(TypeError, 'x\(\) must yield dict'):
+      with self.assertRaisesRegexp(TypeError, r'x\(\) must yield dict'):
         failing_input_fn = generator_io.generator_input_fn(
             generator, batch_size=2, shuffle=False, num_epochs=1)
         failing_input_fn()
@@ -41,7 +41,7 @@ class AdamGSOptimizer(optimizer.Optimizer):
   def __init__(self, global_step=0, learning_rate=0.001,
                beta1=0.9, beta2=0.999, epsilon=1e-8,
                use_locking=False, name="Adam"):
-    """Construct a new Adam optimizer.
+    r"""Construct a new Adam optimizer.
 
     Branched from tf.train.AdamOptimizer. The only difference is to pass
     global step for computing beta1 and beta2 accumulators, instead of having
@@ -36,7 +36,7 @@ class AdamOptimizer(optimizer_v2.OptimizerV2):
 
   def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8,
                use_locking=False, name="Adam"):
-    """Construct a new Adam optimizer.
+    r"""Construct a new Adam optimizer.
 
     Initialization:
 
@@ -2036,7 +2036,7 @@ class SessionTest(test_util.TensorFlowTestCase):
     with self.cached_session() as sess:
       a = array_ops.placeholder(dtype=dtypes.string)
       with self.assertRaisesRegexp(
-          TypeError, 'Type of feed value 1 with type <(\w+) \'int\'> is not'):
+          TypeError, r'Type of feed value 1 with type <(\w+) \'int\'> is not'):
         sess.run(a, feed_dict={a: 1})
 
 
@@ -1832,7 +1832,7 @@ class LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):
       fc.linear_model(features, [price1, price2])
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -1847,7 +1847,7 @@ class LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc.linear_model(features, [price1, price2, price3])
 
   def test_runtime_batch_size_mismatch(self):
@@ -2467,7 +2467,7 @@ class _LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       get_keras_linear_model_predictions(features, [price1, price2])
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -2482,7 +2482,7 @@ class _LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       get_keras_linear_model_predictions(features, [price1, price2, price3])
 
   def test_runtime_batch_size_mismatch(self):
@@ -2974,7 +2974,7 @@ class FunctionalInputLayerTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc.input_layer(features, [price1, price2])
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -2989,7 +2989,7 @@ class FunctionalInputLayerTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc.input_layer(features, [price1, price2, price3])
 
   def test_runtime_batch_size_mismatch(self):
@@ -2052,7 +2052,7 @@ class LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       model = fc.LinearModel([price1, price2])
       model(features)
 
@@ -2068,7 +2068,7 @@ class LinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       model = fc.LinearModel([price1, price2, price3])
       model(features)
 
@@ -2818,7 +2818,7 @@ class OldLinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc_old.linear_model(features, [price1, price2])
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -2833,7 +2833,7 @@ class OldLinearModelTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc_old.linear_model(features, [price1, price2, price3])
 
   def test_runtime_batch_size_mismatch(self):
@@ -3435,7 +3435,7 @@ class DenseFeaturesTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc.DenseFeatures([price1, price2])(features)
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -3450,7 +3450,7 @@ class DenseFeaturesTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc.DenseFeatures([price1, price2, price3])(features)
 
   def test_runtime_batch_size_mismatch(self):
@@ -4141,7 +4141,7 @@ class FunctionalInputLayerTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc_old.input_layer(features, [price1, price2])
 
   def test_subset_of_static_batch_size_mismatch(self):
@@ -4156,7 +4156,7 @@ class FunctionalInputLayerTest(test.TestCase):
     }
     with self.assertRaisesRegexp(
         ValueError,
-        'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
+        r'Batch size \(first dimension\) of each feature must be same.'):  # pylint: disable=anomalous-backslash-in-string
       fc_old.input_layer(features, [price1, price2, price3])
 
   def test_runtime_batch_size_mismatch(self):
@@ -472,7 +472,7 @@ class RemoveSqueezableDimensionsTest(test.TestCase):
       }
       with self.assertRaisesRegexp(
           errors_impl.InvalidArgumentError,
-          "Can not squeeze dim\[2\]"):
+          r"Can not squeeze dim\[2\]"):
        dynamic_labels.eval(feed_dict=feed_dict)
      self.assertAllEqual(
          prediction_values, dynamic_predictions.eval(feed_dict=feed_dict))
@@ -500,7 +500,7 @@ class RemoveSqueezableDimensionsTest(test.TestCase):
          label_values, dynamic_labels.eval(feed_dict=feed_dict))
      with self.assertRaisesRegexp(
          errors_impl.InvalidArgumentError,
-          "Can not squeeze dim\[2\]"):
+          r"Can not squeeze dim\[2\]"):
        dynamic_predictions.eval(feed_dict=feed_dict)
 
 
@@ -39,7 +39,7 @@ class AdamOptimizer(optimizer.Optimizer):
 
   def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8,
                use_locking=False, name="Adam"):
-    """Construct a new Adam optimizer.
+    r"""Construct a new Adam optimizer.
 
     Initialization:
 
@@ -33,7 +33,7 @@ import tempfile
 import zipfile
 
 TF_NIGHTLY_REGEX = (r"(.+)tf_nightly(|_gpu)-(\d\.[\d]{1,2}"
-                    "\.\d.dev[\d]{0,8})-(.+)\.whl")
+                    r"\.\d.dev[\d]{0,8})-(.+)\.whl")
 BINARY_STRING_TEMPLATE = "%s-%s-%s.whl"
 
 
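Note that only the second fragment of TF_NIGHTLY_REGEX changes: adjacent string literals are concatenated at compile time, but the r prefix applies per literal, so each fragment that contains backslashes has to be raw on its own. A standalone check (the wheel filename is made up for illustration):

import re

NIGHTLY_RE = (r"(.+)tf_nightly(|_gpu)-(\d\.[\d]{1,2}"
              r"\.\d.dev[\d]{0,8})-(.+)\.whl")

# The concatenated pattern is character-for-character the same as before
# the fix; only the compile-time warning on the second literal goes away.
print(bool(re.match(NIGHTLY_RE, "dist/tf_nightly-1.13.0.dev20181004-py3.whl")))  # True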
@@ -195,7 +195,7 @@ def generate_RSA(bits=2048, exponent=65537):
 
 def get_change_ssh_port(use_hostnet, port):
   if use_hostnet == 1:
-    return "sed -i '/Port 22/c\Port {}' /etc/ssh/sshd_config".format(port)
+    return r"sed -i '/Port 22/c\Port {}' /etc/ssh/sshd_config".format(port)
 
   return ''
 
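Here the offending sequence is \P in sed's c\ (change) command; Python defines no \P escape, so the plain literal warned even though unknown escapes are left in place and the resulting command was already correct. The raw string produces identical text and only silences the warning; a tiny sketch with a made-up port number:

# The command string is unchanged from the non-raw version, because '\P'
# was never interpreted by Python; only the DeprecationWarning disappears.
cmd = r"sed -i '/Port 22/c\Port {}' /etc/ssh/sshd_config".format(2222)
print(cmd)  # sed -i '/Port 22/c\Port 2222' /etc/ssh/sshd_config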
@@ -104,7 +104,7 @@ def InvokeNvcc(argv, log=False):
   """
 
   src_files = [f for f in argv if
-               re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
+               re.search(r'\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
   if len(src_files) == 0:
     raise Error('No source files found for cuda compilation.')
 
@@ -104,7 +104,7 @@ def InvokeNvcc(argv, log=False):
   """
 
   src_files = [f for f in argv if
-               re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
+               re.search(r'\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
   if len(src_files) == 0:
     raise Error('No source files found for cuda compilation.')
 
@@ -104,7 +104,7 @@ def InvokeNvcc(argv, log=False):
   """
 
   src_files = [f for f in argv if
-               re.search('\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
+               re.search(r'\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
   if len(src_files) == 0:
     raise Error('No source files found for cuda compilation.')
 