Remove deprecated tf.substr

PiperOrigin-RevId: 225092500
Martin Wicke 2018-12-11 16:30:04 -08:00 committed by TensorFlower Gardener
parent bafb874798
commit fc220a61b7
5 changed files with 18 additions and 10 deletions

View File

@@ -367,7 +367,7 @@ def string_length_v2(input, unit="BYTE", name=None):
 string_length.__doc__ = gen_string_ops.string_length.__doc__
-@tf_export("substr")
+@tf_export(v1=["substr"])
 @deprecation.deprecated(None, "Use `tf.strings.substr` instead of `tf.substr`.")
 def substr_deprecated(input, pos, len, name=None, unit="BYTE"):
   return substr(input, pos, len, name=name, unit=unit)
@@ -380,14 +380,15 @@ substr_deprecated.__doc__ = gen_string_ops.substr.__doc__
 def substr(input, pos, len, name=None, unit="BYTE"):
   return gen_string_ops.substr(input, pos, len, unit=unit, name=name)
+substr.__doc__ = gen_string_ops.substr.__doc__
 @tf_export("strings.substr", v1=[])
 @dispatch.add_dispatch_support
 def substr_v2(input, pos, len, unit="BYTE", name=None):
-  return substr(input, pos, len, name=name, unit=unit)
+  return gen_string_ops.substr(input, pos, len, unit=unit, name=name)
-substr.__doc__ = gen_string_ops.substr.__doc__
+substr_v2.__doc__ = gen_string_ops.substr.__doc__
 ops.NotDifferentiable("RegexReplace")
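After this change tf.substr is exported only into the 1.x API (still carrying its deprecation warning), and 2.x code is expected to call tf.strings.substr directly. A minimal usage sketch, assuming a TensorFlow 2.x install:

import tensorflow as tf  # 2.x: tf.substr is no longer exported

words = tf.constant(["Hello TensorFlow", "deprecation"])
# Take the first five bytes of each string.
print(tf.strings.substr(words, pos=0, len=5))
# tf.Tensor([b'Hello' b'depre'], shape=(2,), dtype=string)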

View File

@@ -980,10 +980,6 @@ tf_module {
     name: "string_split"
     argspec: "args=[\'source\', \'delimiter\', \'skip_empty\'], varargs=None, keywords=None, defaults=[\' \', \'True\'], "
   }
-  member_method {
-    name: "substr"
-    argspec: "args=[\'input\', \'pos\', \'len\', \'name\', \'unit\'], varargs=None, keywords=None, defaults=[\'None\', \'BYTE\'], "
-  }
   member_method {
     name: "subtract"
     argspec: "args=[\'x\', \'y\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "

View File

@@ -109,6 +109,7 @@ reorders = {
     'tf.strings.length': ['input', 'name', 'unit'],
     'tf.strings.reduce_join': ['inputs', 'axis', 'keep_dims', 'separator', 'name', 'reduction_indices'],
     'tf.strings.substr': ['input', 'pos', 'len', 'name', 'unit'],
+    'tf.substr': ['input', 'pos', 'len', 'name', 'unit'],
     'tf.transpose': ['a', 'perm', 'name', 'conjugate'],
     'tf.tuple': ['tensors', 'name', 'control_inputs'],
     'tf.while_loop': ['cond', 'body', 'loop_vars', 'shape_invariants', 'parallel_iterations', 'back_prop', 'swap_memory', 'name', 'maximum_iterations', 'return_same_structure']
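The new reorders entry records the 1.x positional-argument order of tf.substr so the upgrade script can rewrite positional calls as keyword calls; this matters because the 2.x signature swaps name and unit. A hypothetical sketch of how such an entry gets applied (the real converter rewrites the AST; reorder_to_keywords below is an illustrative helper, not part of the tool):

def reorder_to_keywords(func_name, positional_args, reorders):
    # Pair each positional argument with its 1.x parameter name and emit an
    # explicitly keyworded call, which stays correct under the 2.x argument order.
    names = reorders[func_name]
    kwargs = ", ".join("%s=%s" % (n, v) for n, v in zip(names, positional_args))
    return "%s(%s)" % (func_name, kwargs)

reorders = {'tf.substr': ['input', 'pos', 'len', 'name', 'unit']}
print(reorder_to_keywords('tf.substr', ['s', '0', '5'], reorders))
# prints: tf.substr(input=s, pos=0, len=5)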

View File

@@ -492,6 +492,8 @@ class TFAPIChangeSpec(ast_edits.APIChangeSpec):
             "tf.sparse.reduce_max",
         "tf.random.stateless_multinomial":
             "tf.random.stateless_categorical",
+        "tf.substr":
+            "tf.strings.substr",
         "tf.string_to_hash_bucket":
             "tf.strings.to_hash_bucket",
         "tf.string_to_number":
@@ -600,9 +602,10 @@ class TFAPIChangeSpec(ast_edits.APIChangeSpec):
         "tf.sparse.reduce_max",
         "tf.sparse_reduce_max",
         "tf.io.decode_csv",
-        "tf.strings.substr",
-        "tf.strings.reduce_join",
         "tf.strings.length",
+        "tf.strings.reduce_join",
+        "tf.strings.substr",
+        "tf.substr",
         "tf.transpose",
         "tf.tuple",
         "tf.parse_example",

View File

@@ -443,6 +443,13 @@ bazel-bin/tensorflow/tools/compatibility/update/generate_v2_reorders_map
     )
     self.assertEqual(new_text, expected_text)
+  def test_substr(self):
+    text = "tf.substr(input, pos, len, name, unit)\n"
+    _, unused_report, errors, new_text = self._upgrade(text)
+    self.assertEqual("tf.strings.substr(input=input, pos=pos, len=len, "
+                     "name=name, unit=unit)\n", new_text)
+    self.assertEqual(errors, [])
+
   def testColocateGradientsWithOps(self):
     text = "tf.gradients(a, foo=False)\n"
     _, unused_report, errors, new_text = self._upgrade(text)
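The new test exercises the whole path: a 1.x-style positional tf.substr call comes back as a fully keyworded tf.strings.substr call. The same conversion can be tried on real source files with the tf_upgrade_v2 command-line tool that ships with TensorFlow 2.x; a usage sketch, assuming the tool is on PATH:

tf_upgrade_v2 --infile v1_code.py --outfile v2_code.py

A line such as y = tf.substr(s, 0, 5) in v1_code.py should come out as y = tf.strings.substr(input=s, pos=0, len=5) in v2_code.py.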