Replace astor with astunparse in autograph, for compatibility with Python 3.8. This is a non-functional change - the only differences between the two should be in the formatting of the generated code.
PiperOrigin-RevId: 289985611 Change-Id: I1b87712255e20e354efe95abf3aa24a63ff41784
This commit is contained in:
parent
c2a9d50701
commit
c07bcee596
@ -23,6 +23,7 @@ tensorflow/third_party/android/android_configure.BUILD.tpl
|
|||||||
tensorflow/third_party/android/android_configure.bzl
|
tensorflow/third_party/android/android_configure.bzl
|
||||||
tensorflow/third_party/arm_neon_2_x86_sse.BUILD
|
tensorflow/third_party/arm_neon_2_x86_sse.BUILD
|
||||||
tensorflow/third_party/astor.BUILD
|
tensorflow/third_party/astor.BUILD
|
||||||
|
tensorflow/third_party/astunparse.BUILD
|
||||||
tensorflow/third_party/backports_weakref.BUILD
|
tensorflow/third_party/backports_weakref.BUILD
|
||||||
tensorflow/third_party/boringssl/BUILD
|
tensorflow/third_party/boringssl/BUILD
|
||||||
tensorflow/third_party/clang_toolchain/BUILD
|
tensorflow/third_party/clang_toolchain/BUILD
|
||||||
|
@ -42,7 +42,7 @@ py_library(
|
|||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
deps = [
|
deps = [
|
||||||
"//tensorflow/python/autograph/pyct/common_transformers",
|
"//tensorflow/python/autograph/pyct/common_transformers",
|
||||||
"@astor_archive//:astor",
|
"@astunparse_archive//:astunparse",
|
||||||
"@gast_archive//:gast",
|
"@gast_archive//:gast",
|
||||||
"@six_archive//:six",
|
"@six_archive//:six",
|
||||||
"@termcolor_archive//:termcolor",
|
"@termcolor_archive//:termcolor",
|
||||||
@ -80,6 +80,9 @@ py_test(
|
|||||||
srcs = ["cfg_test.py"],
|
srcs = ["cfg_test.py"],
|
||||||
python_version = "PY3",
|
python_version = "PY3",
|
||||||
srcs_version = "PY2AND3",
|
srcs_version = "PY2AND3",
|
||||||
|
tags = [
|
||||||
|
"no_oss_py2",
|
||||||
|
],
|
||||||
deps = [
|
deps = [
|
||||||
":pyct",
|
":pyct",
|
||||||
"//tensorflow/python:client_testlib",
|
"//tensorflow/python:client_testlib",
|
||||||
|
@ -47,7 +47,7 @@ class AstUtilTest(test.TestCase):
|
|||||||
|
|
||||||
self.assertIsInstance(node.value.left.id, str)
|
self.assertIsInstance(node.value.left.id, str)
|
||||||
source = parser.unparse(node, include_encoding_marker=False)
|
source = parser.unparse(node, include_encoding_marker=False)
|
||||||
self.assertEqual(source.strip(), 'renamed_a + b')
|
self.assertEqual(source.strip(), '(renamed_a + b)')
|
||||||
|
|
||||||
def test_rename_symbols_attributes(self):
|
def test_rename_symbols_attributes(self):
|
||||||
node = parser.parse('b.c = b.c.d')
|
node = parser.parse('b.c = b.c.d')
|
||||||
@ -234,7 +234,7 @@ class AstUtilTest(test.TestCase):
|
|||||||
"""))
|
"""))
|
||||||
f = lambda x: x
|
f = lambda x: x
|
||||||
nodes = ast_util.find_matching_definitions(node, f)
|
nodes = ast_util.find_matching_definitions(node, f)
|
||||||
self.assertLambdaNodes(nodes, ('(1)',))
|
self.assertLambdaNodes(nodes, ('1',))
|
||||||
|
|
||||||
def test_find_matching_definitions_lambda_multiple_matches(self):
|
def test_find_matching_definitions_lambda_multiple_matches(self):
|
||||||
node = parser.parse(
|
node = parser.parse(
|
||||||
@ -243,7 +243,7 @@ class AstUtilTest(test.TestCase):
|
|||||||
"""))
|
"""))
|
||||||
f = lambda x: x
|
f = lambda x: x
|
||||||
nodes = ast_util.find_matching_definitions(node, f)
|
nodes = ast_util.find_matching_definitions(node, f)
|
||||||
self.assertLambdaNodes(nodes, ('(1)', '(2)'))
|
self.assertLambdaNodes(nodes, ('1', '2'))
|
||||||
|
|
||||||
def test_find_matching_definitions_lambda_uses_arg_names(self):
|
def test_find_matching_definitions_lambda_uses_arg_names(self):
|
||||||
node = parser.parse(
|
node = parser.parse(
|
||||||
@ -252,11 +252,11 @@ class AstUtilTest(test.TestCase):
|
|||||||
"""))
|
"""))
|
||||||
f = lambda x: x
|
f = lambda x: x
|
||||||
nodes = ast_util.find_matching_definitions(node, f)
|
nodes = ast_util.find_matching_definitions(node, f)
|
||||||
self.assertLambdaNodes(nodes, ('(1)',))
|
self.assertLambdaNodes(nodes, ('1',))
|
||||||
|
|
||||||
f = lambda y: y
|
f = lambda y: y
|
||||||
nodes = ast_util.find_matching_definitions(node, f)
|
nodes = ast_util.find_matching_definitions(node, f)
|
||||||
self.assertLambdaNodes(nodes, ('(2)',))
|
self.assertLambdaNodes(nodes, ('2',))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@ -172,8 +172,8 @@ class AstToCfgTest(test.TestCase):
|
|||||||
self.assertGraphMatches(
|
self.assertGraphMatches(
|
||||||
graph,
|
graph,
|
||||||
(
|
(
|
||||||
(None, 'a, b', 'a = b + 1'),
|
(None, 'a, b', 'a = (b + 1)'),
|
||||||
('a = b + 1', 'a += max(a)', None),
|
('a = (b + 1)', 'a += max(a)', None),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -209,7 +209,7 @@ class AstToCfgTest(test.TestCase):
|
|||||||
(
|
(
|
||||||
(None, 'a', '(a > 0)'),
|
(None, 'a', '(a > 0)'),
|
||||||
('(a > 0)', 'a = 1', None),
|
('(a > 0)', 'a = 1', None),
|
||||||
('(a > 0)', 'a += -1', None),
|
('(a > 0)', 'a += (- 1)', None),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
self.assertStatementEdges(
|
self.assertStatementEdges(
|
||||||
@ -973,8 +973,8 @@ class AstToCfgTest(test.TestCase):
|
|||||||
self.assertGraphMatches(
|
self.assertGraphMatches(
|
||||||
graph,
|
graph,
|
||||||
(
|
(
|
||||||
('a', 'a = lambda b: a + b', 'return a'),
|
('a', 'a = (lambda b: (a + b))', 'return a'),
|
||||||
('a = lambda b: a + b', 'return a', None),
|
('a = (lambda b: (a + b))', 'return a', None),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -37,15 +37,16 @@ class LoaderTest(test.TestCase):
|
|||||||
a = True
|
a = True
|
||||||
b = ''
|
b = ''
|
||||||
if a:
|
if a:
|
||||||
b = x + 1
|
b = (x + 1)
|
||||||
return b
|
return b
|
||||||
|
|
||||||
node, _ = parser.parse_entity(test_fn, future_features=())
|
node, _ = parser.parse_entity(test_fn, future_features=())
|
||||||
module, _, _ = loader.load_ast(node)
|
module, _, _ = loader.load_ast(node)
|
||||||
|
|
||||||
|
# astunparse uses fixed 4-space indenting.
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
textwrap.dedent(tf_inspect.getsource(test_fn)),
|
textwrap.dedent(tf_inspect.getsource(test_fn)),
|
||||||
tf_inspect.getsource(module.test_fn))
|
tf_inspect.getsource(module.test_fn).replace(' ', ' '))
|
||||||
|
|
||||||
def test_load_ast(self):
|
def test_load_ast(self):
|
||||||
node = gast.FunctionDef(
|
node = gast.FunctionDef(
|
||||||
@ -81,7 +82,7 @@ class LoaderTest(test.TestCase):
|
|||||||
expected_source = """
|
expected_source = """
|
||||||
# coding=utf-8
|
# coding=utf-8
|
||||||
def f(a):
|
def f(a):
|
||||||
return a + 1
|
return (a + 1)
|
||||||
"""
|
"""
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
textwrap.dedent(expected_source).strip(),
|
textwrap.dedent(expected_source).strip(),
|
||||||
|
@ -25,7 +25,7 @@ import re
|
|||||||
import textwrap
|
import textwrap
|
||||||
import tokenize
|
import tokenize
|
||||||
|
|
||||||
import astor
|
import astunparse
|
||||||
import gast
|
import gast
|
||||||
import six
|
import six
|
||||||
|
|
||||||
@ -253,12 +253,13 @@ def parse_expression(src):
|
|||||||
return node.value
|
return node.value
|
||||||
|
|
||||||
|
|
||||||
def unparse(node, indentation=' ', include_encoding_marker=True):
|
def unparse(node, indentation=None, include_encoding_marker=True):
|
||||||
"""Returns the source code of given AST.
|
"""Returns the source code of given AST.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
node: The code to compile, as an AST object.
|
node: The code to compile, as an AST object.
|
||||||
indentation: The string to use for indentation.
|
indentation: Unused, deprecated. The returned code will always be indented
|
||||||
|
at 4 spaces.
|
||||||
include_encoding_marker: Bool, whether to include a comment on the first
|
include_encoding_marker: Bool, whether to include a comment on the first
|
||||||
line to explicitly specify UTF-8 encoding.
|
line to explicitly specify UTF-8 encoding.
|
||||||
|
|
||||||
@ -266,37 +267,16 @@ def unparse(node, indentation=' ', include_encoding_marker=True):
|
|||||||
code: The source code generated from the AST object
|
code: The source code generated from the AST object
|
||||||
source_mapping: A mapping between the user and AutoGraph generated code.
|
source_mapping: A mapping between the user and AutoGraph generated code.
|
||||||
"""
|
"""
|
||||||
|
del indentation # astunparse doesn't allow configuring it.
|
||||||
if not isinstance(node, (list, tuple)):
|
if not isinstance(node, (list, tuple)):
|
||||||
node = (node,)
|
node = (node,)
|
||||||
generator = astor.code_gen.SourceGenerator(indentation, False,
|
|
||||||
astor.string_repr.pretty_string)
|
|
||||||
|
|
||||||
|
codes = []
|
||||||
|
if include_encoding_marker:
|
||||||
|
codes.append('# coding=utf-8')
|
||||||
for n in node:
|
for n in node:
|
||||||
if isinstance(n, gast.AST):
|
if isinstance(n, gast.AST):
|
||||||
n = gast.gast_to_ast(n)
|
n = gast.gast_to_ast(n)
|
||||||
generator.visit(n)
|
codes.append(astunparse.unparse(n).strip())
|
||||||
generator.result.append('\n')
|
|
||||||
|
|
||||||
# In some versions of Python, literals may appear as actual values. This
|
return '\n'.join(codes)
|
||||||
# ensures everything is string.
|
|
||||||
code = ''.join(map(str, generator.result))
|
|
||||||
|
|
||||||
# Strip leading blank lines.
|
|
||||||
code_lines = code.split('\n')
|
|
||||||
trimmed_code_lines = []
|
|
||||||
for l in code_lines:
|
|
||||||
if l.rstrip() or trimmed_code_lines:
|
|
||||||
trimmed_code_lines.append(l)
|
|
||||||
code = '\n'.join(trimmed_code_lines)
|
|
||||||
|
|
||||||
# Work around the reference cycle generated by astor.
|
|
||||||
# See https://github.com/berkerpeksag/astor/blob/55dd323f7d8d696610c703c0296763c567685c31/astor/code_gen.py#L162 # pylint:disable=line-too-long
|
|
||||||
# Reference cycles are quite disliked by TensorFlow's tests.
|
|
||||||
if hasattr(generator, 'write'):
|
|
||||||
generator.write = None
|
|
||||||
del generator
|
|
||||||
|
|
||||||
if include_encoding_marker:
|
|
||||||
code = '# coding=utf-8\n' + code
|
|
||||||
|
|
||||||
return code
|
|
||||||
|
@ -166,9 +166,9 @@ string""")
|
|||||||
textwrap.dedent("""
|
textwrap.dedent("""
|
||||||
# coding=utf-8
|
# coding=utf-8
|
||||||
if 1:
|
if 1:
|
||||||
a = b
|
a = b
|
||||||
else:
|
else:
|
||||||
a = 'c'
|
a = 'c'
|
||||||
""").strip(), source.strip())
|
""").strip(), source.strip())
|
||||||
|
|
||||||
|
|
||||||
|
@ -129,6 +129,7 @@ function install_pip_deps {
|
|||||||
|
|
||||||
# LINT.IfChange(ubuntu_pip_installations)
|
# LINT.IfChange(ubuntu_pip_installations)
|
||||||
# TODO(aselle): Change all these to be --user instead of sudo.
|
# TODO(aselle): Change all these to be --user instead of sudo.
|
||||||
|
${SUDO_CMD} ${PIP_CMD} install astunparse==1.6.3
|
||||||
${SUDO_CMD} ${PIP_CMD} install keras_preprocessing==1.1.0 --no-deps
|
${SUDO_CMD} ${PIP_CMD} install keras_preprocessing==1.1.0 --no-deps
|
||||||
${SUDO_CMD} ${PIP_CMD} install gast==0.3.2
|
${SUDO_CMD} ${PIP_CMD} install gast==0.3.2
|
||||||
${SUDO_CMD} ${PIP_CMD} install h5py==2.8.0
|
${SUDO_CMD} ${PIP_CMD} install h5py==2.8.0
|
||||||
@ -159,6 +160,7 @@ function install_ubuntu_16_pip_deps {
|
|||||||
done
|
done
|
||||||
|
|
||||||
# LINT.IfChange(ubuntu_16_pip_installations)
|
# LINT.IfChange(ubuntu_16_pip_installations)
|
||||||
|
"${PIP_CMD}" install astunparse==1.6.3 --user
|
||||||
"${PIP_CMD}" install --user --upgrade attrs
|
"${PIP_CMD}" install --user --upgrade attrs
|
||||||
"${PIP_CMD}" install keras_preprocessing==1.1.0 --no-deps --user
|
"${PIP_CMD}" install keras_preprocessing==1.1.0 --no-deps --user
|
||||||
"${PIP_CMD}" install numpy==1.14.5 --user
|
"${PIP_CMD}" install numpy==1.14.5 --user
|
||||||
|
@ -38,7 +38,6 @@ SET PATH=%PATH%;C:\%PYTHON_DIRECTORY%
|
|||||||
%PIP_EXE% install wrapt --upgrade --no-deps
|
%PIP_EXE% install wrapt --upgrade --no-deps
|
||||||
|
|
||||||
IF "%PYTHON_DIRECTORY%"=="Python37" (
|
IF "%PYTHON_DIRECTORY%"=="Python37" (
|
||||||
%PIP_EXE% install astor==0.7.1
|
|
||||||
%PIP_EXE% install absl-py==0.5.0
|
%PIP_EXE% install absl-py==0.5.0
|
||||||
%PIP_EXE% install colorama==0.3.9
|
%PIP_EXE% install colorama==0.3.9
|
||||||
%PIP_EXE% install cycler==0.10.0
|
%PIP_EXE% install cycler==0.10.0
|
||||||
@ -57,6 +56,7 @@ IF "%PYTHON_DIRECTORY%"=="Python37" (
|
|||||||
@REM break with gast upgrade to 0.3.2. Need to figure out the right way to
|
@REM break with gast upgrade to 0.3.2. Need to figure out the right way to
|
||||||
@REM handle this case.
|
@REM handle this case.
|
||||||
%PIP_EXE% install gast==0.3.2
|
%PIP_EXE% install gast==0.3.2
|
||||||
|
%PIP_EXE% install astunparse==1.6.3
|
||||||
|
|
||||||
:: Set cuda related environment variables. If we are not using CUDA, these are not used.
|
:: Set cuda related environment variables. If we are not using CUDA, these are not used.
|
||||||
IF NOT DEFINED TF_CUDA_VERSION (
|
IF NOT DEFINED TF_CUDA_VERSION (
|
||||||
|
@ -128,6 +128,7 @@ filegroup(
|
|||||||
"//third_party/hadoop:LICENSE.txt",
|
"//third_party/hadoop:LICENSE.txt",
|
||||||
"//third_party/icu/data:LICENSE",
|
"//third_party/icu/data:LICENSE",
|
||||||
"@arm_neon_2_x86_sse//:LICENSE",
|
"@arm_neon_2_x86_sse//:LICENSE",
|
||||||
|
"@astunparse_archive//:LICENSE",
|
||||||
"@astor_archive//:LICENSE",
|
"@astor_archive//:LICENSE",
|
||||||
"@boringssl//:LICENSE",
|
"@boringssl//:LICENSE",
|
||||||
"@com_google_absl//:LICENSE",
|
"@com_google_absl//:LICENSE",
|
||||||
|
@ -51,7 +51,7 @@ _VERSION = '2.1.0'
|
|||||||
|
|
||||||
REQUIRED_PACKAGES = [
|
REQUIRED_PACKAGES = [
|
||||||
'absl-py >= 0.7.0',
|
'absl-py >= 0.7.0',
|
||||||
'astor >= 0.6.0',
|
'astunparse == 1.6.3',
|
||||||
'backports.weakref >= 1.0rc1;python_version<"3.4"',
|
'backports.weakref >= 1.0rc1;python_version<"3.4"',
|
||||||
'enum34 >= 1.1.6;python_version<"3.4"',
|
'enum34 >= 1.1.6;python_version<"3.4"',
|
||||||
'gast == 0.3.2',
|
'gast == 0.3.2',
|
||||||
|
@ -345,6 +345,29 @@ def tf_repositories(path_prefix = "", tf_repo_name = ""):
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
tf_http_archive(
|
||||||
|
name = "astunparse_archive",
|
||||||
|
build_file = clean_dep("//third_party:astunparse.BUILD"),
|
||||||
|
sha256 = "5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872",
|
||||||
|
strip_prefix = "astunparse-1.6.3/lib",
|
||||||
|
system_build_file = clean_dep("//third_party/systemlibs:astunparse.BUILD"),
|
||||||
|
urls = [
|
||||||
|
"https://storage.googleapis.com/mirror.tensorflow.org/files.pythonhosted.org/packages/f3/af/4182184d3c338792894f34a62672919db7ca008c89abee9b564dd34d8029/astunparse-1.6.3.tar.gz",
|
||||||
|
"https://files.pythonhosted.org/packages/f3/af/4182184d3c338792894f34a62672919db7ca008c89abee9b564dd34d8029/astunparse-1.6.3.tar.gz",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
filegroup_external(
|
||||||
|
name = "astunparse_license",
|
||||||
|
licenses = ["notice"], # PSFL
|
||||||
|
sha256_urls = {
|
||||||
|
"92fc0e4f4fa9460558eedf3412b988d433a2dcbb3a9c45402a145a4fab8a6ac6": [
|
||||||
|
"http://mirror.tensorflow.org/raw.githubusercontent.com/simonpercivall/astunparse/v1.6.2/LICENSE",
|
||||||
|
"https://raw.githubusercontent.com/simonpercivall/astunparse/v1.6.2/LICENSE",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
tf_http_archive(
|
tf_http_archive(
|
||||||
name = "functools32_archive",
|
name = "functools32_archive",
|
||||||
build_file = clean_dep("//third_party:functools32.BUILD"),
|
build_file = clean_dep("//third_party:functools32.BUILD"),
|
||||||
|
23
third_party/astunparse.BUILD
vendored
Normal file
23
third_party/astunparse.BUILD
vendored
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# Description:
|
||||||
|
# AST round-trip manipulation for Python.
|
||||||
|
|
||||||
|
package(default_visibility = ["//visibility:public"])
|
||||||
|
|
||||||
|
licenses(["notice"])
|
||||||
|
|
||||||
|
py_library(
|
||||||
|
name = "astunparse",
|
||||||
|
srcs = [
|
||||||
|
"astunparse/__init__.py",
|
||||||
|
"astunparse/printer.py",
|
||||||
|
"astunparse/unparser.py",
|
||||||
|
],
|
||||||
|
srcs_version = "PY2AND3",
|
||||||
|
)
|
||||||
|
|
||||||
|
genrule(
|
||||||
|
name = "license",
|
||||||
|
srcs = ["@astunparse_license"],
|
||||||
|
outs = ["LICENSE"],
|
||||||
|
cmd = "cp $< $@",
|
||||||
|
)
|
Loading…
Reference in New Issue
Block a user