Include SHA hash in Python variable __version__
It is necessary to symlink files from .git/ into the source tree so that Bazel is aware of changes to the current HEAD. As it stands, this is not completely reliable when a git repository has a dirty index. First-class git support in Bazel is a reported bug, but not a high priority.

./configure sets up the symlinks by calling gen_git_source.py --configure, and a Bazel genrule calls gen_git_source.py to generate version_info.cc. The cmake and make builds were also changed to build this file properly.
Change: 132328009
parent 8aa6d5fa25
commit 09045e49d1

configure | 11
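With this change the git state becomes visible through the Python API alongside the existing version string. A minimal check, assuming a build of TensorFlow that includes this commit (printed values are illustrative only):

import tensorflow as tf

print(tf.__version__)           # release version string, as before
print(tf.__git_version__)       # from `git describe --long --dirty --tags`, or "internal" without a git repo
print(tf.__compiler_version__)  # host compiler version string (see the version.h hunk below)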
@@ -2,6 +2,11 @@
 DO_NOT_SUBMIT_WARNING="Unofficial setting. DO NOT SUBMIT!!!"
 
+# Find out the absolute path to where ./configure resides
+pushd `dirname $0` #> /dev/null
+SOURCE_BASE_DIR=`pwd -P`
+popd > /dev/null
+
 ## Set up python-related environment settings
 while true; do
   fromuser=""
@@ -68,6 +73,12 @@ echo "$SWIG_PATH" > tensorflow/tools/swig/swig_path
 # Invoke python_config and set up symlinks to python includes
 (./util/python/python_config.sh --setup "$PYTHON_BIN_PATH";) || exit -1
 
+# Run the gen_git_source to create links where bazel can track dependencies for
+# git hash propagation
+GEN_GIT_SOURCE=tensorflow/tools/git/gen_git_source.py
+chmod a+x ${GEN_GIT_SOURCE}
+${PYTHON_BIN_PATH} ${GEN_GIT_SOURCE} --configure ${SOURCE_BASE_DIR}
+
 ## Set up Cuda-related environment settings
 
 while [ "$TF_NEED_CUDA" == "" ]; do
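The configure step above can be reproduced by hand; a rough sketch, assuming it is run from the TensorFlow source root with the script path introduced in this commit:

import subprocess

# Mirrors the ./configure step: creates tensorflow/tools/git/gen/ containing
# spec.json plus head/branch_ref symlinks (or empty placeholder files).
subprocess.check_call(
    ["python", "tensorflow/tools/git/gen_git_source.py", "--configure", "."])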
@@ -168,6 +168,7 @@ filegroup(
         "//tensorflow/tools/docker:all_files",
         "//tensorflow/tools/docker/notebooks:all_files",
         "//tensorflow/tools/docs:all_files",
+        "//tensorflow/tools/git:all_files",
         "//tensorflow/tools/proto_text:all_files",
         "//tensorflow/tools/test:all_files",
         "//tensorflow/user_ops:all_files",
@@ -180,6 +180,21 @@ add_dependencies(tf_core_lib
     boringssl
 )
 
+# Tricky setup to force always rebuilding
+# force_rebuild always runs forcing ${VERSION_INFO_CC} target to run
+# ${VERSION_INFO_CC} would cache, but it depends on a phony never produced
+# target.
+set(VERSION_INFO_CC ${tensorflow_source_dir}/tensorflow/core/util/version_info.cc)
+add_custom_target(force_rebuild_target ALL DEPENDS ${VERSION_INFO_CC})
+add_custom_command(OUTPUT __force_rebuild COMMAND cmake -E echo)
+add_custom_command(OUTPUT
+    ${VERSION_INFO_CC}
+    COMMAND ${tensorflow_source_dir}/tensorflow/tools/git/gen_git_source.py
+    --raw_generate ${VERSION_INFO_CC}
+    DEPENDS __force_rebuild)
+
+set(tf_version_srcs ${tensorflow_source_dir}/tensorflow/core/util/version_info.cc)
+
 
 ########################################################
 # tf_core_framework library
@@ -211,6 +226,7 @@ list(REMOVE_ITEM tf_core_framework_srcs ${tf_core_framework_test_srcs})
 
 add_library(tf_core_framework OBJECT
     ${tf_core_framework_srcs}
+    ${tf_version_srcs}
     ${PROTO_TEXT_HDRS}
     ${PROTO_TEXT_SRCS})
 target_include_directories(tf_core_framework PUBLIC
@@ -421,7 +421,8 @@ $(wildcard tensorflow/core/platform/*.cc) \
 $(wildcard tensorflow/core/platform/*/*.cc) \
 $(wildcard tensorflow/core/platform/*/*/*.cc) \
 $(wildcard tensorflow/core/util/*.cc) \
-$(wildcard tensorflow/core/util/*/*.cc)
+$(wildcard tensorflow/core/util/*/*.cc) \
+tensorflow/core/util/version_info.cc
 CORE_CC_EXCLUDE_SRCS := \
 $(wildcard tensorflow/core/*/*test.cc) \
 $(wildcard tensorflow/core/*/*testutil*) \
@@ -477,6 +478,11 @@ all: $(LIB_PATH) $(BENCHMARK_NAME)
 
 # Rules for target compilation.
 
+
+.phony_version_info:
+tensorflow/core/util/version_info.cc: .phony_version_info
+	python tensorflow/tools/git/gen_git_source.py --raw_generate $@
+
 # Gathers together all the objects we've compiled into a single '.a' archive.
 $(LIB_PATH): $(LIB_OBJS)
 	@mkdir -p $(dir $@)
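The make rule above regenerates version_info.cc on every build. A by-hand equivalent, assuming the source root as the working directory:

import subprocess

# Same invocation as the Makefile recipe; writes definitions of
# tf_git_version() and tf_compiler_version() into the given file. The git
# value comes from `git describe --long --dirty --tags`, falling back to
# "unknown" outside a git checkout.
subprocess.check_call(
    ["python", "tensorflow/tools/git/gen_git_source.py",
     "--raw_generate", "tensorflow/core/util/version_info.cc"])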
@@ -553,6 +559,7 @@ $(HOST_GENDIR)%.pb.cc $(HOST_GENDIR)%.pb.h: %.proto
 # Gets rid of all generated files.
 clean:
 	rm -rf $(MAKEFILE_DIR)/gen
+	rm -rf tensorflow/core/util/version_info.cc
 
 # Gets rid of target files only, leaving the host alone. Also leaves the lib
 # directory untouched deliberately, so we can persist multiple architectures
@@ -68,6 +68,7 @@ load(
 )
 load("//tensorflow:tensorflow.bzl", "tf_cc_test_gpu")
 load("//tensorflow:tensorflow.bzl", "tf_cc_tests_gpu")
+load("//tensorflow:tensorflow.bzl", "tf_version_info_genrule")
 
 # For platform specific build config
 load(
@@ -938,6 +939,13 @@ cc_library(
     ],
 )
 
+tf_version_info_genrule()
+
+cc_library(
+    name = "version_lib",
+    srcs = ["util/version_info.cc"],
+)
+
 tf_cuda_library(
     name = "framework_internal",
     srcs = glob(
@@ -980,6 +988,7 @@ tf_cuda_library(
         ":lib_internal",
         ":proto_text",
         ":protos_all_cc",
+        ":version_lib",
         "//tensorflow/core/kernels:bounds_check",
         "//third_party/eigen3",
     ],
@@ -84,4 +84,12 @@ limitations under the License.
 #define TF_CHECKPOINT_VERSION_MIN_CONSUMER 0
 #define TF_CHECKPOINT_VERSION 1
+
+/// Version query functions (defined in generated version_info.cc)
+
+// Host compiler version (declared elsewhere to be __VERSION__)
+extern const char* tf_compiler_version();
+// The git commit designator when tensorflow was built
+// If no git repository, this will be "internal".
+extern const char* tf_git_version();
 
 #endif  // TENSORFLOW_CORE_PUBLIC_VERSION_H_
@@ -245,4 +245,8 @@ __all__.extend([
     'train',
 ])
 
-__all__.append('__version__')
+__all__.extend([
+    '__version__',
+    '__git_version__',
+    '__compiler_version__',
+])
@@ -35,6 +35,12 @@ tensorflow::ImportNumpy();
 %constant int GRAPH_DEF_VERSION_MIN_CONSUMER = TF_GRAPH_DEF_VERSION_MIN_CONSUMER;
 %constant int GRAPH_DEF_VERSION_MIN_PRODUCER = TF_GRAPH_DEF_VERSION_MIN_PRODUCER;
 
+// Git version information
+%constant const char* __git_version__ = tf_git_version();
+
+// Compiler
+%constant const char* __compiler_version__ = tf_compiler_version();
+
 // Release the Python GIL for the duration of most methods.
 %exception {
   Py_BEGIN_ALLOW_THREADS;
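Once the SWIG wrapper is rebuilt, the %constant declarations above should surface as plain attributes of the generated module; a small sanity check, assuming the wrapper imports as in the Python hunks below:

from tensorflow.python import pywrap_tensorflow

# Both constants arrive as Python strings.
assert isinstance(pywrap_tensorflow.__git_version__, str)
assert isinstance(pywrap_tensorflow.__compiler_version__, str)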
@@ -22,6 +22,9 @@ from __future__ import print_function
 from tensorflow.python import pywrap_tensorflow
 
 __version__ = pywrap_tensorflow.__version__
+__git_version__ = pywrap_tensorflow.__git_version__
+__compiler_version__ = pywrap_tensorflow.__compiler_version__
+
 GRAPH_DEF_VERSION = pywrap_tensorflow.GRAPH_DEF_VERSION
 GRAPH_DEF_VERSION_MIN_CONSUMER = (
     pywrap_tensorflow.GRAPH_DEF_VERSION_MIN_CONSUMER)
@@ -30,4 +33,5 @@ GRAPH_DEF_VERSION_MIN_PRODUCER = (
 
 # Make sure these symbols are exported even though one starts with _.
 __all__ = ["__version__", "GRAPH_DEF_VERSION", "GRAPH_DEF_VERSION_MIN_CONSUMER",
-           "GRAPH_DEF_VERSION_MIN_PRODUCER"]
+           "GRAPH_DEF_VERSION_MIN_PRODUCER", "__git_version__",
+           "__compiler_version__"]
@@ -38,6 +38,9 @@ class VersionTest(tf.test.TestCase):
     self.assertLessEqual(0, min_producer)
     self.assertLessEqual(min_producer, version)
 
+  def testGitVersion(self):
+    self.assertEqual(type(tf.__git_version__), str)
+    self.assertEqual(type(tf.__compiler_version__), str)
 
 if __name__ == "__main__":
   tf.test.main()
@@ -813,3 +813,18 @@ def tf_genrule_cmd_append_to_srcs(to_append):
   return ("cat $(SRCS) > $(@) && " +
           "echo >> $(@) && " +
           "echo " + to_append + " >> $(@)")
+
+
+def tf_version_info_genrule():
+  native.genrule(
+      name = "version_info_gen",
+      srcs = [
+          "//tensorflow/tools/git:gen/spec.json",
+          "//tensorflow/tools/git:gen/head",
+          "//tensorflow/tools/git:gen/branch_ref",
+      ],
+      outs = ["util/version_info.cc"],
+      cmd = "$(location //tensorflow/tools/git:gen_git_source.py) --generate $(SRCS) \"$@\"",
+      local = 1,
+      tools = ["//tensorflow/tools/git:gen_git_source.py"],
+  )
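Note that generate() in gen_git_source.py (added below) unpacks $(SRCS) positionally as spec, head_symlink, ref_symlink, so the srcs order in this genrule is significant, and "$@" becomes the single output util/version_info.cc. A sketch of the equivalent direct invocation, with the paths this commit sets up:

import subprocess

subprocess.check_call([
    "python", "tensorflow/tools/git/gen_git_source.py", "--generate",
    "tensorflow/tools/git/gen/spec.json",
    "tensorflow/tools/git/gen/head",
    "tensorflow/tools/git/gen/branch_ref",
    "version_info.cc",  # destination; the genrule writes util/version_info.cc
])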
@@ -1,2 +1,3 @@
 *tensorflow*
 *perftools*gputools*
+*tf_*
tensorflow/tools/git/BUILD (new file, 28 lines)
@@ -0,0 +1,28 @@
+# Description:
+# Contains script to generate tensorflow/core/util/version_info.cc
+# Also contains information about git repository deposited by configure
+# in gen/...
+package(default_visibility = ["//tensorflow:internal"])
+
+licenses(["notice"])  # Apache 2.0
+
+exports_files(
+    glob(["gen/*"]) + [
+        "gen_git_source.py",
+    ],
+)
+
+# -----------------------------------------------------------------------------
+# Google-internal targets. These must be at the end for syncrepo.
+
+filegroup(
+    name = "all_files",
+    srcs = glob(
+        ["**/*"],
+        exclude = [
+            "**/METADATA",
+            "**/OWNERS",
+        ],
+    ),
+    visibility = ["//tensorflow:__subpackages__"],
+)
tensorflow/tools/git/gen_git_source.py (new executable file, 223 lines)
@@ -0,0 +1,223 @@
+#!/usr/bin/python
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Help include git hash in tensorflow bazel build.
+
+This creates symlinks from the internal git repository directory so
+that the build system can see changes in the version state. We also
+remember what branch git was on so when the branch changes we can
+detect that the ref file is no longer correct (so we can suggest users
+run ./configure again).
+
+NOTE: this script is only used in opensource.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import argparse
+import json
+import os
+import shutil
+
+
+def parse_branch_ref(filename):
+  """Given a filename of a .git/HEAD file return ref path.
+
+  In particular, if git is in detached head state, this will
+  return None. If git is in attached head, it will return
+  the branch reference. E.g. if on 'master', the HEAD will
+  contain 'ref: refs/heads/master' so 'refs/heads/master'
+  will be returned.
+
+  Example: parse_branch_ref(".git/HEAD")
+  Args:
+    filename: file to treat as a git HEAD file
+  Returns:
+    None if detached head, otherwise ref subpath
+  Raises:
+    RuntimeError: if the HEAD file is unparseable.
+  """
+  data = open(filename).read().strip()
+  items = data.split(" ")
+  if len(items) == 1:
+    return None
+  elif len(items) == 2 and items[0] == "ref:":
+    return items[1].strip()
+  else:
+    raise RuntimeError("Git directory has unparseable HEAD")
+
+
+def configure(src_base_path, debug=False):
+  """Configure `src_base_path` to embed git hashes if available."""
+
+  # TODO(aselle): No files generated or symlinked here are deleted by
+  # the build system. I don't know of a way to do it in bazel. It
+  # should only be a problem if somebody moves a sandbox directory
+  # without running ./configure again.
+
+  git_path = os.path.join(src_base_path, ".git")
+  gen_path = os.path.join(src_base_path, "tensorflow", "tools", "git", "gen")
+
+  # Remove and recreate the path
+  if os.path.exists(gen_path):
+    if os.path.isdir(gen_path):
+      shutil.rmtree(gen_path)
+    else:
+      raise RuntimeError("Cannot delete non-directory %s, inspect "
+                         "and remove manually" % gen_path)
+  os.makedirs(gen_path)
+
+  if not os.path.isdir(gen_path):
+    raise RuntimeError("gen_git_source.py: Failed to create dir")
+
+  # file that specifies what the state of the git repo is
+  spec = {}
+
+  # value file names will be mapped to the keys
+  link_map = {"head": None, "branch_ref": None}
+
+  if not os.path.isdir(git_path):
+    # No git directory
+    spec["git"] = False
+    open(os.path.join(gen_path, "head"), "w").write("")
+    open(os.path.join(gen_path, "branch_ref"), "w").write("")
+  else:
+    # Git directory, possibly detached or attached
+    spec["git"] = True
+    spec["path"] = src_base_path
+    git_head_path = os.path.join(git_path, "HEAD")
+    spec["branch"] = parse_branch_ref(git_head_path)
+    link_map["head"] = git_head_path
+    if spec["branch"] is not None:
+      # attached method
+      link_map["branch_ref"] = os.path.join(git_path, *
+                                            os.path.split(spec["branch"]))
+  # Create symlinks or dummy files
+  for target, src in link_map.items():
+    if src is None:
+      open(os.path.join(gen_path, target), "w").write("")
+    else:
+      os.symlink(src, os.path.join(gen_path, target))
+
+  json.dump(spec, open(os.path.join(gen_path, "spec.json"), "w"), indent=2)
+  if debug:
+    print("gen_git_source.py: list %s" % gen_path)
+    print("gen_git_source.py: %s" % repr(os.listdir(gen_path)))
+    print("gen_git_source.py: spec is %r" % spec)
+
+
+def generate(arglist):
+  """Generate version_info.cc as given `destination_file`.
+
+  Args:
+    arglist: should be a sequence that contains
+             spec, head_symlink, ref_symlink, destination_file.
+
+  `destination_file` is the filename where version_info.cc will be written
+
+  `spec` is a filename where the file contains a JSON dictionary
+    'git' bool that is true if the source is in a git repo
+    'path' base path of the source code
+    'branch' the name of the ref specification of the current branch/tag
+
+  `head_symlink` is a filename to HEAD that is cross-referenced against
+    what is contained in the json branch designation.
+
+  `ref_symlink` is unused in this script but passed, because the build
+    system uses that file to detect when commits happen.
+
+  Raises:
+    RuntimeError: If ./configure needs to be run, RuntimeError will be raised.
+  """
+
+  # unused ref_symlink arg
+  spec, head_symlink, _, dest_file = arglist
+  data = json.load(open(spec))
+  strs = {"tf_compiler_version": "__VERSION__"}
+  if not data["git"]:
+    strs["tf_git_version"] = "internal"
+  else:
+    old_branch = data["branch"]
+    new_branch = parse_branch_ref(head_symlink)
+    if new_branch != old_branch:
+      raise RuntimeError(
+          "Run ./configure again, branch was '%s' but is now '%s'" %
+          (old_branch, new_branch))
+    strs["tf_git_version"] = os.popen(
+        "git -C \"%s\" describe --long --dirty --tags" %
+        (data["path"],)).read().strip()
+  # TODO(aselle): Check for escaping
+  cpp_file = "\n".join("const char* %s() {return \"%s\";}" % (x, y)
+                       for x, y in strs.items())
+  open(dest_file, "w").write(cpp_file + "\n")
+
+
+def raw_generate(output_file):
+  """Simple generator used for cmake/make build systems.
+
+  This does not create any symlinks. It requires the build system
+  to build unconditionally.
+
+  Args:
+    output_file: Output filename for the version info cc
+  """
+
+  strs = {"tf_compiler_version": "__VERSION__"}
+  version = os.popen("git describe --long --dirty --tags").read().strip()
+  version = version if version else "unknown"
+  strs["tf_git_version"] = version
+  cpp_file = "\n".join("const char* %s() {return \"%s\";}" % (x, y)
+                       for x, y in strs.items())
+  open(output_file, "w").write(cpp_file + "\n")
+
+
+parser = argparse.ArgumentParser(description="""Git hash injection into bazel.
+If used with --configure <path> will search for git directory and put symlinks
+into source so that a bazel genrule can call --generate""")
+
+parser.add_argument(
+    "--debug",
+    type=bool,
+    help="print debugging information about paths",
+    default=False)
+
+parser.add_argument(
+    "--configure", type=str,
+    help="Path to configure as a git repo dependency tracking sentinel")
+
+parser.add_argument(
+    "--generate",
+    type=str,
+    help="Generate given spec-file, HEAD-symlink-file, ref-symlink-file",
+    nargs="+")
+
+parser.add_argument(
+    "--raw_generate",
+    type=str,
+    help="Generate version_info.cc (simpler version used for cmake/make)")
+
+args = parser.parse_args()
+
+if args.configure is not None:
+  configure(args.configure, debug=args.debug)
+elif args.generate is not None:
+  generate(args.generate)
+elif args.raw_generate is not None:
+  raw_generate(args.raw_generate)
+else:
+  raise RuntimeError("--configure or --generate or --raw_generate "
+                     "must be used")
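The artifacts this script produces are small; an illustrative peek at them after the --configure and --raw_generate steps shown earlier (values in the comments are examples, not fixed output):

import json

# Written by configure(): whether a git repo was found, the source path,
# and the branch ref (None when HEAD is detached).
spec = json.load(open("tensorflow/tools/git/gen/spec.json"))
print(spec)  # e.g. {"git": True, "path": ".", "branch": "refs/heads/master"}

# Written by generate()/raw_generate(): two one-line C functions.
print(open("tensorflow/core/util/version_info.cc").read())
# const char* tf_compiler_version() {return "__VERSION__";}
# const char* tf_git_version() {return "<output of git describe --long --dirty --tags>";}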