Force clean+fetch when re-running configure with different settings. (#4285)

* Run bazel clean and bazel fetch in the configure script even when building
  without GPU support to force clean+fetch if the user re-runs ./configure
  with a different setting.
* Print a more actionable error message if the user attempts to build with
  --config=cuda but did not configure TensorFlow to build with GPU support.
* Update the BUILD file in @local_config_cuda to use repository-local labels.

Fixes #4105
commit 4316aeb4cb (parent 754048a045)
Author: David Z. Chen, 2016-09-21 12:50:41 -07:00
Committed by: Martin Wicke
4 changed files with 47 additions and 7 deletions
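
For context, a rough sketch of the re-configure scenario this addresses (the prompt answers are illustrative):

  ./configure        # first run: answer 'N' at the GPU prompt
  ./configure        # later run: answer 'Y' to enable GPU support
  # Both paths now end with bazel clean --expunge and bazel fetch //tensorflow/...,
  # so generated repositories such as @local_config_cuda are rebuilt with the new
  # settings instead of being reused from the previous configuration.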

configure

@@ -5,6 +5,11 @@ pushd `dirname $0` #> /dev/null
 SOURCE_BASE_DIR=`pwd -P`
 popd > /dev/null
 
+function bazel_clean_and_fetch() {
+  bazel clean --expunge
+  bazel fetch //tensorflow/...
+}
+
 ## Set up python-related environment settings
 while true; do
   fromuser=""
@@ -114,6 +119,7 @@ done
 
 export TF_NEED_CUDA
 if [ "$TF_NEED_CUDA" == "0" ]; then
   echo "Configuration finished"
+  bazel_clean_and_fetch
   exit
 fi
@@ -300,7 +306,6 @@ EOF
   TF_CUDA_COMPUTE_CAPABILITIES=""
 done
 
-bazel clean --expunge
-bazel fetch //...
+bazel_clean_and_fetch
 
 echo "Configuration finished"

crosstool BUILD file in @local_config_cuda

@@ -2,10 +2,12 @@ licenses(["restricted"])
 
 package(default_visibility = ["//visibility:public"])
 
-filegroup(
-    name = "crosstool",
-    srcs = ["CROSSTOOL"],
-    output_licenses = ["unencumbered"],
+cc_toolchain_suite(
+    name = "toolchain",
+    toolchains = {
+        "local|compiler": ":cc-compiler-local",
+        "darwin|compiler": ":cc-compiler-darwin",
+    },
 )
 
 cc_toolchain(
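
For reference, the cc_toolchain_suite keyed by "cpu|compiler" is the target that --crosstool_top now points at. With GPU support configured, the generated package can be inspected; a sketch (output varies by machine):

  bazel query '@local_config_cuda//crosstool:all'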

cuda_configure.bzl

@@ -331,6 +331,33 @@ def _file(repository_ctx, label):
       {})
 
+_DUMMY_CROSSTOOL_BZL_FILE = """
+def error_gpu_disabled():
+  fail("ERROR: Building with --config=cuda but TensorFlow is not configured " +
+       "to build with GPU support. Please re-run ./configure and enter 'Y' " +
+       "at the prompt to build with GPU support.")
+
+  native.genrule(
+      name = "error_gen_crosstool",
+      outs = ["CROSSTOOL"],
+      cmd = "echo 'Should not be run.' && exit 1",
+  )
+
+  native.filegroup(
+      name = "crosstool",
+      srcs = [":CROSSTOOL"],
+      output_licenses = ["unencumbered"],
+  )
+"""
+
+_DUMMY_CROSSTOOL_BUILD_FILE = """
+load("//crosstool:error_gpu_disabled.bzl", "error_gpu_disabled")
+
+error_gpu_disabled()
+"""
+
 def _create_dummy_repository(repository_ctx):
   cpu_value = _cpu_value(repository_ctx)
   symlink_files = _cuda_symlink_files(cpu_value, _DEFAULT_CUDA_VERSION,
@@ -371,6 +398,12 @@ def _create_dummy_repository(repository_ctx):
                            for c in _DEFAULT_CUDA_COMPUTE_CAPABILITIES]),
       })
 
+  # If cuda_configure is not configured to build with GPU support, and the user
+  # attempts to build with --config=cuda, add a dummy build rule to intercept
+  # this and fail with an actionable error message.
+  repository_ctx.file("crosstool/error_gpu_disabled.bzl",
+                      _DUMMY_CROSSTOOL_BZL_FILE)
+  repository_ctx.file("crosstool/BUILD", _DUMMY_CROSSTOOL_BUILD_FILE)
 
 def _symlink_dir(repository_ctx, src_dir, dest_dir):
   """Symlinks all the files in a directory.

bazel.rc template

@@ -1,4 +1,4 @@
-build:cuda --crosstool_top=@local_config_cuda//crosstool
+build:cuda --crosstool_top=@local_config_cuda//crosstool:toolchain
 build:cuda --define=using_cuda=true --define=using_cuda_nvcc=true
 
 build --force_python=py$PYTHON_MAJOR_VERSION
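
With that change, --config=cuda remains a shorthand for the two build:cuda lines above; a typical GPU build after answering 'Y' at the GPU prompt might look like this (the target and -c opt are illustrative):

  bazel build -c opt --config=cuda //tensorflow/tools/pip_package:build_pip_package

Bazel expands --config=cuda from the rc file into --crosstool_top=@local_config_cuda//crosstool:toolchain plus the two --define flags.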