Remove 'gpu_clang' CI Docker image, use 'gpu' image instead.

Clang is now downloaded at build time using the new TF_DOWNLOAD_CLANG
option.

Also removes GPU-specific env vars from 'tools/ci_build/builds/configured';
they are now passed directly to 'docker run' instead.

PiperOrigin-RevId: 180536813
Ilya Biryukov 2018-01-02 05:06:01 -08:00 committed by TensorFlower Gardener
parent 7b700c515b
commit e415e562d4
6 changed files with 32 additions and 124 deletions
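
In practice the cuda_clang configuration is now selected per CI run instead of
being baked into a dedicated image. A rough sketch of the resulting
'docker run' shape, with the image tag, mount paths, and build command as
illustrative placeholders only (only the -e variables come from this change):

    # Illustrative placeholders throughout; not taken verbatim from this commit.
    nvidia-docker run \
      -v "${PWD}":/workspace -w /workspace \
      -e TF_NEED_CUDA=1 \
      -e TF_CUDA_CLANG=1 \
      -e TF_DOWNLOAD_CLANG=1 \
      tf_ci.gpu \
      bash -c 'yes "" | python configure.py && bazel build --config=cuda_clang //tensorflow/...'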

@@ -1,36 +0,0 @@
FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04
LABEL maintainer="Ilya Biryukov <ibiryukov@google.com>"
# In the Ubuntu 16.04 images, cudnn is placed in system paths. Move them to
# /usr/local/cuda
RUN cp /usr/include/cudnn.h /usr/local/cuda/include
RUN cp /usr/lib/x86_64-linux-gnu/libcudnn* /usr/local/cuda/lib64
# Copy and run the install scripts.
COPY install/*.sh /install/
RUN /install/install_bootstrap_deb_packages.sh
RUN add-apt-repository -y ppa:openjdk-r/ppa
# LLVM requires cmake version 3.4.3, but ppa:george-edison55/cmake-3.x only
# provides version 3.2.2.
# So we skip it in `install_deb_packages.sh`, and later install it from
# https://cmake.org in `install_cmake_for_clang.sh`.
RUN /install/install_deb_packages.sh --without_cmake
RUN /install/install_pip_packages.sh
RUN /install/install_bazel.sh
RUN /install/install_golang.sh
# Install cmake and build clang
RUN /install/install_cmake_for_clang.sh
RUN /install/build_and_install_clang.sh
# Set up the master bazelrc configuration file.
COPY install/.bazelrc /etc/bazel.bazelrc
ENV LD_LIBRARY_PATH /usr/local/cuda/extras/CUPTI/lib64:$LD_LIBRARY_PATH
# Configure the build for our CUDA configuration.
ENV TF_NEED_CUDA 1
ENV TF_CUDA_CLANG 1
ENV CLANG_CUDA_COMPILER_PATH /usr/local/bin/clang
ENV TF_CUDA_COMPUTE_CAPABILITIES 3.0

@@ -32,15 +32,6 @@ COMMAND=("$@")
export CI_BUILD_PYTHON="${CI_BUILD_PYTHON:-python}"
export PYTHON_BIN_PATH="${PYTHON_BIN_PATH:-$(which ${CI_BUILD_PYTHON})}"
if [ "${CONTAINER_TYPE}" == "gpu" ]; then
export TF_NEED_CUDA=1
elif [ "${CONTAINER_TYPE}" == "gpu_clang" ]; then
export TF_NEED_CUDA=1
export TF_CUDA_CLANG=1
export CLANG_CUDA_COMPILER_PATH="/usr/local/bin/clang"
else
export TF_NEED_CUDA=0
fi
pushd "${CI_TENSORFLOW_SUBMODULE_PATH:-.}"
yes "" | $PYTHON_BIN_PATH configure.py

@@ -18,7 +18,7 @@
# <COMMAND>
#
# CONTAINER_TYPE: Type of the docker container used the run the build:
-# e.g., (cpu | gpu | gpu_clang | android | tensorboard)
+# e.g., (cpu | gpu | android | tensorboard)
#
# DOCKERFILE_PATH: (Optional) Path to the Dockerfile used for docker build.
# If this optional value is not supplied (via the
@@ -79,7 +79,7 @@ if [[ "${CONTAINER_TYPE}" == "cmake" ]]; then
fi
# Use nvidia-docker if the container is GPU.
if [[ "${CONTAINER_TYPE}" == "gpu" ]] || [[ "${CONTAINER_TYPE}" == "gpu_clang" ]]; then
if [[ "${CONTAINER_TYPE}" == "gpu" ]]; then
DOCKER_BINARY="nvidia-docker"
else
DOCKER_BINARY="docker"
@@ -99,7 +99,7 @@ BUILD_TAG="${BUILD_TAG:-tf_ci}"
# Add extra params for cuda devices and libraries for GPU container.
# And clear them if we are not building for GPU.
if [[ "${CONTAINER_TYPE}" != "gpu" ]] && [[ "${CONTAINER_TYPE}" != "gpu_clang" ]]; then
if [[ "${CONTAINER_TYPE}" != "gpu" ]]; then
GPU_EXTRA_PARAMS=""
fi

@@ -18,7 +18,7 @@
# ci_parameterized_build.sh
#
# The script obeys the following required environment variables:
-# TF_BUILD_CONTAINER_TYPE: (CPU | GPU | GPU_CLANG | ANDROID | ANDROID_FULL)
+# TF_BUILD_CONTAINER_TYPE: (CPU | GPU | ANDROID | ANDROID_FULL)
# TF_BUILD_PYTHON_VERSION: (PYTHON2 | PYTHON3 | PYTHON3.5)
# TF_BUILD_IS_PIP: (NO_PIP | PIP | BOTH)
#
@@ -88,6 +88,9 @@
# TF_NIGHTLY:
# If this run is being used to build the tf_nightly pip
# packages.
+# TF_CUDA_CLANG:
+# If set to 1, builds and runs cuda_clang configuration.
+# Only available inside GPU containers.
#
# This script can be used by Jenkins parameterized / matrix builds.
@@ -246,16 +249,34 @@ if [[ "$(uname -s)" == "Darwin" ]]; then
OPT_FLAG="${OPT_FLAG} ${NO_DOCKER_OPT_FLAG}"
fi
+# In DO_DOCKER mode, appends environment variable to docker's run invocation.
+# Otherwise, exports the corresponding variable.
+function set_script_variable() {
+  local VAR="$1"
+  local VALUE="$2"
+  if [[ $DO_DOCKER == "1" ]]; then
+    TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS="${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS} -e $VAR=$VALUE"
+  else
+    export $VAR="$VALUE"
+  fi
+}
# Process container type
if [[ ${CTYPE} == "cpu" ]] || [[ ${CTYPE} == "debian.jessie.cpu" ]]; then
:
elif [[ ${CTYPE} == "gpu" ]] || [[ ${CTYPE} == "gpu_clang" ]]; then
if [[ ${CTYPE} == "gpu" ]]; then
OPT_FLAG="${OPT_FLAG} --config=cuda"
else # ${CTYPE} == "gpu_clang"
OPT_FLAG="${OPT_FLAG} --config=cuda_clang"
fi
elif [[ ${CTYPE} == "gpu" ]]; then
set_script_variable TF_NEED_CUDA 1
if [[ $TF_CUDA_CLANG == "1" ]]; then
OPT_FLAG="${OPT_FLAG} --config=cuda_clang"
set_script_variable TF_CUDA_CLANG 1
# For cuda_clang we download `clang` while building.
set_script_variable TF_DOWNLOAD_CLANG 1
else
OPT_FLAG="${OPT_FLAG} --config=cuda"
fi
# Attempt to determine CUDA capability version automatically and use it if
# CUDA capability version is not specified by the environment variables.
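
With the branch above in place, a cuda_clang run is requested purely through
environment variables; a hypothetical Jenkins-style invocation (values are
examples, not taken from this commit) could look like:

    # Plain GPU container plus TF_CUDA_CLANG=1, which now selects
    # --config=cuda_clang and sets TF_DOWNLOAD_CLANG=1 for the container.
    export TF_BUILD_CONTAINER_TYPE=GPU
    export TF_BUILD_PYTHON_VERSION=PYTHON3
    export TF_BUILD_IS_PIP=NO_PIP
    export TF_CUDA_CLANG=1
    ./ci_parameterized_build.sh   # run from its directory in the TF tree
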
@@ -407,7 +428,7 @@ if [[ ${TF_BUILD_IS_PIP} == "no_pip" ]] ||
# CPU only command, fully parallel.
NO_PIP_MAIN_CMD="${MAIN_CMD} ${BAZEL_CMD} ${OPT_FLAG} ${EXTRA_ARGS} -- "\
"${BAZEL_TARGET}"
elif [[ ${CTYPE} == "gpu" ]] || [[ ${CTYPE} == "gpu_clang" ]]; then
elif [[ ${CTYPE} == "gpu" ]]; then
# GPU only command, run as many jobs as the GPU count only.
NO_PIP_MAIN_CMD="${BAZEL_CMD} ${OPT_FLAG} "\
"--local_test_jobs=${TF_GPU_COUNT} "\

@@ -1,49 +0,0 @@
#!/usr/bin/env bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -ex
LLVM_SVN_REVISION="314281"
CLANG_TMP_DIR=/tmp/clang-build
mkdir "$CLANG_TMP_DIR"
pushd "$CLANG_TMP_DIR"
# Checkout llvm+clang
svn co -q -r$LLVM_SVN_REVISION http://llvm.org/svn/llvm-project/llvm/trunk "$CLANG_TMP_DIR/llvm"
svn co -q -r$LLVM_SVN_REVISION http://llvm.org/svn/llvm-project/cfe/trunk "$CLANG_TMP_DIR/llvm/tools/clang"
# Build 1st stage. Compile clang with system compiler
mkdir "$CLANG_TMP_DIR/build-1"
cd "$CLANG_TMP_DIR/build-1"
cmake -G"Unix Makefiles" -DCMAKE_BUILD_TYPE=Release "$CLANG_TMP_DIR/llvm"
make -j `nproc` clang clang-headers
# Build 2nd stage. Compile clang with clang built in stage 1
mkdir "$CLANG_TMP_DIR/build-2"
cd "$CLANG_TMP_DIR/build-2"
CC="$CLANG_TMP_DIR/build-1/bin/clang" \
CXX="$CLANG_TMP_DIR/build-1/bin/clang++" \
cmake -G"Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local "$CLANG_TMP_DIR/llvm"
make -j `nproc` install-clang install-clang-headers
popd
# Cleanup
rm -rf "$CLANG_TMP_DIR"

@@ -1,19 +0,0 @@
#!/usr/bin/env bash
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
CMAKE_URL="https://cmake.org/files/v3.7/cmake-3.7.2-Linux-x86_64.tar.gz"
wget -O - "${CMAKE_URL}" | tar xzf - -C /usr/local --strip-components=1