Various Bash Improvements (#10572)

Androbin 2017-06-14 01:55:51 +02:00 committed by Martin Wicke
parent f58e6cebe4
commit 847484e394
20 changed files with 172 additions and 138 deletions

View File

@ -26,7 +26,7 @@ usage() {
[ $# == 0 ] && usage && exit 0
# read the options
ARGS=`getopt -o p:v:h --long prefix:,version:,help -n $0 -- "$@"`
ARGS=$(getopt -o p:v:h --long prefix:,version:,help -n $0 -- "$@")
eval set -- "$ARGS"
# extract options and their arguments into variables.
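Note: the change above replaces legacy backtick command substitution with $(...). The two are equivalent here, but $(...) nests without escaping and stays readable inside double quotes. A minimal sketch (the path is made up for illustration):
# backticks need escaped nesting:
outer=`basename \`dirname /tmp/build/script.sh\``
# $(...) nests cleanly:
outer=$(basename "$(dirname /tmp/build/script.sh)")
echo "${outer}"    # prints: build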

View File

@ -23,7 +23,7 @@
# Make sure we're in the correct directory, at the root of the source tree.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
WORKSPACE="${SCRIPT_DIR}/../../../"
cd ${WORKSPACE}
cd ${WORKSPACE} || exit 1
DOCKER_IMG_NAME="tf-make-base"
DOCKER_CONTEXT_PATH="${WORKSPACE}tensorflow/contrib/makefile/"
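Note: '|| exit 1' stops the script if the cd fails; otherwise everything that follows would run from whatever directory the script happened to start in. A small sketch of the guarded form (the path is illustrative):
WORKSPACE=/tmp/build-workspace       # illustrative path
cd "${WORKSPACE}" || exit 1          # abort rather than continue in the wrong directory
# without the guard, later commands (docker build, rm -rf, ...) would silently
# operate on the old working directory if ${WORKSPACE} does not exist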

View File

@ -27,7 +27,7 @@ cc_prefix="${CC_PREFIX}"
usage() {
echo "Usage: $(basename "$0") [-a:c]"
echo "-a [Architecture] Architecture of target android [default=armeabi-v7a] \
(supported archtecture list: \
(supported architecture list: \
arm64-v8a armeabi armeabi-v7a armeabi-v7a-hard mips mips64 x86 x86_64)"
echo "-c Clean before building protobuf for target"
echo "\"NDK_ROOT\" should be defined as an environment variable."
@ -130,7 +130,7 @@ elif [[ ${ARCHITECTURE} == "x86_64" ]]; then
sysroot_arch="x86_64"
bin_prefix="x86_64-linux-android"
else
echo "archtecture ${arcitecture} is not supported." 1>&2
echo "architecture ${ARCHITECTURE} is not supported." 1>&2
usage
exit 1
fi
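Note: besides the spelling fix, the old message expanded ${arcitecture}, which appears never to be set, so the error printed an empty architecture name; unset variables expand to the empty string unless the script opts into stricter handling. Illustrative sketch:
set -u                      # make expansion of an unset variable a hard error
ARCHITECTURE="mips64"
echo "architecture ${ARCHITECTURE} is not supported." 1>&2    # message goes to stderr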

View File

@ -1,4 +1,4 @@
#!/bin/bash -x -e
#!/bin/bash
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -15,6 +15,9 @@
# ==============================================================================
# Builds protobuf 3 for iOS.
set -x
set -e
SCRIPT_DIR=$(dirname $0)
source "${SCRIPT_DIR}/build_helper.subr"
@ -30,17 +33,17 @@ fi
JOB_COUNT="${JOB_COUNT:-$(get_job_count)}"
GENDIR=`pwd`/gen/protobuf_ios/
GENDIR=$(pwd)/gen/protobuf_ios/
LIBDIR=${GENDIR}lib
mkdir -p ${LIBDIR}
OSX_VERSION=darwin14.0.0
IPHONEOS_PLATFORM=`xcrun --sdk iphoneos --show-sdk-platform-path`
IPHONEOS_SYSROOT=`xcrun --sdk iphoneos --show-sdk-path`
IPHONESIMULATOR_PLATFORM=`xcrun --sdk iphonesimulator --show-sdk-platform-path`
IPHONESIMULATOR_SYSROOT=`xcrun --sdk iphonesimulator --show-sdk-path`
IOS_SDK_VERSION=`xcrun --sdk iphoneos --show-sdk-version`
IPHONEOS_PLATFORM=$(xcrun --sdk iphoneos --show-sdk-platform-path)
IPHONEOS_SYSROOT=$(xcrun --sdk iphoneos --show-sdk-path)
IPHONESIMULATOR_PLATFORM=$(xcrun --sdk iphonesimulator --show-sdk-platform-path)
IPHONESIMULATOR_SYSROOT=$(xcrun --sdk iphonesimulator --show-sdk-path)
IOS_SDK_VERSION=$(xcrun --sdk iphoneos --show-sdk-version)
MIN_SDK_VERSION=8.0
CFLAGS="-DNDEBUG -Os -pipe -fPIC -fno-exceptions"
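Note: moving -x and -e out of the shebang into explicit set lines is the more reliable form: on Linux the kernel passes at most one argument after the interpreter on the shebang line, and shebang flags are bypassed entirely when the script is run as 'bash script.sh' or sourced, whereas set -x / set -e always take effect. Minimal sketch:
#!/bin/bash
set -x    # trace each command before it runs
set -e    # exit on the first command that fails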

View File

@ -20,7 +20,7 @@ source "${SCRIPT_DIR}/build_helper.subr"
JOB_COUNT="${JOB_COUNT:-$(get_job_count)}"
function less_than_required_version() {
echo $1 | (IFS=. read major minor micro
echo $1 | (IFS=. read -r major minor micro
if [ $major -ne $2 ]; then
[ $major -lt $2 ]
elif [ $minor -ne $3 ]; then
@ -31,7 +31,7 @@ function less_than_required_version() {
)
}
ACTUAL_XCODE_VERSION=`xcodebuild -version | head -n 1 | sed 's/Xcode //'`
ACTUAL_XCODE_VERSION=$(xcodebuild -version | head -n 1 | sed 's/Xcode //')
REQUIRED_XCODE_VERSION=7.3.0
if less_than_required_version $ACTUAL_XCODE_VERSION 7 3 0
then
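Note: read -r keeps backslashes in the input literal instead of treating them as escape characters, which is the safe default when parsing arbitrary text. Sketch of the difference:
printf 'a\\b\n' | { read -r line; echo "$line"; }   # prints: a\b
printf 'a\\b\n' | { read    line; echo "$line"; }   # prints: ab (backslash eaten)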

View File

@ -15,15 +15,15 @@
# ==============================================================================
# Builds protobuf 3 for iOS.
cd tensorflow/contrib/makefile
cd tensorflow/contrib/makefile || exit 1
GENDIR=`pwd`/gen/protobuf_pi/
GENDIR=$(pwd)/gen/protobuf_pi/
LIBDIR=${GENDIR}
mkdir -p ${LIBDIR}
CXX=arm-linux-gnueabihf-g++
cd downloads/protobuf
cd downloads/protobuf || exit 1
./autogen.sh
if [ $? -ne 0 ]

View File

@ -32,7 +32,7 @@ TFLEARN_EXAMPLE_BASE_DIR=$DIR/tensorflow/examples/learn
function test() {
echo "Test "$1":"
echo "Test $1:"
$TFLEARN_EXAMPLE_BASE_DIR/$1 $2
if [ $? -eq 0 ]
then
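Note: in the old echo, $1 sits between two quoted fragments and is therefore unquoted, so an argument containing runs of whitespace or glob characters is split and expanded before echo sees it; keeping the expansion inside one double-quoted string passes it through intact. Sketch (the file name is made up):
name="wide   deep.py"          # note the run of spaces
echo "Test "$name":"           # prints: Test wide deep.py:   (splitting collapsed the spaces)
echo "Test $name:"             # prints: Test wide   deep.py: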

View File

@ -20,7 +20,7 @@ go get github.com/golang/protobuf/proto
go get github.com/golang/protobuf/protoc-gen-go
cd $(dirname $0)
for g in $(echo $GOPATH | sed "s/:/ /g"); do
for g in $(echo "${GOPATH//:/ }"); do
TF_DIR="${g}/src/github.com/tensorflow/tensorflow"
PROTOC="${TF_DIR}/bazel-out/host/bin/external/protobuf/protoc"
if [ -x "${PROTOC}" ]; then
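Note: ${GOPATH//:/ } performs the colon-to-space substitution inside the shell itself, so no sed subprocess is needed. Sketch of the expansion (paths are invented):
GOPATH="/home/user/go:/opt/go"
echo "${GOPATH//:/ }"          # prints: /home/user/go /opt/go
for g in ${GOPATH//:/ }; do    # left unquoted so the shell splits on the spaces
  echo "entry: $g"
done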

View File

@ -21,7 +21,7 @@ TF_VERSION="$1"
SETTINGS_XML="$2"
shift
shift
CMD="$@"
CMD="$*"
if [[ -z "${TF_VERSION}" ]]
then
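Note: when the remaining arguments are to be stored as one string, "$*" is the intended form: it joins the positional parameters with the first character of IFS (a space by default). "$@" expands to separate words and is the form to keep when the arguments must stay distinct, e.g. to re-execute them. Sketch with invented arguments:
set -- mvn -B install          # hypothetical remaining arguments
CMD="$*"
echo "${CMD}"                  # prints: mvn -B install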

View File

@ -205,14 +205,17 @@ if [[ -n "${PY_TAGS}" ]]; then
$(echo ${WHL_BASE_NAME} | cut -d \- -f 2)-${PY_TAGS}-${PLATFORM_TAG}.whl
if [[ ! -f "${WHL_DIR}/${NEW_WHL_BASE_NAME}" ]]; then
cp "${WHL_DIR}/${WHL_BASE_NAME}" "${WHL_DIR}/${NEW_WHL_BASE_NAME}" && \
echo "Copied wheel file: ${WHL_BASE_NAME} --> ${NEW_WHL_BASE_NAME}" || \
if cp "${WHL_DIR}/${WHL_BASE_NAME}" "${WHL_DIR}/${NEW_WHL_BASE_NAME}"
then
echo "Copied wheel file: ${WHL_BASE_NAME} --> ${NEW_WHL_BASE_NAME}"
else
die "ERROR: Failed to copy wheel file to ${NEW_WHL_BASE_NAME}"
fi
fi
fi
if [[ $(uname) == "Linux" ]]; then
AUDITED_WHL_NAME="${WHL_DIR}/$(echo ${WHL_BASE_NAME} | sed "s/linux/manylinux1/")"
AUDITED_WHL_NAME="${WHL_DIR}/$(echo ${WHL_BASE_NAME//linux/manylinux1})"
# Repair the wheels for cpu manylinux1
if [[ ${CONTAINER_TYPE} == "cpu" ]]; then
@ -240,14 +243,20 @@ echo "Installing pip whl file: ${WHL_PATH}"
VENV_DIR="${PIP_TEST_ROOT}/venv"
if [[ -d "${VENV_DIR}" ]]; then
rm -rf "${VENV_DIR}" && \
echo "Removed existing virtualenv directory: ${VENV_DIR}" || \
die "Failed to remove existing virtualenv directory: ${VENV_DIR}"
if rm -rf "${VENV_DIR}"
then
echo "Removed existing virtualenv directory: ${VENV_DIR}"
else
die "Failed to remove existing virtualenv directory: ${VENV_DIR}"
fi
fi
mkdir -p ${VENV_DIR} && \
echo "Created virtualenv directory: ${VENV_DIR}" || \
die "FAILED to create virtualenv directory: ${VENV_DIR}"
if mkdir -p ${VENV_DIR}
then
echo "Created virtualenv directory: ${VENV_DIR}"
else
die "FAILED to create virtualenv directory: ${VENV_DIR}"
fi
# Verify that virtualenv exists
if [[ -z $(which virtualenv) ]]; then
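Note: this file replaces the 'cmd && echo ok || die ...' one-liners with explicit if/else. The chained form has a known trap: the || branch also fires if the echo itself fails, and across line continuations it is easy to misread what the || binds to. Sketch of the two shapes (file names invented; die is defined here only for the sketch, the real helper lives elsewhere in these scripts):
die() { echo "$*" >&2; exit 1; }     # stand-in for the scripts' die helper
# risky: '|| die' runs if EITHER cp OR echo returns non-zero
cp src.whl dst.whl && echo "copied" || die "copy failed"
# explicit: the branch depends only on cp's exit status
if cp src.whl dst.whl; then
  echo "copied"
else
  die "copy failed"
fi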

View File

@ -80,7 +80,7 @@ fi
# cmake (CPU) builds do not require configuration.
if [[ "${CONTAINER_TYPE}" == "cmake" ]]; then
CI_COMMAND_PREFIX=""
CI_COMMAND_PREFIX=("")
fi
# Use nvidia-docker if the container is GPU.
@ -120,9 +120,9 @@ DOCKER_IMG_NAME=$(echo "${DOCKER_IMG_NAME}" | tr '[:upper:]' '[:lower:]')
# Print arguments.
echo "WORKSPACE: ${WORKSPACE}"
echo "CI_DOCKER_EXTRA_PARAMS: ${CI_DOCKER_EXTRA_PARAMS[@]}"
echo "COMMAND: ${COMMAND[@]}"
echo "CI_COMMAND_PREFIX: ${CI_COMMAND_PREFIX[@]}"
echo "CI_DOCKER_EXTRA_PARAMS: ${CI_DOCKER_EXTRA_PARAMS[*]}"
echo "COMMAND: ${COMMAND[*]}"
echo "CI_COMMAND_PREFIX: ${CI_COMMAND_PREFIX[*]}"
echo "CONTAINER_TYPE: ${CONTAINER_TYPE}"
echo "BUILD_TAG: ${BUILD_TAG}"
echo " (docker container name will be ${DOCKER_IMG_NAME})"
@ -140,7 +140,7 @@ if [[ $? != "0" ]]; then
fi
# Run the command inside the container.
echo "Running '${COMMAND[@]}' inside ${DOCKER_IMG_NAME}..."
echo "Running '${COMMAND[*]}' inside ${DOCKER_IMG_NAME}..."
mkdir -p ${WORKSPACE}/bazel-ci_build-cache
# By default we cleanup - remove the container once it finish running (--rm)
# and share the PID namespace (--pid=host) so the process inside does not have
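Note: for printing, "${COMMAND[*]}" is the appropriate spelling: it flattens the array into a single word joined by IFS, whereas "${COMMAND[@]}" embedded in a longer quoted string expands to multiple words. "${COMMAND[@]}" remains the right form where the elements must stay separate, i.e. when the command is actually executed. Sketch with an invented array:
COMMAND=(bazel test --config=opt //tensorflow/...)
echo "COMMAND: ${COMMAND[*]}"   # one word handed to echo
# "${COMMAND[@]}"               # would run bazel with each element as its own argument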

View File

@ -200,8 +200,8 @@ echo " TF_BUILD_ENABLE_XLA=${TF_BUILD_ENABLE_XLA}"
function get_cuda_capability_version() {
if [[ ! -z $(which deviceQuery) ]]; then
# The first listed device is used
echo $(deviceQuery | grep "CUDA Capability .* version" | \
head -1 | awk '{print $NF}')
deviceQuery | grep "CUDA Capability .* version" | \
head -1 | awk '{print $NF}'
fi
}
@ -532,11 +532,14 @@ if [[ "${TF_BUILD_PYTHON_VERSION}" == "python3.5" ]]; then
DOCKERFILE="${TMP_DIR}/Dockerfile.${TF_BUILD_CONTAINER_TYPE}"
# Replace a line in the Dockerfile
sed -i \
if sed -i \
's/RUN \/install\/install_pip_packages.sh/RUN \/install\/install_python3.5_pip_packages.sh/g' \
"${DOCKERFILE}" && \
echo "Copied and modified Dockerfile for Python 3.5 build: ${DOCKERFILE}" || \
die "ERROR: Faild to copy and modify Dockerfile: ${DOCKERFILE}"
"${DOCKERFILE}"
then
echo "Copied and modified Dockerfile for Python 3.5 build: ${DOCKERFILE}"
else
die "ERROR: Faild to copy and modify Dockerfile: ${DOCKERFILE}"
fi
DOCKERFILE_FLAG="--dockerfile ${DOCKERFILE}"
fi
@ -574,7 +577,7 @@ rm -f ${TMP_SCRIPT}
END_TIME=$(date +'%s')
echo ""
echo "Parameterized build ends with ${RESULT} at: $(date) "\
"(Elapsed time: $((${END_TIME} - ${START_TIME})) s)"
"(Elapsed time: $((END_TIME - START_TIME)) s)"
# Clean up temporary directory if it exists
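Note: two small idioms in this file: 'echo $(some_command)' merely re-echoes the command's word-split output, so running the command directly is simpler and preserves its output exactly; and inside $(( )) the $ and braces around variable names are unnecessary. Sketch:
start=$(date +'%s')
sleep 1
end=$(date +'%s')
echo "elapsed: $((end - start)) s"   # plain names are fine inside $(( ))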

View File

@ -47,7 +47,7 @@ num_cpus() {
# Get the hash of the last non-merge git commit on the current branch.
# Usage: get_last_non_merge_git_commit
get_last_non_merge_git_commit() {
echo $(git rev-list --no-merges -n 1 HEAD)
git rev-list --no-merges -n 1 HEAD
}
# List files changed (i.e., added, removed or revised) in the last non-merge
@ -75,7 +75,7 @@ get_py_files_to_check() {
echo "${PY_FILES}"
else
echo $(find tensorflow -name '*.py')
find tensorflow -name '*.py'
fi
}
@ -157,25 +157,25 @@ do_pylint() {
NONWL_ERRORS_FILE="$(mktemp)_pylint_nonwl_errors.log"
rm -rf ${OUTPUT_FILE}
rm -rf ${ERRORS_FLIE}
rm -rf ${ERRORS_FILE}
rm -rf ${NONWL_ERRORS_FILE}
touch ${NONWL_ERRORS_FILE}
${PYLINT_BIN} --rcfile="${PYLINTRC_FILE}" --output-format=parseable \
--jobs=${NUM_CPUS} ${PYTHON_SRC_FILES} 2>&1 > ${OUTPUT_FILE}
--jobs=${NUM_CPUS} ${PYTHON_SRC_FILES} > ${OUTPUT_FILE} 2>&1
PYLINT_END_TIME=$(date +'%s')
echo ""
echo "pylint took $((${PYLINT_END_TIME} - ${PYLINT_START_TIME})) s"
echo "pylint took $((PYLINT_END_TIME - PYLINT_START_TIME)) s"
echo ""
grep -E '(\[E|\[W0311|\[W0312)' ${OUTPUT_FILE} > ${ERRORS_FILE}
N_ERRORS=0
while read LINE; do
while read -r LINE; do
IS_WHITELISTED=0
for WL_REGEX in ${ERROR_WHITELIST}; do
if [[ ! -z $(echo ${LINE} | grep "${WL_REGEX}") ]]; then
if echo ${LINE} | grep -q "${WL_REGEX}"; then
echo "Found a whitelisted error:"
echo " ${LINE}"
IS_WHITELISTED=1
@ -248,7 +248,7 @@ do_pep8() {
PEP8_END_TIME=$(date +'%s')
echo ""
echo "pep8 took $((${PEP8_END_TIME} - ${PEP8_START_TIME})) s"
echo "pep8 took $((PEP8_END_TIME - PEP8_START_TIME)) s"
echo ""
if [[ -s ${PEP8_OUTPUT_FILE} ]]; then
@ -278,7 +278,7 @@ do_buildifier(){
BUILDIFIER_END_TIME=$(date +'%s')
echo ""
echo "buildifier took $((${BUILDIFIER_END_TIME} - ${BUILDIFIER_START_TIME})) s"
echo "buildifier took $((BUILDIFIER_END_TIME - BUILDIFIER_START_TIME)) s"
echo ""
if [[ -s ${BUILDIFIER_OUTPUT_FILE} ]]; then
@ -306,7 +306,7 @@ do_external_licenses_check(){
echo "Getting external dependencies for ${BUILD_TARGET}"
bazel query "attr('licenses', 'notice', deps(${BUILD_TARGET}))" --no_implicit_deps --no_host_deps --keep_going \
| egrep -v "^//tensorflow" \
| grep -E -v "^//tensorflow" \
| sed -e 's|:.*||' \
| sort \
| uniq 2>&1 \
@ -315,7 +315,7 @@ do_external_licenses_check(){
echo
echo "Getting list of external licenses mentioned in ${LICENSES_TARGET}."
bazel query "deps(${LICENSES_TARGET})" --no_implicit_deps --no_host_deps --keep_going \
| egrep -v "^//tensorflow" \
| grep -E -v "^//tensorflow" \
| sed -e 's|:.*||' \
| sort \
| uniq 2>&1 \
@ -329,7 +329,7 @@ do_external_licenses_check(){
EXTERNAL_LICENSES_CHECK_END_TIME=$(date +'%s')
echo
echo "do_external_licenses_check took $((${EXTERNAL_LICENSES_CHECK_END_TIME} - ${EXTERNAL_LICENSES_CHECK_START_TIME})) s"
echo "do_external_licenses_check took $((EXTERNAL_LICENSES_CHECK_END_TIME - EXTERNAL_LICENSES_CHECK_START_TIME)) s"
echo
if [[ -s ${MISSING_LICENSES_FILE} ]] || [[ -s ${EXTRA_LICENSES_FILE} ]] ; then
@ -463,7 +463,7 @@ while [[ ${COUNTER} -lt "${#SANITY_STEPS[@]}" ]]; do
((PASS_COUNTER++))
fi
STEP_EXIT_CODES+=(${RESULT})
IFS=" " read -r -a STEP_EXIT_CODES <<< "${RESULT}"
echo ""
((COUNTER++))
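Note: the most consequential fix in this file is the redirection order. Redirections are processed left to right, so 'cmd 2>&1 > file' points stderr at the terminal (the old stdout) before stdout is moved to the file, and errors never reach the log; 'cmd > file 2>&1' captures both streams. Sketch (log path invented):
ls /no/such/dir 2>&1 > /tmp/out.log   # the error still lands on the terminal
ls /no/such/dir > /tmp/out.log 2>&1   # the error goes into /tmp/out.log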

View File

@ -70,7 +70,7 @@ get_container_id_by_image_name() {
# Get the id of a container by image name
# Usage: get_docker_container_id_by_image_name <img_name>
echo $(docker ps | grep $1 | awk '{print $1}')
docker ps | grep $1 | awk '{print $1}'
}
# Parse input arguments
@ -152,7 +152,7 @@ rm -rf "${BUILD_DIR}"
docker run ${DOCKER_IMG_NAME} \
/var/tf_dist_test/scripts/dist_mnist_test.sh \
--ps_hosts $(seq -f "localhost:%g" -s "," \
2000 $((2000 + ${NUM_PARAMETER_SERVERS} - 1))) \
2000 $((2000 + NUM_PARAMETER_SERVERS - 1))) \
--worker_hosts $(seq -f "localhost:%g" -s "," \
3000 $((3000 + ${NUM_WORKERS} - 1))) \
3000 $((3000 + NUM_WORKERS - 1))) \
--num_gpus 0 ${SYNC_REPLICAS_FLAG}

View File

@ -233,13 +233,16 @@ if [[ "${TF_DOCKER_BUILD_IS_DEVEL}" == "no" ]]; then
# Modify python/pip version if necessary.
if [[ "${TF_DOCKER_BUILD_PYTHON_VERSION}" == "python3" ]]; then
sed -i -e 's/python /python3 /g' "${DOCKERFILE}" && \
if sed -i -e 's/python /python3 /g' "${DOCKERFILE}" && \
sed -i -e 's/python-dev/python3-dev/g' "${DOCKERFILE}" && \
sed -i -e 's/pip /pip3 /g' "${DOCKERFILE}" && \
sed -i -e 's^# RUN ln -s /usr/bin/python3 /usr/bin/python#^RUN ln -s /usr/bin/python3 /usr/bin/python^' "${DOCKERFILE}" && \
echo "Modified Dockerfile for python version "\
"${TF_DOCKER_BUILD_PYTHON_VERSION} at: ${DOCKERFILE}" || \
die "FAILED to modify ${DOCKERFILE} for python3"
sed -i -e 's^# RUN ln -s /usr/bin/python3 /usr/bin/python#^RUN ln -s /usr/bin/python3 /usr/bin/python^' "${DOCKERFILE}"
then
echo "Modified Dockerfile for python version "\
"${TF_DOCKER_BUILD_PYTHON_VERSION} at: ${DOCKERFILE}"
else
die "FAILED to modify ${DOCKERFILE} for python3"
fi
fi
else
DOCKERFILE="${TMP_DIR}/Dockerfile"
@ -250,14 +253,17 @@ else
# Modify python/pip version if necessary.
if [[ "${TF_DOCKER_BUILD_PYTHON_VERSION}" == "python3" ]]; then
sed -i -e 's/python-dev/python-dev python3-dev/g' "${DOCKERFILE}" && \
if sed -i -e 's/python-dev/python-dev python3-dev/g' "${DOCKERFILE}" && \
sed -i -e 's/python /python3 /g' "${DOCKERFILE}" && \
sed -i -e 's^/tmp/pip^/tmp/pip3^g' "${DOCKERFILE}" && \
sed -i -e 's/pip /pip3 /g' "${DOCKERFILE}" && \
sed -i -e 's/ENV CI_BUILD_PYTHON python/ENV CI_BUILD_PYTHON python3/g' "${DOCKERFILE}" && \
sed -i -e 's^# RUN ln -s /usr/bin/python3 /usr/bin/python#^RUN ln -s /usr/bin/python3 /usr/bin/python^' "${DOCKERFILE}" && \
echo "Modified Dockerfile further for python version ${TF_DOCKER_BUILD_PYTHON_VERSION} at: ${DOCKERFILE}" || \
die "FAILED to modify ${DOCKERFILE} for python3"
sed -i -e 's^# RUN ln -s /usr/bin/python3 /usr/bin/python#^RUN ln -s /usr/bin/python3 /usr/bin/python^' "${DOCKERFILE}"
then
echo "Modified Dockerfile further for python version ${TF_DOCKER_BUILD_PYTHON_VERSION} at: ${DOCKERFILE}"
else
die "FAILED to modify ${DOCKERFILE} for python3"
fi
fi
fi
@ -277,7 +283,7 @@ fi
# Make sure that there is no other containers of the same image running
# TODO(cais): Move to an earlier place.
if [[ ! -z $("${DOCKER_BINARY}" ps | grep "${IMG}") ]]; then
if "${DOCKER_BINARY}" ps | grep -q "${IMG}"; then
die "ERROR: It appears that there are docker containers of the image "\
"${IMG} running. Please stop them before proceeding"
fi
@ -310,16 +316,22 @@ if [[ "${TF_DOCKER_BUILD_IS_DEVEL}" == "no" ]]; then
# on the running docker container
echo ""
echo "Performing basic sanity checks on the running container..."
wget -qO- "http://127.0.0.1:${CONTAINER_PORT}/tree" &> /dev/null && \
echo " PASS: wget tree" || \
mark_check_failed " FAIL: wget tree"
if wget -qO- "http://127.0.0.1:${CONTAINER_PORT}/tree" &> /dev/null
then
echo " PASS: wget tree"
else
mark_check_failed " FAIL: wget tree"
fi
for NB in ${TMP_DIR}/notebooks/*.ipynb; do
NB_BASENAME=$(basename "${NB}")
NB_URL="http://127.0.0.1:${CONTAINER_PORT}/notebooks/${NB_BASENAME}"
wget -qO- "${NB_URL}" -o "${TMP_DIR}/${NB_BASENAME}" &> /dev/null && \
echo " PASS: wget ${NB_URL}" || \
mark_check_failed " FAIL: wget ${NB_URL}"
if wget -qO- "${NB_URL}" -o "${TMP_DIR}/${NB_BASENAME}" &> /dev/null
then
echo " PASS: wget ${NB_URL}"
else
mark_check_failed " FAIL: wget ${NB_URL}"
fi
done
fi
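Note: '[[ ! -z $(... | grep pattern) ]]' captures grep's whole output only to test whether it is empty; 'grep -q' answers the same question through its exit status and stops at the first match, which is all an if needs. A self-contained sketch:
if printf 'alpha\nbeta\n' | grep -q "beta"; then
  echo "match found"    # grep -q prints nothing and exits 0 on the first match
fi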

View File

@ -71,7 +71,7 @@ rm -rf ${LOG_FILE} || \
# Invoke main Python file
python "${GCS_SMOKE_PY}" --gcs_bucket_url="${GCS_BUCKET_URL}" \
2>&1 > "${LOG_FILE}"
> "${LOG_FILE}" 2>&1
if [[ $? != "0" ]]; then
cat ${LOG_FILE}
@ -92,6 +92,9 @@ NEW_TFREC_URL=$(grep "Using input path" "${LOG_FILE}" | \
if [[ -z ${NEW_TFREC_URL} ]]; then
die "FAIL: Unable to determine the URL to the new tfrecord file in GCS"
fi
"${GSUTIL_BIN}" rm "${NEW_TFREC_URL}" && \
echo "Cleaned up new tfrecord file in GCS: ${NEW_TFREC_URL}" || \
die "FAIL: Unable to clean up new tfrecord file in GCS: ${NEW_TFREC_URL}"
if "${GSUTIL_BIN}" rm "${NEW_TFREC_URL}"
then
echo "Cleaned up new tfrecord file in GCS: ${NEW_TFREC_URL}"
else
die "FAIL: Unable to clean up new tfrecord file in GCS: ${NEW_TFREC_URL}"
fi

View File

@ -20,7 +20,7 @@ if [[ -z "${OUTPUT_FILENAME}" ]]; then
exit 1
fi
GIT_VERSION=`git describe --long --tags`
GIT_VERSION=$(git describe --long --tags)
if [[ $? != 0 ]]; then
GIT_VERSION=unknown;
fi

View File

@ -29,7 +29,7 @@ TAR="${TAR}"
[ -z "${JAVA}" ] && JAVA="java"
[ -z "${JAVAC}" ] && JAVAC="javac"
[ -z "${TAR}"] && TAR="tar"
[ -z "${TAR}" ] && TAR="tar"
# bazel tests run with ${PWD} set to the root of the bazel workspace
TARFILE="${PWD}/tensorflow/tools/lib_package/libtensorflow_jni.tar.gz"

View File

@ -26,7 +26,7 @@ CC="${CC}"
TAR="${TAR}"
[ -z "${CC}" ] && CC="/usr/bin/gcc"
[ -z "${TAR}"] && TAR="tar"
[ -z "${TAR}" ] && TAR="tar"
# bazel tests run with ${PWD} set to the root of the bazel workspace
TARFILE="${PWD}/tensorflow/tools/lib_package/libtensorflow.tar.gz"
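Note: the missing space in '[ -z "${TAR}"]' matters because '[' is an ordinary command and ']' has to be its own final argument; glued onto the operand, the test no longer evaluates what was intended. Sketch:
TAR=""
[ -z "${TAR}" ] && TAR="tar"   # ']' stands alone, as required
echo "${TAR}"                  # prints: tar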

View File

@ -20,45 +20,47 @@ echo "Collecting system information..."
OUTPUT_FILE=tf_env.txt
echo >> $OUTPUT_FILE
echo "== cat /etc/issue ===============================================" >> $OUTPUT_FILE
uname -a >> $OUTPUT_FILE
uname=`uname -s`
if [ "$(uname)" == "Darwin" ]; then
echo Mac OS X `sw_vers -productVersion` >> $OUTPUT_FILE
elif [ "$(uname)" == "Linux" ]; then
cat /etc/*release | grep VERSION >> $OUTPUT_FILE
fi
{
echo
echo "== cat /etc/issue ==============================================="
uname -a
uname=`uname -s`
if [ "$(uname)" == "Darwin" ]; then
echo Mac OS X `sw_vers -productVersion`
elif [ "$(uname)" == "Linux" ]; then
cat /etc/*release | grep VERSION
fi
echo
echo '== are we in docker ============================================='
num=`cat /proc/1/cgroup | grep docker | wc -l`;
if [ $num -ge 1 ]; then
echo "Yes"
else
echo "No"
fi
echo
echo '== compiler ====================================================='
c++ --version 2>&1
echo
echo '== uname -a ====================================================='
uname -a
echo
echo '== check pips ==================================================='
pip list 2>&1 | grep "proto\|numpy\|tensorflow"
echo
echo '== check for virtualenv ========================================='
python -c "import sys;print(hasattr(sys, \"real_prefix\"))"
echo
echo '== tensorflow import ============================================'
} >> ${OUTPUT_FILE}
echo >> $OUTPUT_FILE
echo '== are we in docker =============================================' >> $OUTPUT_FILE
num=`cat /proc/1/cgroup | grep docker | wc -l`;
if [ $num -ge 1 ]; then
echo "Yes" >> $OUTPUT_FILE
else
echo "No" >> $OUTPUT_FILE
fi
echo >> $OUTPUT_FILE
echo '== compiler =====================================================' >> $OUTPUT_FILE
c++ --version 2>&1 >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== uname -a =====================================================' >> $OUTPUT_FILE
uname -a >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== check pips ===================================================' >> $OUTPUT_FILE
pip list 2>&1 | grep "proto\|numpy\|tensorflow" >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== check for virtualenv =========================================' >> $OUTPUT_FILE
python -c "import sys;print(hasattr(sys, \"real_prefix\"))" >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== tensorflow import ============================================' >> $OUTPUT_FILE
cat <<EOF > /tmp/check_tf.py
import tensorflow as tf;
print("tf.VERSION = %s" % tf.VERSION)
@ -70,29 +72,31 @@ EOF
python /tmp/check_tf.py 2>&1 >> ${OUTPUT_FILE}
DEBUG_LD=libs python -c "import tensorflow" 2>>${OUTPUT_FILE} > /tmp/loadedlibs
grep libcudnn.so /tmp/loadedlibs >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== env ==========================================================' >> $OUTPUT_FILE
if [ -z ${LD_LIBRARY_PATH+x} ]; then
echo "LD_LIBRARY_PATH is unset" >> $OUTPUT_FILE;
else
echo LD_LIBRARY_PATH ${LD_LIBRARY_PATH} >> $OUTPUT_FILE;
fi
if [ -z ${DYLD_LIBRARY_PATH+x} ]; then
echo "DYLD_LIBRARY_PATH is unset" >> $OUTPUT_FILE;
else
echo DYLD_LIBRARY_PATH ${DYLD_LIBRARY_PATH} >> $OUTPUT_FILE;
fi
{
grep libcudnn.so /tmp/loadedlibs
echo
echo '== env =========================================================='
if [ -z ${LD_LIBRARY_PATH+x} ]; then
echo "LD_LIBRARY_PATH is unset";
else
echo LD_LIBRARY_PATH ${LD_LIBRARY_PATH} ;
fi
if [ -z ${DYLD_LIBRARY_PATH+x} ]; then
echo "DYLD_LIBRARY_PATH is unset";
else
echo DYLD_LIBRARY_PATH ${DYLD_LIBRARY_PATH} ;
fi
echo
echo '== nvidia-smi ==================================================='
nvidia-smi 2>&1
echo
echo '== cuda libs ==================================================='
} >> ${OUTPUT_FILE}
echo >> $OUTPUT_FILE >> $OUTPUT_FILE
echo '== nvidia-smi ===================================================' >> $OUTPUT_FILE
nvidia-smi 2>&1 >> $OUTPUT_FILE
echo >> $OUTPUT_FILE
echo '== cuda libs ===================================================' >> $OUTPUT_FILE
find /usr/local -type f -name 'libcudart*' 2>/dev/null | grep cuda | grep -v "\\.cache" >> ${OUTPUT_FILE}
find /usr/local -type f -name 'libudnn*' 2>/dev/null | grep cuda | grep -v "\\.cache" >> ${OUTPUT_FILE}
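Note: the rewrite of this file wraps the long runs of echo/uname/pip commands in '{ ...; }' groups so that a single '>> ${OUTPUT_FILE}' redirection covers the whole block instead of being repeated on every line. A minimal sketch (output path invented):
OUTPUT_FILE=/tmp/tf_env.txt          # illustrative path
{
  echo "== uname -a ======================="
  uname -a
  echo "== date ==========================="
  date
} >> "${OUTPUT_FILE}"                # one redirection serves the entire group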