Introduce ci_scripts/ for GitHub Actions

This commit is contained in:
Alexandre Lissy 2021-03-25 16:20:11 +01:00 committed by Reuben Morais
parent cd80708546
commit 63aeb6a945
24 changed files with 1589 additions and 6 deletions

View File

@ -7,5 +7,5 @@ inputs:
runs:
using: "composite"
steps:
- run: ./taskcluster/tf_tc-build.sh ${{ inputs.flavor }}
- run: ./ci_scripts/tf-build.sh ${{ inputs.flavor }}
shell: bash

View File

@ -18,5 +18,5 @@ runs:
if [ "${{ inputs.build-flavor }}" = "tflite" ]; then
build="_tflite"
fi
./taskcluster/tc-cpp${build}-ds-tests${{ inputs.model-kind }}.sh ${{ inputs.bitrate }}
./ci_scripts/cpp${build}-ds-tests${{ inputs.model-kind }}.sh ${{ inputs.bitrate }}
shell: bash

View File

@ -7,5 +7,5 @@ inputs:
runs:
using: "composite"
steps:
- run: ./taskcluster/host-build.sh ${{ inputs.flavor }}
- run: ./ci_scripts/host-build.sh ${{ inputs.flavor }}
shell: bash

View File

@ -3,5 +3,5 @@ description: "Package TensorFlow Build"
runs:
using: "composite"
steps:
- run: ./taskcluster/tf_tc-package.sh
- run: ./ci_scripts/tf-package.sh
shell: bash

View File

@ -3,5 +3,5 @@ description: "Package of lib"
runs:
using: "composite"
steps:
- run: ./taskcluster/package.sh
- run: ./ci_scripts/package.sh
shell: bash

View File

@ -3,5 +3,5 @@ description: "Setup TensorFlow Build"
runs:
using: "composite"
steps:
- run: ./taskcluster/tf_tc-setup.sh
- run: ./ci_scripts/tf-setup.sh
shell: bash

108
ci_scripts/all-utils.sh Executable file
View File

@ -0,0 +1,108 @@
#!/bin/bash
set -xe
# Map a bitrate label ("8k"/"16k") to the matching LDC93S1 sample WAV file,
# exported via the global ldc93s1_sample_filename.
# Arguments: $1 - bitrate label; must be non-empty and one of the known values.
set_ldc_sample_filename()
{
  local _bitrate=$1

  if [ -z "${_bitrate}" ]; then
    echo "Bitrate should not be empty"
    exit 1
  fi;

  case "${_bitrate}" in
    8k)
      ldc93s1_sample_filename='LDC93S1_pcms16le_1_8000.wav'
    ;;
    16k)
      ldc93s1_sample_filename='LDC93S1_pcms16le_1_16000.wav'
    ;;
    *)
      # Previously an unknown bitrate fell through silently, leaving the
      # global empty and failing much later with a confusing error.
      echo "Unsupported bitrate: ${_bitrate}"
      exit 1
    ;;
  esac
}
# Fetch the test model (plain and mmap variants), smoke-test WAVs, scorers
# and the native_client test sources into ${TASKCLUSTER_TMP_DIR}.
# Reads globals: model_source, model_source_mmap, WGET, DS_DSDIR.
download_data()
{
  # Graphs are served gzipped; decompress to their original basenames.
  local _model_source_file=$(basename "${model_source}")
  ${WGET} "${model_source}" -O - | gunzip --force > "${TASKCLUSTER_TMP_DIR}/${_model_source_file}"

  local _model_source_mmap_file=$(basename "${model_source_mmap}")
  ${WGET} "${model_source_mmap}" -O - | gunzip --force > "${TASKCLUSTER_TMP_DIR}/${_model_source_mmap_file}"

  cp ${DS_DSDIR}/data/smoke_test/*.wav ${TASKCLUSTER_TMP_DIR}/
  cp ${DS_DSDIR}/data/smoke_test/pruned_lm.scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer
  cp ${DS_DSDIR}/data/smoke_test/pruned_lm.bytes.scorer ${TASKCLUSTER_TMP_DIR}/kenlm.bytes.scorer

  cp -R ${DS_DSDIR}/native_client/test ${TASKCLUSTER_TMP_DIR}/test_sources
}

# Stage all test material and list it for the CI logs.
# Arguments: $1 - target dir (currently unused, see TODO below).
download_material()
{
  target_dir=$1

  # TODO: FIXME download_native_client_files "${target_dir}"
  download_data

  ls -hal ${TASKCLUSTER_TMP_DIR}/${model_name} ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} ${TASKCLUSTER_TMP_DIR}/LDC93S1*.wav
}

# Install our xldd wrapper as "<toolchain-prefix>ldd" when the cross
# toolchain does not ship an ldd of its own.
maybe_install_xldd()
{
  # -s required to avoid the noisy output like "Entering / Leaving directories"
  toolchain=$(make -s -C ${DS_DSDIR}/native_client/ TARGET=${SYSTEM_TARGET} TFDIR=${DS_TFDIR} print-toolchain)

  if [ ! -x "${toolchain}ldd" ]; then
    cp "${DS_DSDIR}/native_client/xldd" "${toolchain}ldd" && chmod +x "${toolchain}ldd"
  fi
}

# Checks whether we run a patched version of bazel.
# Patching is required to dump computeKey() parameters to .ckd files
# See bazel.patch
# Return 0 (success exit code) on patched version, 1 on release version
is_patched_bazel()
{
  # An empty "Build label:" capture means the patched build.
  # NOTE(review): assumes the patch drops the label -- confirm vs bazel.patch.
  bazel_version=$(bazel version | grep 'Build label:' | cut -d':' -f2)

  bazel shutdown

  if [ -z "${bazel_version}" ]; then
    return 0;
  else
    return 1;
  fi;
}
# Inspect a bazel --explain log and fail the build when any compile action
# re-ran for a reason other than the known/expected ones.
# Arguments: $1 - path to the bazel explain file.
# On spurious rebuilds: dumps CKD diffs (patched bazel only) and exits 1.
verify_bazel_rebuild()
{
  bazel_explain_file="$1"

  if [ ! -f "${bazel_explain_file}" ]; then
    echo "No such explain file: ${bazel_explain_file}"
    exit 1
  fi;

  mkdir -p ${TASKCLUSTER_ARTIFACTS} || true

  cp ${DS_DSDIR}/tensorflow/bazel*.log ${TASKCLUSTER_ARTIFACTS}/

  # Fix: '[for host]' must be escaped. Unescaped, it is an ERE bracket
  # expression matching any single one of those characters, which excluded
  # almost every line and hid real spurious rebuilds.
  spurious_rebuilds=$(grep 'Executing action' "${bazel_explain_file}" | grep 'Compiling' | grep -v -E 'no entry in the cache|\[for host\]|unconditional execution is requested|Executing genrule //native_client:workspace_status|Compiling native_client/workspace_status.cc|Linking native_client/libdeepspeech.so' | wc -l)

  if [ "${spurious_rebuilds}" -ne 0 ]; then
    echo "Bazel rebuilds some file it should not, please check."

    if is_patched_bazel; then
      mkdir -p ${DS_ROOT_TASK}/ckd/ds ${DS_ROOT_TASK}/ckd/tf
      tar xf ${DS_ROOT_TASK}/bazel-ckd-tf.tar --strip-components=4 -C ${DS_ROOT_TASK}/ckd/ds/
      tar xf ${DS_ROOT_TASK}/bazel-ckd-ds.tar --strip-components=4 -C ${DS_DSDIR}/ckd/tensorflow/

      echo "Making a diff between CKD files"
      mkdir -p ${TASKCLUSTER_ARTIFACTS}
      diff -urNw ${DS_DSDIR}/ckd/tensorflow/ ${DS_ROOT_TASK}/ckd/ds/ | tee ${TASKCLUSTER_ARTIFACTS}/ckd.diff

      rm -fr ${DS_DSDIR}/ckd/tensorflow/ ${DS_ROOT_TASK}/ckd/ds/
    else
      echo "Cannot get CKD information from release, please use patched Bazel"
    fi;

    exit 1
  fi;
}

85
ci_scripts/all-vars.sh Executable file
View File

@ -0,0 +1,85 @@
#!/bin/bash
set -xe
# Host OS detection drives every per-platform setting below.
export OS=$(uname)

# Linux CI workers: everything lives under $HOME.
if [ "${OS}" = "Linux" ]; then
  export DS_ROOT_TASK=${HOME}
  export PYENV_ROOT="${DS_ROOT_TASK}/pyenv-root"
  export DS_CPU_COUNT=$(nproc)
fi;

# Windows/MSYS: uname reports the MSYS version string (TC_MSYS_VERSION).
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
  export DS_ROOT_TASK=${TASKCLUSTER_TASK_DIR}
  export PYENV_ROOT="${TASKCLUSTER_TASK_DIR}/pyenv-root"
  export PLATFORM_EXE_SUFFIX=.exe
  export DS_CPU_COUNT=$(nproc)

  # Those are the versions available on NuGet.org
  export SUPPORTED_PYTHON_VERSIONS="3.5.4:ucs2 3.6.8:ucs2 3.7.6:ucs2 3.8.1:ucs2 3.9.0:ucs2"
fi;

# macOS: private Homebrew prefixes keep build deps and test deps separate.
if [ "${OS}" = "Darwin" ]; then
  export DS_ROOT_TASK=${TASKCLUSTER_TASK_DIR}
  export DS_CPU_COUNT=$(sysctl hw.ncpu |cut -d' ' -f2)
  export PYENV_ROOT="${DS_ROOT_TASK}/pyenv-root"

  export HOMEBREW_NO_AUTO_UPDATE=1
  export BREW_URL=https://github.com/Homebrew/brew/tarball/2.2.17

  export BUILDS_BREW="${TASKCLUSTER_TASK_DIR}/homebrew-builds"
  export TESTS_BREW="${TASKCLUSTER_TASK_DIR}/homebrew-tests"

  export NVM_DIR=$TESTS_BREW/.nvm/ && mkdir -p $NVM_DIR

  export PKG_CONFIG_PATH="${BUILDS_BREW}/lib/pkgconfig"

  if [ -f "${BUILDS_BREW}/bin/brew" ]; then
    export PATH=${BUILDS_BREW}/bin/:${BUILDS_BREW}/opt/node@12/bin:$PATH
  fi;

  if [ -f "${TESTS_BREW}/bin/brew" ]; then
    export PATH=${TESTS_BREW}/bin/:$PATH
  fi;
fi;

# Shared locations for artifacts and temporary data.
export TASKCLUSTER_ARTIFACTS=${TASKCLUSTER_ARTIFACTS:-/tmp/artifacts}
export TASKCLUSTER_TMP_DIR=${TASKCLUSTER_TMP_DIR:-/tmp}

# Scratch directory on Android devices (pushed via adb).
export ANDROID_TMP_DIR=/data/local/tmp

mkdir -p ${TASKCLUSTER_TMP_DIR} || true

# Checkout layout: tensorflow as a sibling, DeepSpeech at the task root.
export DS_TFDIR=${DS_ROOT_TASK}/tensorflow
export DS_DSDIR=${DS_ROOT_TASK}/
export DS_EXAMPLEDIR=${DS_ROOT_TASK}/examples

export DS_VERSION="$(cat ${DS_DSDIR}/training/deepspeech_training/VERSION)"

export GRADLE_USER_HOME=${DS_ROOT_TASK}/gradle-cache
export ANDROID_SDK_HOME=${DS_ROOT_TASK}/DeepSpeech/Android/SDK/
export ANDROID_NDK_HOME=${DS_ROOT_TASK}/DeepSpeech/Android/android-ndk-r18b/

# Default tool commands; overridden per-OS below.
WGET=${WGET:-"wget"}
TAR=${TAR:-"tar"}
XZ=${XZ:-"xz -9 -T0"}
ZIP=${ZIP:-"zip"}
UNXZ=${UNXZ:-"xz -T0 -d"}
UNGZ=${UNGZ:-"gunzip"}

# macOS ships BSD tar; GNU tar (gtar) is required for the flags used here.
if [ "${OS}" = "Darwin" ]; then
  TAR="gtar"
fi

if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
  WGET=/usr/bin/wget.exe
  TAR=/usr/bin/tar.exe
  XZ="xz -9 -T0 -c -"
  UNXZ="xz -9 -T0 -d"
fi

# Model under test; the mmap (.pbmm) variant is derived from the .pb URL.
model_source="${DEEPSPEECH_TEST_MODEL}"
model_name="$(basename "${model_source}")"
model_name_mmap="$(basename -s ".pb" "${model_source}").pbmm"
model_source_mmap="$(dirname "${model_source}")/${model_name_mmap}"

# Populated later by set_ldc_sample_filename.
ldc93s1_sample_filename=''

602
ci_scripts/asserts.sh Executable file
View File

@ -0,0 +1,602 @@
#!/bin/bash
set -xe
# Collapse runs of whitespace (spaces/tabs/newlines) to single spaces and
# trim both ends, via deliberate shell word splitting.
# Fix: the old sed used BRE where '+' is a literal, so it was dead code;
# the unquoted 'echo $1' was doing all the work. Make that explicit.
strip() {
  # shellcheck disable=SC2086 -- word splitting is the point here
  set -- $1
  echo "$*"
}
# This verify exact inference result
# Arguments: $1 - produced transcript, $2 - expected transcript,
#            $3 - exit status of the client run.
# Reads ${TASKCLUSTER_TMP_DIR}/stderr to decide whether a non-zero status
# is the known "model too old" case (treated as a skip) or a real failure.
assert_correct_inference()
{
  phrase=$(strip "$1")
  expected=$(strip "$2")
  status=$3

  if [ "$status" -ne "0" ]; then
    case "$(cat ${TASKCLUSTER_TMP_DIR}/stderr)" in
      *"incompatible with minimum version"*)
        echo "Prod model too old for client, skipping test."
        return 0
      ;;

      *)
        echo "Client failed to run:"
        cat ${TASKCLUSTER_TMP_DIR}/stderr
        return 1
      ;;
    esac
  fi

  if [ -z "${phrase}" -o -z "${expected}" ]; then
    echo "One or more empty strings:"
    echo "phrase: <${phrase}>"
    echo "expected: <${expected}>"
    return 1
  fi;

  if [ "${phrase}" = "${expected}" ]; then
    echo "Proper output has been produced:"
    echo "${phrase}"
    return 0
  else
    echo "!! Non matching output !!"
    echo "got: <${phrase}>"
    # xxd dumps make invisible-character mismatches diagnosable in CI logs.
    if [ -x "$(command -v xxd)" ]; then
      echo "xxd:"; echo "${phrase}" | xxd
    fi
    echo "-------------------"
    echo "expected: <${expected}>"
    if [ -x "$(command -v xxd)" ]; then
      echo "xxd:"; echo "${expected}" | xxd
    fi
    return 1
  fi;
}
# This verify that ${expected} is contained within ${phrase}
# Arguments: $1 - produced transcript, $2 - expected substring (used as a
#            glob pattern), $3 - exit status of the client run.
# Like assert_correct_inference but only requires containment, and checks
# the empty-string guard before the status (order differs from its sibling).
assert_working_inference()
{
  phrase=$1
  expected=$2
  status=$3

  if [ -z "${phrase}" -o -z "${expected}" ]; then
    echo "One or more empty strings:"
    echo "phrase: <${phrase}>"
    echo "expected: <${expected}>"
    return 1
  fi;

  if [ "$status" -ne "0" ]; then
    case "$(cat ${TASKCLUSTER_TMP_DIR}/stderr)" in
      *"incompatible with minimum version"*)
        echo "Prod model too old for client, skipping test."
        return 0
      ;;

      *)
        echo "Client failed to run:"
        cat ${TASKCLUSTER_TMP_DIR}/stderr
        return 1
      ;;
    esac
  fi

  # Unquoted ${expected} on purpose: containment via glob matching.
  case "${phrase}" in
    *${expected}*)
      echo "Proper output has been produced:"
      echo "${phrase}"
      return 0
    ;;

    *)
      echo "!! Non matching output !!"
      echo "got: <${phrase}>"
      if [ -x "$(command -v xxd)" ]; then
        echo "xxd:"; echo "${phrase}" | xxd
      fi
      echo "-------------------"
      echo "expected: <${expected}>"
      if [ -x "$(command -v xxd)" ]; then
        echo "xxd:"; echo "${expected}" | xxd
      fi
      return 1
    ;;
  esac
}
# Assert that ${expected} (a glob pattern) appears somewhere in ${stderr}.
# The "incompatible with minimum version" marker is treated as a skip.
# Returns 0 on match/skip, 1 on empty arguments or no match.
assert_shows_something()
{
  stderr=$1
  expected=$2

  if [ -z "${stderr}" -o -z "${expected}" ]; then
    echo "One or more empty strings:"
    echo "stderr: <${stderr}>"
    echo "expected: <${expected}>"
    return 1
  fi;

  case "${stderr}" in
    *"incompatible with minimum version"*)
      echo "Prod model too old for client, skipping test."
      return 0
    ;;

    # Unquoted on purpose: ${expected} is matched as a glob pattern.
    *${expected}*)
      echo "Proper output has been produced:"
      echo "${stderr}"
      return 0
    ;;

    *)
      echo "!! Non matching output !!"
      echo "got: <${stderr}>"
      if [ -x "$(command -v xxd)" ]; then
        echo "xxd:"; echo "${stderr}" | xxd
      fi
      echo "-------------------"
      echo "expected: <${expected}>"
      if [ -x "$(command -v xxd)" ]; then
        echo "xxd:"; echo "${expected}" | xxd
      fi
      return 1
    ;;
  esac
}
# Verify that the glob pattern ${not_expected} does NOT occur inside
# ${stderr}. Returns 1 with diagnostics when it does occur or when either
# argument is empty; returns 0 otherwise.
assert_not_present()
{
  stderr=$1
  not_expected=$2

  # An empty haystack or pattern makes the containment check meaningless.
  if [ -z "${stderr}" -o -z "${not_expected}" ]; then
    echo "One or more empty strings:"
    echo "stderr: <${stderr}>"
    echo "not_expected: <${not_expected}>"
    return 1
  fi;

  # Unquoted RHS on purpose: ${not_expected} acts as a glob pattern.
  if [[ ${stderr} != *${not_expected}* ]]; then
    echo "Proper not expected output has not been produced:"
    echo "${stderr}"
    return 0
  fi

  echo "!! Not expected was present !!"
  echo "got: <${stderr}>"
  if [ -x "$(command -v xxd)" ]; then
    echo "xxd:"; echo "${stderr}" | xxd
  fi
  echo "-------------------"
  echo "not_expected: <${not_expected}>"
  if [ -x "$(command -v xxd)" ]; then
    echo "xxd:"; echo "${not_expected}" | xxd
  fi
  return 1
}
# Exact-match assertion against the canonical LDC93S1 transcript.
assert_correct_ldc93s1()
{
  assert_correct_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
}

# Substring assertion against the canonical LDC93S1 transcript.
assert_working_ldc93s1()
{
  assert_working_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
}

# NOTE(review): the _lm variants are currently byte-identical to the plain
# ones; kept separate so scorer-backed expectations can diverge later.
assert_correct_ldc93s1_lm()
{
  assert_correct_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
}

assert_working_ldc93s1_lm()
{
  assert_working_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
}

# Check the '%'-joined multi-file output contains each expected transcript.
# NOTE(review): "$?" here is the status of the PREVIOUS command/assert, not
# of the client run -- confirm this is intended.
assert_correct_multi_ldc93s1()
{
  assert_shows_something "$1" "/${ldc93s1_sample_filename}%she had your dark suit in greasy wash water all year%" "$?"
  assert_shows_something "$1" "/LDC93S1_pcms16le_2_44100.wav%she had your dark suit in greasy wash water all year%" "$?"
  ## 8k will output garbage anyway ...
  # assert_shows_something "$1" "/LDC93S1_pcms16le_1_8000.wav%she hayorasryrtl lyreasy asr watal w water all year%"
}

# Prod-model expectation varies per bitrate ($3: empty/"16k" default, "8k").
assert_correct_ldc93s1_prodmodel()
{
  if [ -z "$3" -o "$3" = "16k" ]; then
    assert_correct_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
  fi;

  if [ "$3" = "8k" ]; then
    assert_correct_inference "$1" "she had to do suit in greasy wash water all year" "$2"
  fi;
}

# TFLite prod model produces slightly different (lossier) transcripts.
assert_correct_ldc93s1_prodtflitemodel()
{
  if [ -z "$3" -o "$3" = "16k" ]; then
    assert_correct_inference "$1" "she had her dark suit in greasy wash water all year" "$2"
  fi;

  if [ "$3" = "8k" ]; then
    assert_correct_inference "$1" "she had to do so and greasy wash water all year" "$2"
  fi;
}

assert_correct_ldc93s1_prodmodel_stereo_44k()
{
  assert_correct_inference "$1" "she had your dark suit in greasy wash water all year" "$2"
}

assert_correct_ldc93s1_prodtflitemodel_stereo_44k()
{
  assert_correct_inference "$1" "she had her dark suit in greasy wash water all year" "$2"
}

# The client warns about "erratic speech recognition" when it upsamples.
assert_correct_warning_upsampling()
{
  assert_shows_something "$1" "erratic speech recognition"
}

# Client stderr must mention the TensorFlow version we built against.
assert_tensorflow_version()
{
  assert_shows_something "$1" "${EXPECTED_TENSORFLOW_VERSION}"
}

# A proper build must never report an unknown DeepSpeech version.
assert_deepspeech_version()
{
  assert_not_present "$1" "DeepSpeech: unknown"
}
# We need to ensure that running on inference really leverages GPU because
# it might default back to CPU
# Arguments: $1 - "cuda" enables the check; anything else is a no-op.
ensure_cuda_usage()
{
  local _maybe_cuda=$1
  DS_BINARY_FILE=${DS_BINARY_FILE:-"deepspeech"}

  if [ "${_maybe_cuda}" = "cuda" ]; then
    set +e
    export TF_CPP_MIN_VLOG_LEVEL=1
    # Verbose TF logs go to stderr; swap fds so we capture them only.
    ds_cuda=$(${DS_BINARY_PREFIX}${DS_BINARY_FILE} --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>&1 1>/dev/null)
    export TF_CPP_MIN_VLOG_LEVEL=
    set -e

    assert_shows_something "${ds_cuda}" "Successfully opened dynamic library nvcuda.dll"
    assert_not_present "${ds_cuda}" "Skipping registering GPU devices"
  fi;
}

# Run one inference and verify the TensorFlow / DeepSpeech version strings
# the client prints on stderr.
check_versions()
{
  set +e
  ds_help=$(${DS_BINARY_PREFIX}deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>&1 1>/dev/null)
  set -e

  assert_tensorflow_version "${ds_help}"
  assert_deepspeech_version "${ds_help}"
}

# Assert that the client's --version output mentions the expected runtime.
assert_deepspeech_runtime()
{
  local expected_runtime=$1

  set +e
  local ds_version=$(${DS_BINARY_PREFIX}deepspeech --version 2>&1)
  set -e

  assert_shows_something "${ds_version}" "${expected_runtime}"
}

check_runtime_nodejs()
{
  assert_deepspeech_runtime "Runtime: Node"
}

check_runtime_electronjs()
{
  assert_deepspeech_runtime "Runtime: Electron"
}
# Basic no-scorer inference checks against the TFLite model staged in
# ${DATA_TMP_DIR}, with and without --extended output.
# Fix: capture the client's exit code before 'set -e' resets $?; the old
# code passed a literal "$?" that was always 0 by the time the assert ran.
run_tflite_basic_inference_tests()
{
  set +e
  phrase_pbmodel_nolm=$(${DS_BINARY_PREFIX}deepspeech --model ${DATA_TMP_DIR}/${model_name} --audio ${DATA_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(${DS_BINARY_PREFIX}deepspeech --model ${DATA_TMP_DIR}/${model_name} --audio ${DATA_TMP_DIR}/${ldc93s1_sample_filename} --extended 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm}" "$status"
}
# .NET Framework console client tests: pb / pb+extended / pbmm / pbmm+scorer.
# Fix: capture each client exit code before 'set -e' resets $?; the old code
# passed a literal "$?" that was always 0 by the time the assert ran.
run_netframework_inference_tests()
{
  set +e
  phrase_pbmodel_nolm=$(DeepSpeechConsole.exe --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(DeepSpeechConsole.exe --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --extended yes 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(DeepSpeechConsole.exe --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_withlm=$(DeepSpeechConsole.exe --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
}
# Electron client tests: pb / pb+extended / pbmm / pbmm+scorer.
# Fix: capture each client exit code before 'set -e' resets $?; the old code
# passed a literal "$?" that was always 0 by the time the assert ran.
run_electronjs_inference_tests()
{
  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --extended 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
}
# Scorer-less and scorer-backed inference runs against the test model
# (pb and pbmm), including the missing-model error path.
run_basic_inference_tests()
{
  # Deliberately broken invocation: the client must report the problem.
  set +e
  deepspeech --model "" --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr
  set -e
  grep "Missing model information" ${TASKCLUSTER_TMP_DIR}/stderr

  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --extended 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm}" "$status"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
}

# Full single-file suite: basic runs plus stereo 44.1kHz input and the
# upsampling warning (only when the sample is not already 8kHz).
run_all_inference_tests()
{
  run_basic_inference_tests

  set +e
  phrase_pbmodel_nolm_stereo_44k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_2_44100.wav 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1 "${phrase_pbmodel_nolm_stereo_44k}" "$status"

  set +e
  phrase_pbmodel_withlm_stereo_44k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_2_44100.wav 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm_stereo_44k}" "$status"

  # Run down-sampling warning test only when we actually perform downsampling
  if [ "${ldc93s1_sample_filename}" != "LDC93S1_pcms16le_1_8000.wav" ]; then
    set +e
    phrase_pbmodel_nolm_mono_8k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_1_8000.wav 2>&1 1>/dev/null)
    set -e
    assert_correct_warning_upsampling "${phrase_pbmodel_nolm_mono_8k}"

    set +e
    phrase_pbmodel_withlm_mono_8k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_1_8000.wav 2>&1 1>/dev/null)
    set -e
    assert_correct_warning_upsampling "${phrase_pbmodel_withlm_mono_8k}"
  fi;
}

# Two concurrent streams through the prod model; the two transcripts come
# back as the first and last line of the helper script's stdout.
# Arguments: $1 - bitrate label (captured but not used below).
run_prod_concurrent_stream_tests()
{
  local _bitrate=$1

  set +e
  output=$(python ${TASKCLUSTER_TMP_DIR}/test_sources/concurrent_streams.py \
             --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} \
             --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer \
             --audio1 ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_1_16000.wav \
             --audio2 ${TASKCLUSTER_TMP_DIR}/new-home-in-the-stars-16k.wav 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e

  output1=$(echo "${output}" | head -n 1)
  output2=$(echo "${output}" | tail -n 1)

  assert_correct_ldc93s1_prodmodel "${output1}" "${status}" "16k"
  assert_correct_inference "${output2}" "we must find a new home in the stars" "${status}"
}
# Scorer-backed inference runs against the production model (pb and pbmm),
# plus stereo 44.1kHz and the upsampling warning.
# Arguments: $1 - bitrate label forwarded to the prod assert helpers.
run_prod_inference_tests()
{
  local _bitrate=$1

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm_stereo_44k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_2_44100.wav 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodmodel_stereo_44k "${phrase_pbmodel_withlm_stereo_44k}" "$status"

  # Run down-sampling warning test only when we actually perform downsampling
  if [ "${ldc93s1_sample_filename}" != "LDC93S1_pcms16le_1_8000.wav" ]; then
    set +e
    phrase_pbmodel_withlm_mono_8k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_1_8000.wav 2>&1 1>/dev/null)
    set -e
    assert_correct_warning_upsampling "${phrase_pbmodel_withlm_mono_8k}"
  fi;
}

# Same suite as run_prod_inference_tests but with TFLite prod expectations.
# Arguments: $1 - bitrate label forwarded to the prod assert helpers.
run_prodtflite_inference_tests()
{
  local _bitrate=$1

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodtflitemodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodtflitemodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm_stereo_44k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_2_44100.wav 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_prodtflitemodel_stereo_44k "${phrase_pbmodel_withlm_stereo_44k}" "$status"

  # Run down-sampling warning test only when we actually perform downsampling
  if [ "${ldc93s1_sample_filename}" != "LDC93S1_pcms16le_1_8000.wav" ]; then
    set +e
    phrase_pbmodel_withlm_mono_8k=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/LDC93S1_pcms16le_1_8000.wav 2>&1 1>/dev/null)
    set -e
    assert_correct_warning_upsampling "${phrase_pbmodel_withlm_mono_8k}"
  fi;
}
# Point the client at a directory of WAVs; newlines in the joined output
# are mapped to '%' so the multi-file assert can glob-match it.
# pipefail ensures a client failure is not masked by the tr stage.
run_multi_inference_tests()
{
  set +e -o pipefail
  multi_phrase_pbmodel_nolm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --audio ${TASKCLUSTER_TMP_DIR}/ 2>${TASKCLUSTER_TMP_DIR}/stderr | tr '\n' '%')
  status=$?
  set -e +o pipefail
  assert_correct_multi_ldc93s1 "${multi_phrase_pbmodel_nolm}" "$status"

  set +e -o pipefail
  multi_phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/ 2>${TASKCLUSTER_TMP_DIR}/stderr | tr '\n' '%')
  status=$?
  set -e +o pipefail
  assert_correct_multi_ldc93s1 "${multi_phrase_pbmodel_withlm}" "$status"
}

# Hot-word boosting must not break decoding (loose substring check).
run_hotword_tests()
{
  DS_BINARY_FILE=${DS_BINARY_FILE:-"deepspeech"}
  set +e
  hotwords_decode=$(${DS_BINARY_PREFIX}${DS_BINARY_FILE} --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --hot_words "foo:0.0,bar:-0.1" 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_working_ldc93s1_lm "${hotwords_decode}" "$status"
}

# Android flavor: data lives in ${DATA_TMP_DIR} and the exact transcript is
# asserted. NOTE(review): uses assert_correct_* while run_hotword_tests uses
# assert_working_* -- confirm the stricter check is intended on Android.
run_android_hotword_tests()
{
  set +e
  hotwords_decode=$(${DS_BINARY_PREFIX}deepspeech --model ${DATA_TMP_DIR}/${model_name} --scorer ${DATA_TMP_DIR}/kenlm.scorer --audio ${DATA_TMP_DIR}/${ldc93s1_sample_filename} --hot_words "foo:0.0,bar:-0.1" 2>${TASKCLUSTER_TMP_DIR}/stderr)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${hotwords_decode}" "$status"
}

# Streamed decode with intermediate results; only the final line matters.
run_cpp_only_inference_tests()
{
  set +e
  phrase_pbmodel_withlm_intermediate_decode=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream 1280 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm_intermediate_decode}" "$status"
}

# JS client streaming tests (default and --extended metadata output).
run_js_streaming_inference_tests()
{
  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream --extended 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
}
# JS client streaming tests against the prod model (default and --extended).
# Arguments: $1 - bitrate label forwarded to the prod assert helper.
# Fix: removed a duplicated, dead 'local _bitrate=$1' between the two runs.
run_js_streaming_prod_inference_tests()
{
  local _bitrate=$1

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream --extended 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"
}
# JS client streaming tests against the TFLite prod model.
# Arguments: $1 - bitrate label forwarded to the prod assert helper.
# Fix: removed a duplicated, dead 'local _bitrate=$1' between the two runs.
run_js_streaming_prodtflite_inference_tests()
{
  local _bitrate=$1

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_prodtflitemodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"

  set +e
  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream --extended 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
  status=$?
  set -e
  assert_correct_ldc93s1_prodtflitemodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"
}

46
ci_scripts/build-utils.sh Executable file
View File

@ -0,0 +1,46 @@
#!/bin/bash
set -xe
# Run the monolithic bazel build for ${BAZEL_TARGETS}.
# Arguments: $1 - compilation mode, "opt" (default) or e.g. "dbg".
# In opt mode on a patched bazel, snapshots .ckd action-key dumps before
# and after the build so verify_bazel_rebuild can diff them.
do_bazel_build()
{
  local _opt_or_dbg=${1:-"opt"}

  cd ${DS_TFDIR}
  # BAZEL_ENV_FLAGS is a space-separated VAR=value list; eval exports it.
  eval "export ${BAZEL_ENV_FLAGS}"

  if [ "${_opt_or_dbg}" = "opt" ]; then
    if is_patched_bazel; then
      find ${DS_ROOT_TASK}/tensorflow/bazel-out/ -iname "*.ckd" | tar -cf ${DS_ROOT_TASK}/bazel-ckd-tf.tar -T -
    fi;
  fi;

  bazel ${BAZEL_OUTPUT_USER_ROOT} build \
    -s --explain bazel_monolithic.log --verbose_explanations --experimental_strict_action_env --workspace_status_command="bash native_client/bazel_workspace_status_cmd.sh" --config=monolithic -c ${_opt_or_dbg} ${BAZEL_BUILD_FLAGS} ${BAZEL_TARGETS}

  if [ "${_opt_or_dbg}" = "opt" ]; then
    if is_patched_bazel; then
      find ${DS_ROOT_TASK}/tensorflow/bazel-out/ -iname "*.ckd" | tar -cf ${DS_ROOT_TASK}/bazel-ckd-ds.tar -T -
    fi;
    verify_bazel_rebuild "${DS_ROOT_TASK}/tensorflow/bazel_monolithic.log"
  fi;
}
# Stop the bazel server so its memory is released before later build steps.
shutdown_bazel()
{
  cd ${DS_TFDIR}
  bazel ${BAZEL_OUTPUT_USER_ROOT} shutdown
}

# Build the deepspeech CLI binary against the freshly-built library.
# Reads globals: SYSTEM_TARGET, SYSTEM_RASPBIAN, EXTRA_LOCAL_* flags,
# PLATFORM_EXE_SUFFIX (".exe" on Windows, empty elsewhere).
do_deepspeech_binary_build()
{
  cd ${DS_DSDIR}
  make -C native_client/ \
    TARGET=${SYSTEM_TARGET} \
    TFDIR=${DS_TFDIR} \
    RASPBIAN=${SYSTEM_RASPBIAN} \
    EXTRA_CFLAGS="${EXTRA_LOCAL_CFLAGS}" \
    EXTRA_LDFLAGS="${EXTRA_LOCAL_LDFLAGS}" \
    EXTRA_LIBS="${EXTRA_LOCAL_LIBS}" \
    deepspeech${PLATFORM_EXE_SUFFIX}
}

View File

@ -0,0 +1,18 @@
#!/bin/bash
# Smoke test: stage the model and only verify the client's version strings.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

# Bytes output mode with LDC93S1 takes too long to converge so we simply test
# that loading the model won't crash
check_versions

24
ci_scripts/cpp-ds-tests-prod.sh Executable file
View File

@ -0,0 +1,24 @@
#!/bin/bash
# C++ client tests against the production model.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

# Override the defaults from all-vars.sh with the production graphs.
model_source=${DEEPSPEECH_PROD_MODEL}
model_name=$(basename "${model_source}")
model_source_mmap=${DEEPSPEECH_PROD_MODEL_MMAP}
model_name_mmap=$(basename "${model_source_mmap}")

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

run_prod_inference_tests "${bitrate}"

24
ci_scripts/cpp-ds-tests.sh Executable file
View File

@ -0,0 +1,24 @@
#!/bin/bash
# C++ client smoke tests against the test model: single file, directory
# (multi), streaming and hot-word decoding.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

run_all_inference_tests

run_multi_inference_tests

run_cpp_only_inference_tests

run_hotword_tests

View File

@ -0,0 +1,24 @@
#!/bin/bash
# C++ client tests against the TFLite production model.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

# Swap the .pb/.pbmm URLs for their .tflite counterparts.
# NOTE(review): model_name_mmap is derived from the plain tflite source --
# presumably because TFLite has no separate mmap format; confirm.
model_source=${DEEPSPEECH_PROD_MODEL//.pb/.tflite}
model_name=$(basename "${model_source}")
model_name_mmap=$(basename "${model_source}")
model_source_mmap=${DEEPSPEECH_PROD_MODEL_MMAP//.pbmm/.tflite}

export DATA_TMP_DIR=${TASKCLUSTER_TMP_DIR}

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

run_prodtflite_inference_tests "${bitrate}"

View File

@ -0,0 +1,29 @@
#!/bin/bash
# C++ client smoke tests against the TFLite test model: single file,
# directory (multi), streaming and hot-word decoding.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

# Swap the .pb URL for its .tflite counterpart; both model slots point at
# the same file since TFLite has no separate mmap variant here.
model_source=${DEEPSPEECH_TEST_MODEL//.pb/.tflite}
model_name=$(basename "${model_source}")
model_name_mmap=$(basename "${model_source}")

export DATA_TMP_DIR=${TASKCLUSTER_TMP_DIR}

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

run_all_inference_tests

run_multi_inference_tests

run_cpp_only_inference_tests

run_hotword_tests

View File

@ -0,0 +1,22 @@
#!/bin/bash
# Minimal TFLite smoke test: basic no-scorer inference only.
# Arguments: $1 - bitrate label ("8k"/"16k").
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

# Swap the .pb URL for its .tflite counterpart.
model_source=${DEEPSPEECH_TEST_MODEL//.pb/.tflite}
model_name=$(basename "${model_source}")

export DATA_TMP_DIR=${TASKCLUSTER_TMP_DIR}

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

run_tflite_basic_inference_tests

20
ci_scripts/cppwin-ds-tests.sh Executable file
View File

@ -0,0 +1,20 @@
#!/bin/bash
# Windows C++ client tests.
# Arguments: $1 - bitrate label ("8k"/"16k"),
#            $2 - "cuda" to additionally verify GPU usage.
set -xe

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/asserts.sh

bitrate=$1
set_ldc_sample_filename "${bitrate}"

download_material "${TASKCLUSTER_TMP_DIR}/ds"

export PATH=${TASKCLUSTER_TMP_DIR}/ds/:$PATH

check_versions

ensure_cuda_usage "$2"

run_basic_inference_tests

28
ci_scripts/host-build.sh Executable file
View File

@ -0,0 +1,28 @@
#!/bin/bash
# Native host build of libdeepspeech.so, the scorer packager and the CLI.
# Arguments: $1 - optional runtime flavor; "tflite" selects the TFLite runtime.
set -xe

runtime=$1

source $(dirname "$0")/all-vars.sh
source $(dirname "$0")/all-utils.sh
source $(dirname "$0")/build-utils.sh

source $(dirname "$0")/tf-vars.sh

BAZEL_TARGETS="
//native_client:libdeepspeech.so
//native_client:generate_scorer_package
"

if [ "${runtime}" = "tflite" ]; then
  BAZEL_BUILD_TFLITE="--define=runtime=tflite"
fi;

BAZEL_BUILD_FLAGS="${BAZEL_BUILD_TFLITE} ${BAZEL_OPT_FLAGS} ${BAZEL_EXTRA_FLAGS}"
# CPU-only build on the host.
BAZEL_ENV_FLAGS="TF_NEED_CUDA=0"

SYSTEM_TARGET=host

do_bazel_build

do_deepspeech_binary_build

92
ci_scripts/package-utils.sh Executable file
View File

@ -0,0 +1,92 @@
#!/bin/bash
set -xe
package_native_client()
{
  # Package the native client binaries, headers and license into an
  # xz-compressed tarball inside ${TASKCLUSTER_ARTIFACTS}.
  #
  # Globals (read): DS_TFDIR, DS_DSDIR, TASKCLUSTER_ARTIFACTS, TAR, XZ,
  #                 PLATFORM_EXE_SUFFIX
  # Arguments:      $1 - artifact file name (e.g. native_client.tar.xz)
  # Exits:          1 when a required directory or the artifact name is missing
  local tensorflow_dir=${DS_TFDIR}
  local deepspeech_dir=${DS_DSDIR}
  local artifacts_dir=${TASKCLUSTER_ARTIFACTS}
  local artifact_name=$1

  # Quote the paths and use || instead of the deprecated, ambiguous -o.
  if [ ! -d "${tensorflow_dir}" ] || [ ! -d "${deepspeech_dir}" ] || [ ! -d "${artifacts_dir}" ]; then
    echo "Missing directory. Please check:"
    echo "tensorflow_dir=${tensorflow_dir}"
    echo "deepspeech_dir=${deepspeech_dir}"
    echo "artifacts_dir=${artifacts_dir}"
    exit 1
  fi;

  if [ -z "${artifact_name}" ]; then
    echo "Please specify artifact name."
    # Fix: abort instead of falling through and producing a nameless artifact.
    exit 1
  fi;

  # The Windows import library only exists on win builds; include it when present.
  local win_lib=""
  if [ -f "${tensorflow_dir}/bazel-bin/native_client/libdeepspeech.so.if.lib" ]; then
    win_lib="-C ${tensorflow_dir}/bazel-bin/native_client/ libdeepspeech.so.if.lib"
  fi;

  # ${win_lib} is intentionally unquoted: it must split into "-C <dir> <file>"
  # (or vanish entirely when empty).
  ${TAR} -cf - \
    -C "${tensorflow_dir}/bazel-bin/native_client/" libdeepspeech.so \
    ${win_lib} \
    -C "${tensorflow_dir}/bazel-bin/native_client/" generate_scorer_package \
    -C "${deepspeech_dir}/" LICENSE \
    -C "${deepspeech_dir}/native_client/" deepspeech${PLATFORM_EXE_SUFFIX} \
    -C "${deepspeech_dir}/native_client/" deepspeech.h \
    -C "${deepspeech_dir}/native_client/kenlm/" README.mozilla \
    | ${XZ} > "${artifacts_dir}/${artifact_name}"
}
package_native_client_ndk()
{
  # Package the Android (NDK) native client for one ABI into an
  # xz-compressed tarball inside ${TASKCLUSTER_ARTIFACTS}.
  #
  # Globals (read): DS_DSDIR, DS_TFDIR, TASKCLUSTER_ARTIFACTS, TAR, XZ
  # Arguments:      $1 - artifact file name
  #                 $2 - Android ABI directory name (e.g. armeabi-v7a)
  # Exits:          1 when a required directory or an argument is missing
  local deepspeech_dir=${DS_DSDIR}
  local tensorflow_dir=${DS_TFDIR}
  local artifacts_dir=${TASKCLUSTER_ARTIFACTS}
  local artifact_name=$1
  local arch_abi=$2

  # Quote the paths and use || instead of the deprecated, ambiguous -o.
  if [ ! -d "${deepspeech_dir}" ] || [ ! -d "${artifacts_dir}" ]; then
    echo "Missing directory. Please check:"
    echo "deepspeech_dir=${deepspeech_dir}"
    echo "artifacts_dir=${artifacts_dir}"
    exit 1
  fi;

  if [ -z "${artifact_name}" ]; then
    echo "Please specify artifact name."
    # Fix: abort instead of falling through and producing a nameless artifact.
    exit 1
  fi;

  if [ -z "${arch_abi}" ]; then
    echo "Please specify arch abi."
    # Fix: abort instead of falling through with an empty ABI path.
    exit 1
  fi;

  ${TAR} -cf - \
    -C "${deepspeech_dir}/native_client/libs/${arch_abi}/" deepspeech \
    -C "${deepspeech_dir}/native_client/libs/${arch_abi}/" libdeepspeech.so \
    -C "${tensorflow_dir}/bazel-bin/native_client/" generate_scorer_package \
    -C "${deepspeech_dir}/native_client/libs/${arch_abi}/" libc++_shared.so \
    -C "${deepspeech_dir}/native_client/" deepspeech.h \
    -C "${deepspeech_dir}/" LICENSE \
    -C "${deepspeech_dir}/native_client/kenlm/" README.mozilla \
    | ${XZ} > "${artifacts_dir}/${artifact_name}"
}
package_libdeepspeech_as_zip()
{
  # Zip just libdeepspeech.so into an artifact inside ${TASKCLUSTER_ARTIFACTS}.
  #
  # Globals (read): DS_TFDIR, TASKCLUSTER_ARTIFACTS, ZIP
  # Arguments:      $1 - artifact file name (e.g. libdeepspeech.zip)
  # Exits:          1 when a required directory or the artifact name is missing
  local tensorflow_dir=${DS_TFDIR}
  local artifacts_dir=${TASKCLUSTER_ARTIFACTS}
  local artifact_name=$1

  # Quote the paths and use || instead of the deprecated, ambiguous -o.
  if [ ! -d "${tensorflow_dir}" ] || [ ! -d "${artifacts_dir}" ]; then
    echo "Missing directory. Please check:"
    echo "tensorflow_dir=${tensorflow_dir}"
    echo "artifacts_dir=${artifacts_dir}"
    exit 1
  fi;

  if [ -z "${artifact_name}" ]; then
    echo "Please specify artifact name."
    # Fix: abort instead of falling through and producing a nameless artifact.
    exit 1
  fi;

  # --junk-paths stores the library without its directory prefix.
  ${ZIP} -r9 --junk-paths "${artifacts_dir}/${artifact_name}" "${tensorflow_dir}/bazel-bin/native_client/libdeepspeech.so"
}

23
ci_scripts/package.sh Executable file
View File

@ -0,0 +1,23 @@
#!/bin/bash
set -xe

# Collect build artifacts (native client tarball, libdeepspeech zip, Python
# wheels, JS packages) into ${TASKCLUSTER_ARTIFACTS}.

# Fix: quote the command substitutions so sourcing still works when the
# script is invoked from a path containing spaces; quote path expansions
# below for the same reason (globs kept outside the quotes on purpose).
source "$(dirname "$0")/all-vars.sh"
source "$(dirname "$0")/package-utils.sh"

mkdir -p "${TASKCLUSTER_ARTIFACTS}" || true

# Keep the bazel explain logs around for debugging build issues.
cp "${DS_DSDIR}"/tensorflow/bazel*.log "${TASKCLUSTER_ARTIFACTS}/"

package_native_client "native_client.tar.xz"

package_libdeepspeech_as_zip "libdeepspeech.zip"

# Python wheels and the Node package only exist on bindings builds.
if [ -d "${DS_DSDIR}/wheels" ]; then
  cp "${DS_DSDIR}"/wheels/* "${TASKCLUSTER_ARTIFACTS}/"
  cp "${DS_DSDIR}"/native_client/javascript/deepspeech-*.tgz "${TASKCLUSTER_ARTIFACTS}/"
fi;

if [ -f "${DS_DSDIR}/native_client/javascript/wrapper.tar.gz" ]; then
  cp "${DS_DSDIR}/native_client/javascript/wrapper.tar.gz" "${TASKCLUSTER_ARTIFACTS}/"
fi;

51
ci_scripts/tf-build.sh Executable file
View File

@ -0,0 +1,51 @@
#!/bin/bash
set -ex
# Build TensorFlow (monolithic config) for the platform selected by $1.
#   $1: platform flag (--linux-cpu, --linux-cuda, --android-armv7, ...)
#   $2: optional "dbg" to build with -c dbg instead of -c opt.
# All BAZEL_*_FLAGS / BUILD_TARGET_* variables come from tf-vars.sh.
source $(dirname $0)/tf-vars.sh
pushd ${DS_ROOT_TASK}/tensorflow/
BAZEL_BUILD="bazel ${BAZEL_OUTPUT_USER_ROOT} build -s --explain bazel_monolithic_tf.log --verbose_explanations --experimental_strict_action_env --config=monolithic"
# Start a bazel process to ensure reliability on Windows and avoid:
# FATAL: corrupt installation: file 'c:\builds\tc-workdir\.bazel_cache/install/6b1660721930e9d5f231f7d2a626209b/_embedded_binaries/build-runfiles.exe' missing.
bazel ${BAZEL_OUTPUT_USER_ROOT} info
# Force toolchain sync (useful on macOS ?)
bazel ${BAZEL_OUTPUT_USER_ROOT} sync --configure
MAYBE_DEBUG=$2
# Default to an optimized build; "dbg" as $2 switches to a debug build.
OPT_OR_DBG="-c opt"
if [ "${MAYBE_DEBUG}" = "dbg" ]; then
OPT_OR_DBG="-c dbg"
fi;
# Each arm runs TensorFlow's ./configure non-interactively (empty stdin;
# TF_* env vars preselect the answers) and then builds that platform's
# target set: C++ API lib for desktop, TFLite lib for mobile/embedded.
case "$1" in
"--linux-cpu"|"--darwin-cpu"|"--windows-cpu")
echo "" | TF_NEED_CUDA=0 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_OPT_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BUILD_TARGET_LIB_CPP_API} ${BUILD_TARGET_LITE_LIB}
;;
"--linux-cuda"|"--windows-cuda")
eval "export ${TF_CUDA_FLAGS}" && (echo "" | TF_NEED_CUDA=1 ./configure) && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_CUDA_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BAZEL_OPT_FLAGS} ${BUILD_TARGET_LIB_CPP_API}
;;
"--linux-arm")
echo "" | TF_NEED_CUDA=0 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_ARM_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
"--linux-arm64")
echo "" | TF_NEED_CUDA=0 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_ARM64_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
"--android-armv7")
echo "" | TF_SET_ANDROID_WORKSPACE=1 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_ANDROID_ARM_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
"--android-arm64")
echo "" | TF_SET_ANDROID_WORKSPACE=1 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_ANDROID_ARM64_FLAGS} ${BAZEL_EXTRA_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
"--ios-arm64")
echo "" | TF_NEED_CUDA=0 TF_CONFIGURE_IOS=1 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_IOS_ARM64_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
"--ios-x86_64")
echo "" | TF_NEED_CUDA=0 TF_CONFIGURE_IOS=1 ./configure && ${BAZEL_BUILD} ${OPT_OR_DBG} ${BAZEL_IOS_X86_64_FLAGS} ${BUILD_TARGET_LITE_LIB}
;;
esac
# Shut the bazel server down so nothing holds locks/files after the build.
bazel ${BAZEL_OUTPUT_USER_ROOT} shutdown
popd

62
ci_scripts/tf-package.sh Executable file
View File

@ -0,0 +1,62 @@
#!/bin/bash
set -xe
# Collect TensorFlow build outputs plus a full snapshot of the worker's
# home/task directory into ${TASKCLUSTER_ARTIFACTS}, and write a
# checksums.txt manifest for everything produced.
source $(dirname $0)/tf-vars.sh
mkdir -p ${TASKCLUSTER_ARTIFACTS} || true
cp ${DS_ROOT_TASK}/tensorflow/bazel_*.log ${TASKCLUSTER_ARTIFACTS} || true
OUTPUT_ROOT="${DS_ROOT_TASK}/tensorflow/bazel-bin"
# Copy each tool this build flavor actually produced — not every flavor
# builds all of them, hence the per-file existence checks.
for output_bin in \
tensorflow/lite/experimental/c/libtensorflowlite_c.so \
tensorflow/tools/graph_transforms/transform_graph \
tensorflow/tools/graph_transforms/summarize_graph \
tensorflow/tools/benchmark/benchmark_model \
tensorflow/contrib/util/convert_graphdef_memmapped_format \
tensorflow/lite/toco/toco;
do
if [ -f "${OUTPUT_ROOT}/${output_bin}" ]; then
cp ${OUTPUT_ROOT}/${output_bin} ${TASKCLUSTER_ARTIFACTS}/
fi;
done;
# The TFLite benchmark binary shares its name with the TF one copied
# above, so rename it on copy to avoid a collision.
if [ -f "${OUTPUT_ROOT}/tensorflow/lite/tools/benchmark/benchmark_model" ]; then
cp ${OUTPUT_ROOT}/tensorflow/lite/tools/benchmark/benchmark_model ${TASKCLUSTER_ARTIFACTS}/lite_benchmark_model
fi
# It seems that bsdtar and gnutar are behaving a bit differently on the way
# they deal with --exclude="./public/*" ; this caused ./DeepSpeech/tensorflow/core/public/
# to be ditched when we just wanted to get rid of ./public/ on OSX.
# Switching to gnutar (already needed for the --transform on DeepSpeech tasks)
# does the trick.
TAR_EXCLUDE="--exclude=./dls/*"
if [ "${OS}" = "Darwin" ]; then
TAR_EXCLUDE="--exclude=./dls/* --exclude=./public/* --exclude=./generic-worker/* --exclude=./homebrew/* --exclude=./homebrew.cache/* --exclude=./homebrew.logs/*"
fi;
# Make a tar of
# - /home/build-user/ (linux
# - /Users/build-user/TaskCluster/HeavyTasks/X/ (OSX)
# - C:\builds\tc-workdir\ (windows)
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
export PATH=$PATH:'/c/Program Files/7-Zip/'
pushd ${DS_ROOT_TASK}
7z a '-xr!.\dls\' '-xr!.\tmp\' '-xr!.\msys64\' -snl -snh -so home.tar . | 7z a -si ${TASKCLUSTER_ARTIFACTS}/home.tar.xz
popd
else
${TAR} -C ${DS_ROOT_TASK} ${TAR_EXCLUDE} -cf - . | ${XZ} > ${TASKCLUSTER_ARTIFACTS}/home.tar.xz
fi
# Pick the platform's SHA-256 tool; ${SHA_SUM_GEN} stays unquoted on
# purpose so "shasum -a 256" splits into command + arguments.
if [ "${OS}" = "Linux" ]; then
SHA_SUM_GEN="sha256sum"
elif [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
SHA_SUM_GEN="sha256sum"
elif [ "${OS}" = "Darwin" ]; then
SHA_SUM_GEN="shasum -a 256"
fi;
${SHA_SUM_GEN} ${TASKCLUSTER_ARTIFACTS}/* > ${TASKCLUSTER_ARTIFACTS}/checksums.txt

112
ci_scripts/tf-setup.sh Executable file
View File

@ -0,0 +1,112 @@
#!/bin/bash
set -ex
# Prepare a TensorFlow build environment: download and install Bazel, and
# optionally CUDA/CuDNN or the Android NDK/SDK depending on the platform
# flag passed as $1. URLs, checksums and tool variables come from tf-vars.sh.
source $(dirname $0)/tf-vars.sh
install_android=
install_cuda=
case "$1" in
"--linux-cuda"|"--windows-cuda")
install_cuda=yes
;;
"--android-armv7"|"--android-arm64")
install_android=yes
;;
esac
# $1 url
# $2 sha256
# Download $1 into ${DS_ROOT_TASK}/dls and verify its checksum; under
# set -e a checksum mismatch aborts the whole script.
download()
{
fname=`basename $1`
${WGET} $1 -O ${DS_ROOT_TASK}/dls/$fname && echo "$2 ${DS_ROOT_TASK}/dls/$fname" | ${SHA_SUM} -
}
# Download stuff
mkdir -p ${DS_ROOT_TASK}/dls || true
download $BAZEL_URL $BAZEL_SHA256
if [ ! -z "${install_cuda}" ]; then
download $CUDA_URL $CUDA_SHA256
download $CUDNN_URL $CUDNN_SHA256
fi;
if [ ! -z "${install_android}" ]; then
download $ANDROID_NDK_URL $ANDROID_NDK_SHA256
download $ANDROID_SDK_URL $ANDROID_SDK_SHA256
fi;
# For debug
ls -hal ${DS_ROOT_TASK}/dls/
# Install Bazel in ${DS_ROOT_TASK}/bin
BAZEL_INSTALL_FILENAME=$(basename "${BAZEL_URL}")
if [ "${OS}" = "Linux" ]; then
BAZEL_INSTALL_FLAGS="--user"
elif [ "${OS}" = "Darwin" ]; then
BAZEL_INSTALL_FLAGS="--bin=${DS_ROOT_TASK}/bin --base=${DS_ROOT_TASK}/.bazel"
fi;
mkdir -p ${DS_ROOT_TASK}/bin || true
pushd ${DS_ROOT_TASK}/bin
# On Windows the download is the bazel.exe itself; elsewhere it is a
# self-extracting installer script that must be executed.
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
cp ${DS_ROOT_TASK}/dls/${BAZEL_INSTALL_FILENAME} ${DS_ROOT_TASK}/bin/bazel.exe
else
/bin/bash ${DS_ROOT_TASK}/dls/${BAZEL_INSTALL_FILENAME} ${BAZEL_INSTALL_FLAGS}
fi
popd
# For debug
bazel version
bazel shutdown
if [ ! -z "${install_cuda}" ]; then
# Install CUDA and CuDNN
mkdir -p ${DS_ROOT_TASK}/DeepSpeech/CUDA/ || true
pushd ${DS_ROOT_TASK}
CUDA_FILE=`basename ${CUDA_URL}`
PERL5LIB=. sh ${DS_ROOT_TASK}/dls/${CUDA_FILE} --silent --override --toolkit --toolkitpath=${DS_ROOT_TASK}/DeepSpeech/CUDA/ --defaultroot=${DS_ROOT_TASK}/DeepSpeech/CUDA/
CUDNN_FILE=`basename ${CUDNN_URL}`
# CuDNN unpacks on top of the CUDA toolkit directory.
tar xvf ${DS_ROOT_TASK}/dls/${CUDNN_FILE} --strip-components=1 -C ${DS_ROOT_TASK}/DeepSpeech/CUDA/
popd
LD_LIBRARY_PATH=${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/:${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/stubs/:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH
# We might lack libcuda.so.1 symlink, let's fix as upstream does:
# https://github.com/tensorflow/tensorflow/pull/13811/files?diff=split#diff-2352449eb75e66016e97a591d3f0f43dR96
if [ ! -h "${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/stubs/libcuda.so.1" ]; then
ln -s "${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/stubs/libcuda.so" "${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/stubs/libcuda.so.1"
fi;
else
echo "No CUDA/CuDNN to install"
fi
if [ ! -z "${install_android}" ]; then
mkdir -p ${DS_ROOT_TASK}/DeepSpeech/Android/SDK || true
ANDROID_NDK_FILE=`basename ${ANDROID_NDK_URL}`
ANDROID_SDK_FILE=`basename ${ANDROID_SDK_URL}`
pushd ${DS_ROOT_TASK}/DeepSpeech/Android
unzip ${DS_ROOT_TASK}/dls/${ANDROID_NDK_FILE}
popd
pushd ${DS_ROOT_TASK}/DeepSpeech/Android/SDK
unzip ${DS_ROOT_TASK}/dls/${ANDROID_SDK_FILE}
# Auto-accept all SDK licenses so the install is non-interactive.
yes | ./tools/bin/sdkmanager --licenses
./tools/bin/sdkmanager --update
./tools/bin/sdkmanager --install "platforms;android-16" "build-tools;28.0.3"
popd
fi
mkdir -p ${TASKCLUSTER_ARTIFACTS} || true
# Taken from https://www.tensorflow.org/install/source
# Only future is needed for our builds, as we don't build the Python package
pip install -U --user future==0.17.1 || true

213
ci_scripts/tf-vars.sh Executable file
View File

@ -0,0 +1,213 @@
#!/bin/bash
set -ex
export OS=$(uname)
if [ "${OS}" = "Linux" ]; then
export DS_ROOT_TASK=$(/usr/bin/realpath "${HOME}")
BAZEL_URL=https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-installer-linux-x86_64.sh
BAZEL_SHA256=7ba815cbac712d061fe728fef958651512ff394b2708e89f79586ec93d1185ed
CUDA_URL=http://developer.download.nvidia.com/compute/cuda/10.1/Prod/local_installers/cuda_10.1.243_418.87.00_linux.run
CUDA_SHA256=e7c22dc21278eb1b82f34a60ad7640b41ad3943d929bebda3008b72536855d31
# From https://gitlab.com/nvidia/cuda/blob/centos7/10.1/devel/cudnn7/Dockerfile
CUDNN_URL=http://developer.download.nvidia.com/compute/redist/cudnn/v7.6.0/cudnn-10.1-linux-x64-v7.6.0.64.tgz
CUDNN_SHA256=e956c6f9222fcb867a10449cfc76dee5cfd7c7531021d95fe9586d7e043b57d7
ANDROID_NDK_URL=https://dl.google.com/android/repository/android-ndk-r18b-linux-x86_64.zip
ANDROID_NDK_SHA256=4f61cbe4bbf6406aa5ef2ae871def78010eed6271af72de83f8bd0b07a9fd3fd
ANDROID_SDK_URL=https://dl.google.com/android/repository/sdk-tools-linux-4333796.zip
ANDROID_SDK_SHA256=92ffee5a1d98d856634e8b71132e8a95d96c83a63fde1099be3d86df3106def9
SHA_SUM="sha256sum -c --strict"
WGET=/usr/bin/wget
TAR=tar
XZ="pixz -9"
elif [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
if [ -z "${TASKCLUSTER_TASK_DIR}" -o -z "${TASKCLUSTER_ARTIFACTS}" ]; then
echo "Inconsistent Windows setup: missing some vars."
echo "TASKCLUSTER_TASK_DIR=${TASKCLUSTER_TASK_DIR}"
echo "TASKCLUSTER_ARTIFACTS=${TASKCLUSTER_ARTIFACTS}"
exit 1
fi;
# Re-export with cygpath to make sure it is sane, otherwise it might trigger
# unobvious failures with cp etc.
export TASKCLUSTER_TASK_DIR="$(cygpath ${TASKCLUSTER_TASK_DIR})"
export TASKCLUSTER_ARTIFACTS="$(cygpath ${TASKCLUSTER_ARTIFACTS})"
export DS_ROOT_TASK=${TASKCLUSTER_TASK_DIR}
export BAZEL_VC='C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC'
export BAZEL_SH='C:\builds\tc-workdir\msys64\usr\bin\bash'
export TC_WIN_BUILD_PATH='C:\builds\tc-workdir\msys64\usr\bin;C:\Python36'
export MSYS2_ARG_CONV_EXCL='//'
mkdir -p ${TASKCLUSTER_TASK_DIR}/tmp/
export TEMP=${TASKCLUSTER_TASK_DIR}/tmp/
export TMP=${TASKCLUSTER_TASK_DIR}/tmp/
BAZEL_URL=https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-windows-x86_64.exe
BAZEL_SHA256=776db1f4986dacc3eda143932f00f7529f9ee65c7c1c004414c44aaa6419d0e9
CUDA_INSTALL_DIRECTORY=$(cygpath 'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v10.1')
SHA_SUM="sha256sum -c --strict"
WGET=wget
TAR=/usr/bin/tar.exe
XZ="xz -9 -T0"
elif [ "${OS}" = "Darwin" ]; then
if [ -z "${TASKCLUSTER_TASK_DIR}" -o -z "${TASKCLUSTER_ARTIFACTS}" ]; then
echo "Inconsistent OSX setup: missing some vars."
echo "TASKCLUSTER_TASK_DIR=${TASKCLUSTER_TASK_DIR}"
echo "TASKCLUSTER_ARTIFACTS=${TASKCLUSTER_ARTIFACTS}"
exit 1
fi;
export DS_ROOT_TASK=${TASKCLUSTER_TASK_DIR}
BAZEL_URL=https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-installer-darwin-x86_64.sh
BAZEL_SHA256=5cfa97031b43432b3c742c80e2e01c41c0acdca7ba1052fc8cf1e291271bc9cd
SHA_SUM="shasum -a 256 -c"
WGET=wget
TAR=gtar
XZ="xz -9 -T0"
fi;
# /tmp/artifacts for docker-worker on linux,
# and task subdir for generic-worker on osx
export TASKCLUSTER_ARTIFACTS=${TASKCLUSTER_ARTIFACTS:-/tmp/artifacts}
### Define variables that needs to be exported to other processes
PATH=${DS_ROOT_TASK}/bin:$PATH
if [ "${OS}" = "Darwin" ]; then
PATH=${DS_ROOT_TASK}/homebrew/bin/:${DS_ROOT_TASK}/homebrew/opt/node@10/bin:$PATH
fi;
export PATH
if [ "${OS}" = "Linux" ]; then
export LD_LIBRARY_PATH=${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/:${DS_ROOT_TASK}/DeepSpeech/CUDA/lib64/stubs/:$LD_LIBRARY_PATH
export ANDROID_SDK_HOME=${DS_ROOT_TASK}/DeepSpeech/Android/SDK/
export ANDROID_NDK_HOME=${DS_ROOT_TASK}/DeepSpeech/Android/android-ndk-r18b/
fi;
export TF_ENABLE_XLA=0
if [ "${OS}" = "Linux" ]; then
TF_NEED_JEMALLOC=1
elif [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
TF_NEED_JEMALLOC=0
elif [ "${OS}" = "Darwin" ]; then
TF_NEED_JEMALLOC=0
fi;
export TF_NEED_JEMALLOC
export TF_NEED_OPENCL_SYCL=0
export TF_NEED_MKL=0
export TF_NEED_VERBS=0
export TF_NEED_MPI=0
export TF_NEED_IGNITE=0
export TF_NEED_GDR=0
export TF_NEED_NGRAPH=0
export TF_DOWNLOAD_CLANG=0
export TF_SET_ANDROID_WORKSPACE=0
export TF_NEED_TENSORRT=0
export TF_NEED_ROCM=0
# This should be gcc-5, hopefully. CUDA and TensorFlow might not be happy, otherwise.
export GCC_HOST_COMPILER_PATH=/usr/bin/gcc
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
export PYTHON_BIN_PATH=C:/Python36/python.exe
else
if [ "${OS}" = "Linux" ]; then
source /etc/os-release
if [ "${ID}" = "ubuntu" -a "${VERSION_ID}" = "20.04" ]; then
export PYTHON_BIN_PATH=/usr/bin/python3
else
export PYTHON_BIN_PATH=/usr/bin/python2.7
fi
else
export PYTHON_BIN_PATH=/usr/bin/python2.7
fi
fi
## Below, define or export some build variables
# Enable some SIMD support. Limit ourselves to what Tensorflow needs.
# Also ensure to not require too recent CPU: AVX2/FMA introduced by:
# - Intel with Haswell (2013)
# - AMD with Excavator (2015)
# For better compatibility, AVX ony might be better.
#
# Build for generic amd64 platforms, no device-specific optimization
# See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for targetting specific CPUs
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
OPT_FLAGS="/arch:AVX"
else
OPT_FLAGS="-mtune=generic -march=x86-64 -msse -msse2 -msse3 -msse4.1 -msse4.2 -mavx"
fi
BAZEL_OPT_FLAGS=""
for flag in ${OPT_FLAGS};
do
BAZEL_OPT_FLAGS="${BAZEL_OPT_FLAGS} --copt=${flag}"
done;
BAZEL_OUTPUT_CACHE_DIR="${DS_ROOT_TASK}/.bazel_cache/"
BAZEL_OUTPUT_CACHE_INSTANCE="${BAZEL_OUTPUT_CACHE_DIR}/output/"
mkdir -p ${BAZEL_OUTPUT_CACHE_INSTANCE} || true
# We need both to ensure stable path ; default value for output_base is some
# MD5 value.
BAZEL_OUTPUT_USER_ROOT="--output_user_root ${BAZEL_OUTPUT_CACHE_DIR} --output_base ${BAZEL_OUTPUT_CACHE_INSTANCE}"
export BAZEL_OUTPUT_USER_ROOT
NVCC_COMPUTE="3.5"
### Define build parameters/env variables that we will re-ues in sourcing scripts.
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
TF_CUDA_FLAGS="TF_CUDA_CLANG=0 TF_CUDA_VERSION=10.1 TF_CUDNN_VERSION=7.6.0 CUDNN_INSTALL_PATH=\"${CUDA_INSTALL_DIRECTORY}\" TF_CUDA_PATHS=\"${CUDA_INSTALL_DIRECTORY}\" TF_CUDA_COMPUTE_CAPABILITIES=\"${NVCC_COMPUTE}\""
else
TF_CUDA_FLAGS="TF_CUDA_CLANG=0 TF_CUDA_VERSION=10.1 TF_CUDNN_VERSION=7.6.0 CUDNN_INSTALL_PATH=\"${DS_ROOT_TASK}/DeepSpeech/CUDA\" TF_CUDA_PATHS=\"${DS_ROOT_TASK}/DeepSpeech/CUDA\" TF_CUDA_COMPUTE_CAPABILITIES=\"${NVCC_COMPUTE}\""
fi
BAZEL_ARM_FLAGS="--config=rpi3 --config=rpi3_opt --copt=-DTFLITE_WITH_RUY_GEMV"
BAZEL_ARM64_FLAGS="--config=rpi3-armv8 --config=rpi3-armv8_opt --copt=-DTFLITE_WITH_RUY_GEMV"
BAZEL_ANDROID_ARM_FLAGS="--config=android --config=android_arm --action_env ANDROID_NDK_API_LEVEL=21 --cxxopt=-std=c++14 --copt=-D_GLIBCXX_USE_C99 --copt=-DTFLITE_WITH_RUY_GEMV"
BAZEL_ANDROID_ARM64_FLAGS="--config=android --config=android_arm64 --action_env ANDROID_NDK_API_LEVEL=21 --cxxopt=-std=c++14 --copt=-D_GLIBCXX_USE_C99 --copt=-DTFLITE_WITH_RUY_GEMV"
BAZEL_CUDA_FLAGS="--config=cuda"
if [ "${OS}" = "Linux" ]; then
# constexpr usage in tensorflow's absl dep fails badly because of gcc-5
# so let's skip that
BAZEL_CUDA_FLAGS="${BAZEL_CUDA_FLAGS} --copt=-DNO_CONSTEXPR_FOR_YOU=1"
fi
BAZEL_IOS_ARM64_FLAGS="--config=ios_arm64 --define=runtime=tflite --copt=-DTFLITE_WITH_RUY_GEMV"
BAZEL_IOS_X86_64_FLAGS="--config=ios_x86_64 --define=runtime=tflite --copt=-DTFLITE_WITH_RUY_GEMV"
if [ "${OS}" = "${TC_MSYS_VERSION}" ]; then
# Somehow, even with Python being in the PATH, Bazel on windows struggles
# with '/usr/bin/env python' ...
#
# We also force TMP/TEMP otherwise Bazel will pick default Windows one
# under %USERPROFILE%\AppData\Local\Temp and with 8.3 file format convention
# it messes with cxx_builtin_include_directory
BAZEL_EXTRA_FLAGS="--action_env=PATH=${TC_WIN_BUILD_PATH} --action_env=TEMP=${TEMP} --action_env=TMP=${TMP}"
else
BAZEL_EXTRA_FLAGS="--config=noaws --config=nogcp --config=nohdfs --config=nonccl --copt=-fvisibility=hidden"
fi
if [ "${OS}" = "Darwin" ]; then
BAZEL_EXTRA_FLAGS="${BAZEL_EXTRA_FLAGS} --macos_minimum_os 10.10 --macos_sdk_version 10.15"
fi
### Define build targets that we will re-ues in sourcing scripts.
BUILD_TARGET_LIB_CPP_API="//tensorflow:tensorflow_cc"
BUILD_TARGET_GRAPH_TRANSFORMS="//tensorflow/tools/graph_transforms:transform_graph"
BUILD_TARGET_GRAPH_SUMMARIZE="//tensorflow/tools/graph_transforms:summarize_graph"
BUILD_TARGET_GRAPH_BENCHMARK="//tensorflow/tools/benchmark:benchmark_model"
#BUILD_TARGET_CONVERT_MMAP="//tensorflow/contrib/util:convert_graphdef_memmapped_format"
BUILD_TARGET_TOCO="//tensorflow/lite/toco:toco"
BUILD_TARGET_LITE_BENCHMARK="//tensorflow/lite/tools/benchmark:benchmark_model"
BUILD_TARGET_LITE_LIB="//tensorflow/lite/c:libtensorflowlite_c.so"