Reduce training task from 399 epochs to 220, enough to overfit LDC93S1

Reuben Morais 2019-12-03 13:02:51 +01:00
parent 1f98373274
commit 551570616b
2 changed files with 3 additions and 5 deletions


@@ -5,8 +5,6 @@ set -xe
ldc93s1_dir="./data/ldc93s1-tc"
ldc93s1_csv="${ldc93s1_dir}/ldc93s1.csv"
epoch_count=$1
if [ ! -f "${ldc93s1_dir}/ldc93s1.csv" ]; then
echo "Downloading and preprocessing LDC93S1 example data, saving in ${ldc93s1_dir}."
python -u bin/import_ldc93s1.py ${ldc93s1_dir}
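
For context, epoch_count=$1 is the number of epochs the caller requests for this smoke-test run. A minimal sketch of how such a value is typically handed on to the trainer later in the script; the training command and flags below are assumptions for illustration and are not part of this hunk:

# Hypothetical continuation of the script (assumption, not shown in this diff):
# pass the requested epoch count through to the trainer on the LDC93S1 sample.
python -u DeepSpeech.py \
  --train_files "${ldc93s1_csv}" \
  --dev_files "${ldc93s1_csv}" \
  --test_files "${ldc93s1_csv}" \
  --epochs "${epoch_count}"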


@@ -58,8 +58,8 @@ LD_LIBRARY_PATH=${PY37_LDPATH}:$LD_LIBRARY_PATH pip install --verbose --only-bin
pushd ${HOME}/DeepSpeech/ds/
# Run twice to test preprocessed features
-time ./bin/run-tc-ldc93s1_new.sh 199
-time ./bin/run-tc-ldc93s1_new.sh 200
+time ./bin/run-tc-ldc93s1_new.sh 219
+time ./bin/run-tc-ldc93s1_new.sh 1
time ./bin/run-tc-ldc93s1_tflite.sh
popd
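
The change above keeps the "run twice" pattern (the second run exercises the code path that reuses already-preprocessed features) while cutting the epoch budget: 199 + 200 = 399 epochs before this commit, 219 + 1 = 220 after, which is still enough to overfit the single LDC93S1 sample. A sketch of the same pattern with the totals made explicit; the variable names here are illustrative only:

first_run_epochs=219   # main training run
second_run_epochs=1    # short re-run over the cached, preprocessed features
time ./bin/run-tc-ldc93s1_new.sh "${first_run_epochs}"
time ./bin/run-tc-ldc93s1_new.sh "${second_run_epochs}"
# total: 219 + 1 = 220 epochs, down from 199 + 200 = 399
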
@@ -75,7 +75,7 @@ if [ ! -z "${CONVERT_GRAPHDEF_MEMMAPPED}" ]; then
fi;
pushd ${HOME}/DeepSpeech/ds/
-time ./bin/run-tc-ldc93s1_checkpoint.sh 200
+time ./bin/run-tc-ldc93s1_checkpoint.sh
popd
deactivate
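
run-tc-ldc93s1_checkpoint.sh is now invoked with no epoch argument instead of the previous 200. Its body is not part of this diff; one common way for a shell script to cope with an optional positional argument like this is parameter defaulting, sketched here with a hypothetical default value:

# Hypothetical handling inside run-tc-ldc93s1_checkpoint.sh (assumption, not shown in this diff):
# fall back to a small default epoch count when the caller passes no argument.
epoch_count=${1:-1}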