From 4b305d2f5ef409f129c4638561f55b3707d9e8dd Mon Sep 17 00:00:00 2001 From: Reuben Morais Date: Fri, 14 Jun 2019 15:11:21 -0300 Subject: [PATCH] Remove --use_seq_length flag --- DeepSpeech.py | 4 ++-- README.md | 2 +- bin/run-tc-ldc93s1_tflite.sh | 2 +- util/flags.py | 1 - 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/DeepSpeech.py b/DeepSpeech.py index 7e92e202..400a5067 100755 --- a/DeepSpeech.py +++ b/DeepSpeech.py @@ -588,7 +588,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False): rnn_impl = rnn_impl_lstmblockfusedcell logits, layers = create_model(batch_x=input_tensor, - seq_length=seq_length if FLAGS.use_seq_length else None, + seq_length=seq_length if not FLAGS.export_tflite else None, dropout=no_dropout, previous_state=previous_state, overlap=False, @@ -630,7 +630,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False): 'input_samples': input_samples, } - if FLAGS.use_seq_length: + if not FLAGS.export_tflite: inputs.update({'input_lengths': seq_length}) outputs = { diff --git a/README.md b/README.md index bf593a27..91447cfa 100644 --- a/README.md +++ b/README.md @@ -343,7 +343,7 @@ Refer to the corresponding [README.md](native_client/README.md) for information ### Exporting a model for TFLite -If you want to experiment with the TF Lite engine, you need to export a model that is compatible with it, then use the `--nouse_seq_length --export_tflite` flags. If you already have a trained model, you can re-export it for TFLite by running `DeepSpeech.py` again and specifying the same `checkpoint_dir` that you used for training, as well as passing `--nouse_seq_length --export_tflite --export_dir /model/export/destination`. +If you want to experiment with the TF Lite engine, you need to export a model that is compatible with it, then use the `--export_tflite` flag. 
If you already have a trained model, you can re-export it for TFLite by running `DeepSpeech.py` again and specifying the same `checkpoint_dir` that you used for training, as well as passing `--export_tflite --export_dir /model/export/destination`. ### Making a mmap-able model for inference diff --git a/bin/run-tc-ldc93s1_tflite.sh b/bin/run-tc-ldc93s1_tflite.sh index bab6d7b0..b402d7d9 100755 --- a/bin/run-tc-ldc93s1_tflite.sh +++ b/bin/run-tc-ldc93s1_tflite.sh @@ -20,4 +20,4 @@ python -u DeepSpeech.py --noshow_progressbar \ --export_dir '/tmp/train_tflite' \ --lm_binary_path 'data/smoke_test/vocab.pruned.lm' \ --lm_trie_path 'data/smoke_test/vocab.trie' \ - --export_tflite --nouse_seq_length + --export_tflite diff --git a/util/flags.py b/util/flags.py index b0f824ff..a4cb4979 100644 --- a/util/flags.py +++ b/util/flags.py @@ -73,7 +73,6 @@ def create_flags(): f.DEFINE_string('export_dir', '', 'directory in which exported models are stored - if omitted, the model won\'t get exported') f.DEFINE_boolean('remove_export', False, 'whether to remove old exported models') f.DEFINE_boolean('export_tflite', False, 'export a graph ready for TF Lite engine') - f.DEFINE_boolean('use_seq_length', True, 'have sequence_length in the exported graph(will make tfcompile unhappy)') f.DEFINE_integer('n_steps', 16, 'how many timesteps to process at once by the export graph, higher values mean more latency') f.DEFINE_string('export_language', '', 'language the model was trained on e.g. "en" or "English". Gets embedded into exported model.')