diff --git a/native_client/ctcdecode/Makefile b/native_client/ctcdecode/Makefile
index 5c1ed4ce..d4b9bf4c 100644
--- a/native_client/ctcdecode/Makefile
+++ b/native_client/ctcdecode/Makefile
@@ -6,7 +6,7 @@ NUM_PROCESSES ?= 1
 
 # ARM64 can't find the proper libm.so without this
 ifeq ($(TARGET),rpi3-armv8)
-LDFLAGS_NEEDED += $(RASPBIAN)/lib/aarch64-linux-gnu/libm-2.24.so
+LDFLAGS_NEEDED += $(RASPBIAN)/lib/aarch64-linux-gnu/libm.so.6
 endif
 
 all: bindings
diff --git a/native_client/ctcdecode/scorer.cpp b/native_client/ctcdecode/scorer.cpp
index 8b84290d..d76192ae 100644
--- a/native_client/ctcdecode/scorer.cpp
+++ b/native_client/ctcdecode/scorer.cpp
@@ -50,13 +50,14 @@ Scorer::Scorer(double alpha,
                const char* alphabet_config_path)
     : Scorer(alpha, beta, lm_path, trie_path, Alphabet(alphabet_config_path))
 {
-
 }
 
-Scorer::~Scorer() {
+Scorer::~Scorer()
+{
 }
 
-void Scorer::setup(const std::string& lm_path, const std::string& trie_path) {
+void Scorer::setup(const std::string& lm_path, const std::string& trie_path)
+{
   // load language model
   const char* filename = lm_path.c_str();
   VALID_CHECK_EQ(access(filename, R_OK), 0, "Invalid language model path");
@@ -114,7 +115,8 @@ void Scorer::setup(const std::string& lm_path, const std::string& trie_path) {
   max_order_ = language_model_->Order();
 }
 
-void Scorer::save_dictionary(const std::string& path) {
+void Scorer::save_dictionary(const std::string& path)
+{
   std::ofstream fout(path, std::ios::binary);
   fout.write(reinterpret_cast<const char*>(&MAGIC), sizeof(MAGIC));
   fout.write(reinterpret_cast<const char*>(&FILE_VERSION), sizeof(FILE_VERSION));
@@ -123,7 +125,8 @@ void Scorer::save_dictionary(const std::string& path) {
   dictionary->Write(fout, opt);
 }
 
-double Scorer::get_log_cond_prob(const std::vector<std::string>& words) {
+double Scorer::get_log_cond_prob(const std::vector<std::string>& words)
+{
   double cond_prob = OOV_SCORE;
   lm::ngram::State state, tmp_state, out_state;
   // avoid to inserting <s> in begin
@@ -143,7 +146,8 @@ double Scorer::get_log_cond_prob(const std::vector<std::string>& words) {
   return cond_prob/NUM_FLT_LOGE;
 }
 
-double Scorer::get_sent_log_prob(const std::vector<std::string>& words) {
+double Scorer::get_sent_log_prob(const std::vector<std::string>& words)
+{
   std::vector<std::string> sentence;
   if (words.size() == 0) {
     for (size_t i = 0; i < max_order_; ++i) {
@@ -159,7 +163,8 @@ double Scorer::get_sent_log_prob(const std::vector<std::string>& words) {
   return get_log_prob(sentence);
 }
 
-double Scorer::get_log_prob(const std::vector<std::string>& words) {
+double Scorer::get_log_prob(const std::vector<std::string>& words)
+{
   assert(words.size() > max_order_);
   double score = 0.0;
   for (size_t i = 0; i < words.size() - max_order_ + 1; ++i) {
@@ -170,12 +175,14 @@ double Scorer::get_log_prob(const std::vector<std::string>& words) {
   return score;
 }
 
-void Scorer::reset_params(float alpha, float beta) {
+void Scorer::reset_params(float alpha, float beta)
+{
   this->alpha = alpha;
   this->beta = beta;
 }
 
-std::vector<std::string> Scorer::split_labels(const std::vector<int>& labels) {
+std::vector<std::string> Scorer::split_labels(const std::vector<int>& labels)
+{
   if (labels.empty()) return {};
 
   std::string s = alphabet_.LabelsToString(labels);
@@ -188,7 +195,8 @@ std::vector<std::string> Scorer::split_labels(const std::vector<int>& labels) {
   return words;
 }
 
-std::vector<std::string> Scorer::make_ngram(PathTrie* prefix) {
+std::vector<std::string> Scorer::make_ngram(PathTrie* prefix)
+{
   std::vector<std::string> ngram;
   PathTrie* current_node = prefix;
   PathTrie* new_node = nullptr;
@@ -221,7 +229,8 @@ std::vector<std::string> Scorer::make_ngram(PathTrie* prefix) {
   return ngram;
 }
 
-void Scorer::fill_dictionary(const std::vector<std::string>& vocabulary, bool add_space) {
+void Scorer::fill_dictionary(const std::vector<std::string>& vocabulary, bool add_space)
+{
   fst::StdVectorFst dictionary;
   // For each unigram convert to ints and put in trie
   for (const auto& word : vocabulary) {
diff --git a/tc-single-shot-inference.sh b/tc-single-shot-inference.sh
index 3d178f74..e7d229d7 100755
--- a/tc-single-shot-inference.sh
+++ b/tc-single-shot-inference.sh
@@ -48,7 +48,7 @@
 platform=$(python -c 'import sys; import platform; plat = platform.system().lowe
 whl_ds_version="$(python -c 'from pkg_resources import parse_version; print(parse_version("'${DS_VERSION}'"))')"
 decoder_pkg="ds_ctcdecoder-${whl_ds_version}-cp${pyver_pkg}-cp${pyver_pkg}${py_unicode_type}-${platform}.whl"
 
-decoder_pkg_url=${DEEPSPEECH_ARTIFACTS_ROOT}/${decoder_pkg}
+decoder_pkg_url=${DECODER_ARTIFACTS_ROOT}/${decoder_pkg}
 
 LD_LIBRARY_PATH=${PY37_LDPATH}:$LD_LIBRARY_PATH pip install --verbose --only-binary :all: ${PY37_SOURCE_PACKAGE} --upgrade ${decoder_pkg_url} | cat