diff --git a/native_client/javascript/client.ts b/native_client/javascript/client.ts
index ecf00a95..d607060e 100644
--- a/native_client/javascript/client.ts
+++ b/native_client/javascript/client.ts
@@ -134,6 +134,11 @@ if (!args['stream']) {
     stream.feedAudioContent(chunk);
   });
   conversionStream.on('end', () => {
-    console.log(stream.finishStream());
+    if (args['extended']) {
+      let metadata = stream.finishStreamWithMetadata();
+      console.log(candidateTranscriptToString(metadata.transcripts[0]));
+    } else {
+      console.log(stream.finishStream());
+    }
   });
 }
diff --git a/native_client/javascript/index.d.ts b/native_client/javascript/index.d.ts
index b22141ca..339fb36e 100644
--- a/native_client/javascript/index.d.ts
+++ b/native_client/javascript/index.d.ts
@@ -103,7 +103,7 @@ stt(aBuffer: object): string;
  * @return :js:func:`Metadata` object containing multiple candidate transcripts. Each transcript has per-token metadata including timing information.
  * The user is responsible for freeing Metadata by calling :js:func:`FreeMetadata`. Returns undefined on error.
  */
-sttWithMetadata(aBuffer: object, aNumResults: number): Metadata;
+sttWithMetadata(aBuffer: object, aNumResults?: number): Metadata;
 
 /**
  * Create a new streaming inference state. One can then call :js:func:`Stream.feedAudioContent` and :js:func:`Stream.finishStream` on the returned stream object.
@@ -143,7 +143,7 @@ intermediateDecode(aSctx: Stream): string;
  *
  * @return :js:func:`Metadata` object containing multiple candidate transcripts. Each transcript has per-token metadata including timing information. The user is responsible for freeing Metadata by calling :js:func:`FreeMetadata`. Returns undefined on error.
  */
-intermediateDecodeWithMetadata (aNumResults: number): Metadata;
+intermediateDecodeWithMetadata (aNumResults?: number): Metadata;
 
 /**
  * Compute the final decoding of an ongoing streaming inference and return the result. Signals the end of an ongoing streaming inference.
@@ -163,7 +163,7 @@ finishStream(): string;
  *
  * This method will free the stream, it must not be used after this method is called.
  */
-finishStreamWithMetadata(aNumResults: number): Metadata;
+finishStreamWithMetadata(aNumResults?: number): Metadata;
 }
 
 /**
@@ -177,7 +177,7 @@ export function FreeModel(model: Model): void;
 /**
  * Free memory allocated for metadata information.
  *
- * @param metadata Object containing metadata as returned by :js:func:`Model.sttWithMetadata` or :js:func:`Model.finishStreamWithMetadata`
+ * @param metadata Object containing metadata as returned by :js:func:`Model.sttWithMetadata` or :js:func:`Stream.finishStreamWithMetadata`
  */
 export function FreeMetadata(metadata: Metadata): void;
 
diff --git a/native_client/javascript/index.js b/native_client/javascript/index.js
index 7d742aad..f684f503 100644
--- a/native_client/javascript/index.js
+++ b/native_client/javascript/index.js
@@ -211,7 +211,7 @@ Stream.prototype.finishStream = function() {
  */
 Stream.prototype.finishStreamWithMetadata = function(aNumResults) {
     aNumResults = aNumResults || 1;
-    result = binding.FinishStreamWithMetadata(this._impl, aNumResults);
+    let result = binding.FinishStreamWithMetadata(this._impl, aNumResults);
     this._impl = null;
     return result;
 }
@@ -230,7 +230,7 @@ function FreeModel(model) {
 /**
  * Free memory allocated for metadata information.
  *
- * @param {object} metadata Object containing metadata as returned by :js:func:`Model.sttWithMetadata` or :js:func:`Model.finishStreamWithMetadata`
+ * @param {object} metadata Object containing metadata as returned by :js:func:`Model.sttWithMetadata` or :js:func:`Stream.finishStreamWithMetadata`
  */
 function FreeMetadata(metadata) {
     return binding.FreeMetadata(metadata);
diff --git a/taskcluster/tc-asserts.sh b/taskcluster/tc-asserts.sh
index 62f1e8ff..a56a4c49 100755
--- a/taskcluster/tc-asserts.sh
+++ b/taskcluster/tc-asserts.sh
@@ -519,6 +519,12 @@ run_js_streaming_inference_tests()
   status=$?
   set -e
   assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
+
+  set +e
+  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream --extended 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
+  status=$?
+  set -e
+  assert_correct_ldc93s1_lm "${phrase_pbmodel_withlm}" "$status"
 }
 
 run_js_streaming_prod_inference_tests()
@@ -529,4 +535,11 @@ run_js_streaming_prod_inference_tests()
   status=$?
   set -e
   assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"
+
+  local _bitrate=$1
+  set +e
+  phrase_pbmodel_withlm=$(deepspeech --model ${TASKCLUSTER_TMP_DIR}/${model_name_mmap} --scorer ${TASKCLUSTER_TMP_DIR}/kenlm.scorer --audio ${TASKCLUSTER_TMP_DIR}/${ldc93s1_sample_filename} --stream --extended 2>${TASKCLUSTER_TMP_DIR}/stderr | tail -n 1)
+  status=$?
+  set -e
+  assert_correct_ldc93s1_prodmodel "${phrase_pbmodel_withlm}" "$status" "${_bitrate}"
 }
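
For reviewers, a minimal usage sketch (not part of the patch) of the streaming metadata path this diff touches, assuming the `deepspeech` npm package with the API declared in index.d.ts above. The model, scorer, and audio file paths are placeholders, and the audio is assumed to be headerless 16-bit mono PCM at the model's sample rate.

```ts
// Sketch only: exercises createStream() / feedAudioContent() / finishStreamWithMetadata()
// with the now-optional aNumResults parameter. All paths below are placeholders.
import * as Ds from 'deepspeech';
import * as Fs from 'fs';

const model = new Ds.Model('output_graph.pbmm');   // placeholder model path
model.enableExternalScorer('kenlm.scorer');        // placeholder scorer path

const stream = model.createStream();
// Assumed to be raw 16-bit mono PCM at the model's sample rate.
stream.feedAudioContent(Fs.readFileSync('audio.raw'));

// aNumResults now defaults to 1 when omitted; pass e.g. 3 to request more candidates.
const metadata = stream.finishStreamWithMetadata();
const best = metadata.transcripts[0];
console.log(best.tokens.map((t) => t.text).join(''));

// Per the docstrings updated above, the caller owns the Metadata object
// and must release it explicitly.
Ds.FreeMetadata(metadata);
```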