Call the logits `probs` in `create_inference_graph` after they go through softmax
Parent: 9a92fa40ca
Commit: 98e75c3c03
```diff
@@ -730,7 +730,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):
     logits = tf.squeeze(logits, [1])
 
     # Apply softmax for CTC decoder
-    logits = tf.nn.softmax(logits, name='logits')
+    probs = tf.nn.softmax(logits, name='logits')
 
     if batch_size <= 0:
         if tflite:
@@ -743,7 +743,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):
                 'input_lengths': seq_length,
             },
             {
-                'outputs': logits,
+                'outputs': probs,
             },
             layers
         )
@@ -763,7 +763,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):
         inputs['input_lengths'] = seq_length
 
     outputs = {
-        'outputs': logits,
+        'outputs': probs,
         'new_state_c': new_state_c,
         'new_state_h': new_state_h,
         'mfccs': mfccs,
```
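For context on the rename: after `tf.nn.softmax` the tensor holds per-timestep character probabilities rather than raw logits, so `probs` describes it accurately. Note the op name `'logits'` is left unchanged in the diff, presumably so exported graphs keep the same output node name for existing consumers. A minimal standalone sketch of the distinction (illustrative shapes and values, not the project's actual graph):

```python
import tensorflow as tf

# Illustrative values only: shape [batch, n_steps, n_characters]
logits = tf.constant([[[2.0, 1.0, 0.1],
                       [0.5, 0.5, 3.0]]])

# Softmax turns raw scores into per-timestep probability distributions,
# so `probs` is the honest name for the result. The op name 'logits' is
# kept here only to mirror the commit, which preserves the node name.
probs = tf.nn.softmax(logits, name='logits')

# Each timestep's probabilities sum to 1, which is what a CTC decoder
# consumes downstream.
print(tf.reduce_sum(probs, axis=-1))  # ~[[1.0, 1.0]]
```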