All inputs of BATCH_MATMUL should be quantized with the activation type
commit dff42b15e2
parent 0f082f6b40
@@ -97,6 +97,7 @@ OperatorProperty GetOperatorProperty(const ModelT* model, int subgraph_index,
       property.inputs = {{0, {}}, {1, {}}};
       property.outputs = {{0, {}}};
       property.version = 2;
+      property.quantize_input_as_activations = true;
       break;
     }
     case BuiltinOperator_BATCH_TO_SPACE_ND:
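For context, here is a minimal sketch of how a post-training quantizer could consume this flag. The only identifier carried over from the diff is quantize_input_as_activations; the struct layout, enum, and function names below are illustrative assumptions for this sketch, not the actual operator_property.h or quantize_model.cc interfaces.

#include <iostream>
#include <vector>

// Simplified stand-in for the per-operator property table entry. Only the
// fields relevant to this commit are modeled here.
struct OperatorProperty {
  std::vector<int> input_indices;
  bool quantize_input_as_activations = false;
};

enum class QuantScheme { kWeight, kActivation };

// When the flag is set, every listed input of the op is quantized with the
// activation scheme instead of being treated as a constant weight tensor.
QuantScheme SchemeForInput(const OperatorProperty& property) {
  return property.quantize_input_as_activations ? QuantScheme::kActivation
                                                : QuantScheme::kWeight;
}

int main() {
  // After this commit, the BATCH_MATMUL property lists inputs {0, 1} and sets
  // the flag, so both the LHS and RHS tensors take the activation scheme.
  OperatorProperty batch_matmul;
  batch_matmul.input_indices = {0, 1};
  batch_matmul.quantize_input_as_activations = true;

  for (int input : batch_matmul.input_indices) {
    std::cout << "input " << input << ": "
              << (SchemeForInput(batch_matmul) == QuantScheme::kActivation
                      ? "activation quantization"
                      : "weight quantization")
              << "\n";
  }
}

In effect, the new flag tells the quantizer that both BATCH_MATMUL inputs are runtime tensors and should be quantized with the activation type rather than as constant weights, which is exactly what the commit message describes.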