Update documentation for FC op in hexagon delegate.

FC now supports the relu activation function.

PiperOrigin-RevId: 316966229
Change-Id: If5a42ecca8aa9b6e94474e75614153f50ca8ae3b
parent b920ae9262
commit 69e36409c0
@@ -74,7 +74,7 @@ are verified in `IsNodeSupportedByHexagon`:
   - depth_multiplier == 1
   - dilation only supported when stride == 1
   - Otherwise, stride height/width <= 3
-* FullyConnected (without any activation)
+* FullyConnected
 * Hardswish
 * L2Normalization (without any activation)
 * Logistic (aka Sigmoid)
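This hunk drops the "(without any activation)" qualifier from FullyConnected in the supported-ops list, since the delegate can now fuse a relu. For illustration, here is a minimal sketch of the kind of check this implies, using the standard TfLiteFullyConnectedParams builtin data; the helper name below is hypothetical, and the real logic lives in IsNodeSupportedByHexagon and may differ:

#include "tensorflow/lite/c/builtin_op_data.h"
#include "tensorflow/lite/c/common.h"

// Hypothetical helper illustrating the relaxed constraint: FullyConnected
// nodes with no fused activation or a fused relu are now delegable.
bool IsFullyConnectedActivationSupported(const TfLiteNode* node) {
  const auto* params =
      reinterpret_cast<const TfLiteFullyConnectedParams*>(node->builtin_data);
  return params->activation == kTfLiteActNone ||
         params->activation == kTfLiteActRelu;
}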
@@ -151,7 +151,6 @@ TfLiteStatus AddFullyConnectedHelper(const TfLiteIntArray* inputs,
 // Data (8-bit), Weights (const, 8-bit) => MatMul => MatMul out (int32)
 // MatMul out (int32), Bias (int32) => QuantizedBiasAdd => BiasAdd out (int32)
 // BiasAdd out (int32) => Requantize_32to8 => Output (8-bit)
-// TODO(b/129276536): Add activation support.
 TfLiteStatus MatMulWithConstWeightsOpBuilder::PopulateSubGraph(
     const TfLiteIntArray* inputs, const TfLiteIntArray* outputs,
     TfLiteContext* context) {
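The comments above describe the subgraph the builder emits for 8-bit FullyConnected: an int32-accumulating MatMul, a bias add, and a requantize back to 8 bits. As a point of reference only (the Hexagon delegate emits MatMul / QuantizedBiasAdd / Requantize_32to8 graph ops rather than computing this directly), here is a scalar sketch of that dataflow under standard TFLite uint8 quantization, with an optional fused relu as this commit enables; all parameter names are illustrative:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <vector>

// Scalar reference for the dataflow in the comments above. Bias is assumed
// to be quantized with scale data_scale * weights_scale (the TFLite
// convention), so it adds directly onto the int32 accumulator.
std::vector<uint8_t> QuantizedFullyConnectedReference(
    const std::vector<uint8_t>& data,     // [input_depth]
    const std::vector<uint8_t>& weights,  // [num_units * input_depth]
    const std::vector<int32_t>& bias,     // [num_units]
    int input_depth, int num_units,
    int32_t data_zero, int32_t weights_zero, float data_scale,
    float weights_scale, int32_t output_zero, float output_scale,
    bool fused_relu) {
  std::vector<uint8_t> output(num_units);
  for (int u = 0; u < num_units; ++u) {
    // MatMul: products of 8-bit values are accumulated in 32 bits.
    int32_t acc = 0;
    for (int d = 0; d < input_depth; ++d) {
      acc += (static_cast<int32_t>(data[d]) - data_zero) *
             (static_cast<int32_t>(weights[u * input_depth + d]) -
              weights_zero);
    }
    // QuantizedBiasAdd: both sides share the scale data_scale * weights_scale.
    acc += bias[u];
    // Requantize_32to8: rescale to the output quantization and clamp to 8 bits.
    float real = acc * data_scale * weights_scale;
    // A fused relu (newly supported per this commit) clamps at 0 before
    // requantizing.
    if (fused_relu) real = std::max(real, 0.0f);
    const int32_t q =
        static_cast<int32_t>(std::lround(real / output_scale)) + output_zero;
    output[u] = static_cast<uint8_t>(std::min(255, std::max(0, q)));
  }
  return output;
}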