Replace SameOperandsAndResultType by TFL_TCresVTEtIsSameAsOp to cover quantization types

Also fixes Mobilenet-v3-quant conversion failure.

PiperOrigin-RevId: 311473695
Change-Id: I08f836a2b829772f7a8d6b39766ab67ccd2c9a10
This commit is contained in:
Jaesung Chung 2020-05-13 22:48:33 -07:00 committed by TensorFlower Gardener
parent d5a5959dd3
commit 4afee5f519
2 changed files with 22 additions and 15 deletions

View File

@@ -1561,9 +1561,11 @@ def TFL_GreaterOp : TFL_Op<"greater", [
   let printer = [{ return mlir::impl::printOneResultOp(getOperation(), p); }];
 }
-def TFL_HardSwishOp: TFL_Op<"hard_swish", [NoSideEffect,
+def TFL_HardSwishOp: TFL_Op<"hard_swish", [
+    NoSideEffect,
     SameOperandsAndResultShape,
-    SameOperandsAndResultType,
+    PredOpTrait<"input and output must have same element type",
+        TFL_TCresVTEtIsSameAsOp<0, 0>>,
     TFL_GpuTargetOp]> {
   let summary = "Hardswish activation function.";
   let description = [{
@@ -1574,7 +1576,7 @@ def TFL_HardSwishOp: TFL_Op<"hard_swish", [
   let arguments = (ins TFL_TensorOf<[F32, QUI8, QI8]>:$input);
-  let results = (outs TFL_TensorOf<[F32, QUI8, QI8]>:$out);
+  let results = (outs TFL_TensorOf<[F32, QUI8, QI8]>:$output);
   let hasOptions = 0;
 }
@@ -1606,7 +1608,8 @@ def TFL_L2NormalizationOp : TFL_Op<"l2_normalization", [NoSideEffect,
 def TFL_LeakyReluOp: TFL_Op<"leaky_relu", [
     SameOperandsAndResultShape,
     NoSideEffect,
-    SameOperandsAndResultType]> {
+    PredOpTrait<"input and output must have same element type",
+        TFL_TCresVTEtIsSameAsOp<0, 0>>]> {
   let summary = "Leaky Relu operator";
   let description = [{
@@ -1740,7 +1743,8 @@ def TFL_LogOp: TFL_Op<"log", [
 def TFL_LogSoftmaxOp : TFL_Op<"log_softmax", [
     NoSideEffect,
     SameOperandsAndResultShape,
-    SameOperandsAndResultType,
+    PredOpTrait<"x and y must have same element type",
+        TFL_TCresVTEtIsSameAsOp<0, 0>>,
     // zero_point = max_value
     // scale = -log_softmax_output_min / (max_value + 1)
     FixedResultScale<Int8UniformQuantizedType<127, 625, -4>>,
@@ -1896,11 +1900,11 @@ Rounds the values of a tensor to the nearest integer, element-wise.
   }];
   let arguments = (ins
-    TFL_TensorOf<[F32]>:$x
+    TFL_FpTensor:$x
   );
   let results = (outs
-    TFL_TensorOf<[F32]>:$y
+    TFL_FpTensor:$y
   );
 }
@@ -2443,9 +2447,9 @@ def TFL_RsqrtOp: TFL_Op<"rsqrt", [NoSideEffect,
   Computes element-wise reverse square root of input
   }];
-  let arguments = (ins AnyTensor:$x);
-  let results = (outs AnyTensor:$y);
+  let arguments = (ins TFL_FpTensor:$x);
+  let results = (outs TFL_FpTensor:$y);
   let hasFolder = 1;
 }
@@ -3361,8 +3365,10 @@ def TFL_QuantizeOp: TFL_Op<"quantize", [
   let results = (outs AnyTensor:$output);
 }
-def TFL_DensifyOp: TFL_Op<"densify", [NoSideEffect,
-    SameOperandsAndResultType,
+def TFL_DensifyOp: TFL_Op<"densify", [
+    NoSideEffect,
+    PredOpTrait<"input and output must have same element type",
+        TFL_TCresVTEtIsSameAsOp<0, 0>>,
     NoQuantizableResult]> {
   let summary = "Densify operator";

View File

@@ -321,7 +321,8 @@ void DenseToSparse::runOnFunction() {
     if (result.needs_densify) {
       const auto value = op->getOperand(operand);
-      auto densify = builder.create<DensifyOp>(op->getLoc(), value);
+      auto densify =
+          builder.create<DensifyOp>(op->getLoc(), value.getType(), value);
       value.replaceAllUsesWith(densify);
       densify.setOperand(value);
     }