diff --git a/tensorflow/compiler/mlir/lite/BUILD b/tensorflow/compiler/mlir/lite/BUILD
index c917af71f92..03cf9265f3b 100644
--- a/tensorflow/compiler/mlir/lite/BUILD
+++ b/tensorflow/compiler/mlir/lite/BUILD
@@ -26,6 +26,7 @@ package_group(
 filegroup(
     name = "tensorflow_lite_ops_td_files",
     srcs = [
+        "experimental/tfl_hardware_interfaces.td",
         "ir/tfl_op_interfaces.td",
         "ir/tfl_ops.td",
         "//tensorflow/compiler/mlir/lite/quantization:quantization_td_files",
@@ -204,6 +205,7 @@ cc_library(
 cc_library(
     name = "tensorflow_lite",
     srcs = [
+        "experimental/estimators/estimator.h",
         "ir/tfl_ops.cc",
         "ir/tfl_ops.cc.inc",
         "ir/tfl_ops.h.inc",
@@ -439,6 +441,7 @@ genrule(
     srcs = [
         "ir/tfl_ops.td",
         "ir/tfl_op_interfaces.td",
+        "experimental/tfl_hardware_interfaces.td",
         "@llvm-project//mlir:include/mlir/Interfaces/LoopLikeInterface.td",
         "//tensorflow/compiler/mlir/lite/quantization:quantization_td_files",
     ],
diff --git a/tensorflow/compiler/mlir/lite/experimental/estimators/estimator.h b/tensorflow/compiler/mlir/lite/experimental/estimators/estimator.h
new file mode 100644
index 00000000000..26f6b0f3428
--- /dev/null
+++ b/tensorflow/compiler/mlir/lite/experimental/estimators/estimator.h
@@ -0,0 +1,56 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef TENSORFLOW_COMPILER_MLIR_LITE_EXPERIMENTAL_ESTIMATORS_ESTIMATOR_H_
+#define TENSORFLOW_COMPILER_MLIR_LITE_EXPERIMENTAL_ESTIMATORS_ESTIMATOR_H_
+
+#include "llvm/Support/raw_ostream.h"
+#include "mlir/IR/Operation.h"  // TF:llvm-project
+#include "tensorflow/compiler/mlir/lite/ir/tfl_ops.h.inc"
+
+namespace hardware {
+// Empty classes that represent hardware types.
+class CPU {};
+class GPU {};
+}  // namespace hardware
+
+// Generic cost estimator: primary template used when no hardware-specific
+// specialization exists for (Op, TargetHardware).
+template <typename Op, typename TargetHardware>
+class TFLiteCostEstimator {
+ public:
+  static double GetCost(mlir::Operation* op) {
+    llvm::errs() << "No defined cost function for op: "
+                 << op->getName().getStringRef().str();
+    return 0.0;
+  }
+
+  static bool IsSupported(mlir::Operation* op) {
+    llvm::errs() << "No defined support for op: "
+                 << op->getName().getStringRef().str();
+    return false;
+  }
+};
+
+// All ops on CPU are supported.
+// TODO(karimnosseir): Only allow TFL ops in the "TFL_OP" param.
+template <typename Op>
+class TFLiteCostEstimator<Op, hardware::CPU> {
+ public:
+  // TODO(karimnosseir): Update and use table based method and lookup
+  // cost from a loadable table ?
+  static double GetCost(mlir::Operation* op) { return 0.0; }
+
+  static bool IsSupported(mlir::Operation* op) { return true; }
+};
+
+#endif  // TENSORFLOW_COMPILER_MLIR_LITE_EXPERIMENTAL_ESTIMATORS_ESTIMATOR_H_
diff --git a/tensorflow/compiler/mlir/lite/experimental/tfl_hardware_interfaces.td b/tensorflow/compiler/mlir/lite/experimental/tfl_hardware_interfaces.td
new file mode 100644
index 00000000000..5c3ec6c206c
--- /dev/null
+++ b/tensorflow/compiler/mlir/lite/experimental/tfl_hardware_interfaces.td
@@ -0,0 +1,76 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// WARNING: This Interface is experimental, DO NOT USE.
+
+// This is the Target Hardware operation interface definition file
+// for TensorFlow Lite.
+
+#ifndef TFL_TARGET_HARDWARE_OP_INTERFACES
+#define TFL_TARGET_HARDWARE_OP_INTERFACES
+
+def TFL_CpuTargetOp : OpInterface<"CpuOpTargetInterface"> {
+  let description = [{
+    Interface for ops to run on CPU.
+  }];
+
+  let methods = [
+    InterfaceMethod<
+      [{Returns the cost of running this op on CPU.}],
+      // TODO(karimnosseir): Change to return Cost object instead.
+      "double", "GetOpCost", (ins "mlir::Operation*":$op_to_check), [{
+        // TODO(karimnosseir): Consider changing to another way that doesn't
+        // rely on template param name.
+        return TFL::TFLiteCostEstimator<ConcreteOp, hardware::CPU>::GetCost(op_to_check);
+      }]
+    >,
+    InterfaceMethod<
+      [{Returns whether this op can be run on CPU.}],
+      "bool", "IsSupported", (ins "mlir::Operation*":$op_to_check), [{
+        // TODO(karimnosseir): Consider changing to another way that doesn't
+        // rely on template param name.
+        return TFL::TFLiteCostEstimator<ConcreteOp, hardware::CPU>::IsSupported(op_to_check);
+      }]
+    >,
+  ];
+}
+
+def TFL_GpuTargetOp : OpInterface<"GpuOpTargetInterface"> {
+  let description = [{
+    Interface for ops to run on GPU.
+  }];
+
+  let methods = [
+    InterfaceMethod<
+      [{Returns the cost of running this op on GPU.}],
+      // TODO(karimnosseir): Change to return Cost object instead.
+      "double", "GetOpCost", (ins "Operation*":$op_to_check), [{
+        // TODO(karimnosseir): Consider changing to another way that doesn't
+        // rely on template param name.
+        return TFL::TFLiteCostEstimator<ConcreteOp, hardware::GPU>::GetCost(op_to_check);
+      }]
+    >,
+    InterfaceMethod<
+      [{Returns whether this op can be run on GPU.}],
+      "bool", "IsSupported", (ins "Operation*":$op_to_check), [{
+        // TODO(karimnosseir): Consider changing to another way that doesn't
+        // rely on template param name.
+        return TFL::TFLiteCostEstimator<ConcreteOp, hardware::GPU>::IsSupported(op_to_check);
+      }]
+    >,
+  ];
+}
+
+#endif // TFL_TARGET_HARDWARE_OP_INTERFACES
diff --git a/tensorflow/compiler/mlir/lite/ir/tfl_op_interfaces.td b/tensorflow/compiler/mlir/lite/ir/tfl_op_interfaces.td
index 8e100538659..db0bef39358 100644
--- a/tensorflow/compiler/mlir/lite/ir/tfl_op_interfaces.td
+++ b/tensorflow/compiler/mlir/lite/ir/tfl_op_interfaces.td
@@ -19,6 +19,7 @@ limitations under the License.
 #define TFL_OP_INTERFACES
 
 include "mlir/IR/OpBase.td"
+include "tensorflow/compiler/mlir/lite/experimental/tfl_hardware_interfaces.td"
 
 //===----------------------------------------------------------------------===//
 // TFL op interface for stateful operands.
diff --git a/tensorflow/compiler/mlir/lite/ir/tfl_ops.h b/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
index ffdafc1844f..a9b89c2bb64 100644
--- a/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
+++ b/tensorflow/compiler/mlir/lite/ir/tfl_ops.h
@@ -49,6 +49,7 @@ class TensorFlowLiteDialect : public Dialect {
                               Location loc) override;
 };
 
+#include "tensorflow/compiler/mlir/lite/experimental/estimators/estimator.h"
 #include "tensorflow/compiler/mlir/lite/ir/tfl_ops_interface.h.inc"
 #define GET_OP_CLASSES
 #include "tensorflow/compiler/mlir/lite/ir/tfl_ops.h.inc"
diff --git a/tensorflow/compiler/mlir/lite/ir/tfl_ops.td b/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
index 96eb69f7c8f..53bec976186 100644
--- a/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
+++ b/tensorflow/compiler/mlir/lite/ir/tfl_ops.td
@@ -285,7 +285,10 @@ def TFL_ComparisonBinaryBuilder : OpBuilder<
 
 class TFL_Op<string mnemonic, list<OpTrait> traits = []> :
     Op<TFL_Dialect, mnemonic, !listconcat(traits,
-       [DeclareOpInterfaceMethods<TFL_RuntimeVerification>])> {
+       [DeclareOpInterfaceMethods<TFL_RuntimeVerification>,
+        // All TFL ops are supported on CPU.
+        DeclareOpInterfaceMethods<TFL_CpuTargetOp>
+       ])> {
   // FlatBuffer generation specific information.
   // -------------------------------------------
   // When generating the FlatBuffer output some operations have