Support creating an XNNPACK delegate in the model evaluation namespace, and remove unnecessary dependencies on TFLite's runtime.
PiperOrigin-RevId: 301551749
Change-Id: I37dd5322093b33f269a9b2d2b6698f27b0c9be88
parent 56944a8148
commit a602383a32
@@ -40,9 +40,9 @@ cc_library(
    hdrs = ["utils.h"],
    copts = tflite_copts(),
    deps = [
        "//tensorflow/lite:context",
        "//tensorflow/lite:framework",
        "//tensorflow/lite/c:common",
        "//tensorflow/lite/delegates/nnapi:nnapi_delegate",
        "//tensorflow/lite/delegates/xnnpack:xnnpack_delegate",
    ] + select({
        "//tensorflow:android": [
            "//tensorflow/lite/delegates/gpu:delegate",
@@ -59,6 +59,17 @@ cc_library(
    }),
)

cc_library(
    name = "evaluation_delegate_provider",
    srcs = ["evaluation_delegate_provider.cc"],
    hdrs = ["evaluation_delegate_provider.h"],
    copts = tflite_copts(),
    deps = [
        ":utils",
        "//tensorflow/lite/tools/evaluation/proto:evaluation_stages_cc_proto",
    ],
)

cc_test(
    name = "utils_test",
    srcs = ["utils_test.cc"],
@@ -74,3 +85,14 @@ cc_test(
        "@com_google_googletest//:gtest_main",
    ],
)

cc_test(
    name = "evaluation_delegate_provider_test",
    srcs = ["evaluation_delegate_provider_test.cc"],
    linkopts = tflite_linkopts(),
    deps = [
        ":evaluation_delegate_provider",
        "//tensorflow/lite/tools/evaluation/proto:evaluation_stages_cc_proto",
        "@com_google_googletest//:gtest_main",
    ],
)
@@ -0,0 +1,80 @@
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/lite/tools/evaluation/evaluation_delegate_provider.h"

namespace tflite {
namespace evaluation {
namespace {
constexpr char kNnapiDelegate[] = "nnapi";
constexpr char kGpuDelegate[] = "gpu";
constexpr char kHexagonDelegate[] = "hexagon";
constexpr char kXnnpackDelegate[] = "xnnpack";
}  // namespace

TfliteInferenceParams::Delegate ParseStringToDelegateType(
    const std::string& val) {
  if (val == kNnapiDelegate) return TfliteInferenceParams::NNAPI;
  if (val == kGpuDelegate) return TfliteInferenceParams::GPU;
  if (val == kHexagonDelegate) return TfliteInferenceParams::HEXAGON;
  if (val == kXnnpackDelegate) return TfliteInferenceParams::XNNPACK;
  return TfliteInferenceParams::NONE;
}

TfLiteDelegatePtr CreateTfLiteDelegate(const TfliteInferenceParams& params,
                                       std::string* error_msg) {
  const auto type = params.delegate();

  switch (type) {
    case TfliteInferenceParams::NNAPI: {
      auto p = CreateNNAPIDelegate();
      if (!p && error_msg) *error_msg = "NNAPI not supported";
      return p;
    }
    case TfliteInferenceParams::GPU: {
      auto p = CreateGPUDelegate();
      if (!p && error_msg) *error_msg = "GPU delegate not supported.";
      return p;
    }
    case TfliteInferenceParams::HEXAGON: {
      auto p = CreateHexagonDelegate(/*library_directory_path=*/"",
                                     /*profiling=*/false);
      if (!p && error_msg) {
        *error_msg =
            "Hexagon delegate is not supported on the platform or required "
            "libraries are missing.";
      }
      return p;
    }
    case TfliteInferenceParams::XNNPACK: {
      auto p = CreateXNNPACKDelegate(params.num_threads());
      if (!p && error_msg) *error_msg = "XNNPACK delegate not supported.";
      return p;
    }
    case TfliteInferenceParams::NONE:
      if (error_msg) *error_msg = "No delegate type is specified.";
      return TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
    default:
      if (error_msg) {
        *error_msg = "Creation of delegate type: " +
                     TfliteInferenceParams::Delegate_Name(type) +
                     " not supported yet.";
      }
      return TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
  }
}

}  // namespace evaluation
}  // namespace tflite
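For context, a minimal usage sketch of the new provider (not part of this change): it assumes an already-built tflite::Interpreter and a populated TfliteInferenceParams, and the helper name ApplyDelegate is hypothetical.

#include <string>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/tools/evaluation/evaluation_delegate_provider.h"

// Hypothetical helper: create whatever delegate `params` asks for and, if one
// was created, hand it to the interpreter.
TfLiteStatus ApplyDelegate(
    const tflite::evaluation::TfliteInferenceParams& params,
    tflite::Interpreter* interpreter,
    tflite::evaluation::TfLiteDelegatePtr* out_delegate) {
  std::string error_msg;
  *out_delegate = tflite::evaluation::CreateTfLiteDelegate(params, &error_msg);
  if (*out_delegate == nullptr) {
    // Either NONE was requested or creation failed; error_msg says which.
    return kTfLiteOk;
  }
  // The delegate must outlive the interpreter's use of it, hence the
  // caller-owned out_delegate.
  return interpreter->ModifyGraphWithDelegate(out_delegate->get());
}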
@@ -0,0 +1,38 @@
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_LITE_TOOLS_EVALUATION_EVALUATION_DELEGATE_PROVIDER_H_
#define TENSORFLOW_LITE_TOOLS_EVALUATION_EVALUATION_DELEGATE_PROVIDER_H_

#include "tensorflow/lite/tools/evaluation/proto/evaluation_stages.pb.h"
#include "tensorflow/lite/tools/evaluation/utils.h"

namespace tflite {
namespace evaluation {

// Parse a string 'val' to the corresponding delegate type defined by
// TfliteInferenceParams::Delegate.
TfliteInferenceParams::Delegate ParseStringToDelegateType(
    const std::string& val);

// Create a TfLite delegate based on the given TfliteInferenceParams 'params'.
// If there's an error during the creation, an error message will be recorded to
// 'error_msg' if provided.
TfLiteDelegatePtr CreateTfLiteDelegate(const TfliteInferenceParams& params,
                                       std::string* error_msg = nullptr);
}  // namespace evaluation
}  // namespace tflite

#endif  // TENSORFLOW_LITE_TOOLS_EVALUATION_EVALUATION_DELEGATE_PROVIDER_H_
@@ -0,0 +1,44 @@
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/tools/evaluation/evaluation_delegate_provider.h"

#include <gtest/gtest.h>
#include "tensorflow/lite/tools/evaluation/proto/evaluation_stages.pb.h"

namespace tflite {
namespace evaluation {
namespace {
TEST(EvaluationDelegateProviderTest, ParseStringToDelegateType) {
  EXPECT_EQ(TfliteInferenceParams::NNAPI, ParseStringToDelegateType("nnapi"));
  EXPECT_EQ(TfliteInferenceParams::GPU, ParseStringToDelegateType("gpu"));
  EXPECT_EQ(TfliteInferenceParams::HEXAGON,
            ParseStringToDelegateType("hexagon"));
  EXPECT_EQ(TfliteInferenceParams::XNNPACK,
            ParseStringToDelegateType("xnnpack"));

  EXPECT_EQ(TfliteInferenceParams::NONE, ParseStringToDelegateType("Gpu"));
  EXPECT_EQ(TfliteInferenceParams::NONE, ParseStringToDelegateType("Testing"));
}

TEST(EvaluationDelegateProviderTest, CreateTfLiteDelegate) {
  TfliteInferenceParams params;
  params.set_delegate(TfliteInferenceParams::NONE);
  // A NONE delegate type will return a nullptr TfLite delegate ptr.
  EXPECT_TRUE(!CreateTfLiteDelegate(params));
}

}  // namespace
}  // namespace evaluation
}  // namespace tflite
@@ -111,6 +111,7 @@ message TfliteInferenceParams {
    NNAPI = 1;
    GPU = 2;
    HEXAGON = 3;
    XNNPACK = 4;
  }
  optional Delegate delegate = 2;
  // Number of threads available to the TFLite Interpreter.
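As a hedged illustration of the proto change: the new enum value can be selected through the generated C++ API. Only the `delegate` and `num_threads` fields are relied on by the provider above; everything else here is assumed.

#include "tensorflow/lite/tools/evaluation/proto/evaluation_stages.pb.h"

// Request the XNNPACK delegate for an evaluation run. num_threads is the same
// value CreateXNNPACKDelegate(int) uses to size the XNNPACK thread pool.
tflite::evaluation::TfliteInferenceParams MakeXnnpackParams() {
  tflite::evaluation::TfliteInferenceParams params;
  params.set_delegate(tflite::evaluation::TfliteInferenceParams::XNNPACK);
  params.set_num_threads(4);
  return params;
}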
@@ -30,8 +30,8 @@ namespace evaluation {

namespace {

Interpreter::TfLiteDelegatePtr CreateNullDelegate() {
  return Interpreter::TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
TfLiteDelegatePtr CreateNullDelegate() {
  return TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
}

}  // namespace
@@ -94,21 +94,20 @@ TfLiteStatus GetSortedFileNames(
#endif

// TODO(b/138448769): Migrate delegate helper APIs to lite/testing.
Interpreter::TfLiteDelegatePtr CreateNNAPIDelegate() {
TfLiteDelegatePtr CreateNNAPIDelegate() {
#if defined(__ANDROID__)
  return Interpreter::TfLiteDelegatePtr(
  return TfLiteDelegatePtr(
      NnApiDelegate(),
      // NnApiDelegate() returns a singleton, so provide a no-op deleter.
      [](TfLiteDelegate*) {});
#else
  return Interpreter::TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
  return TfLiteDelegatePtr(nullptr, [](TfLiteDelegate*) {});
#endif  // defined(__ANDROID__)
}

Interpreter::TfLiteDelegatePtr CreateNNAPIDelegate(
    StatefulNnApiDelegate::Options options) {
TfLiteDelegatePtr CreateNNAPIDelegate(StatefulNnApiDelegate::Options options) {
#if defined(__ANDROID__)
  return Interpreter::TfLiteDelegatePtr(
  return TfLiteDelegatePtr(
      new StatefulNnApiDelegate(options), [](TfLiteDelegate* delegate) {
        delete reinterpret_cast<StatefulNnApiDelegate*>(delegate);
      });
@@ -118,14 +117,13 @@ Interpreter::TfLiteDelegatePtr CreateNNAPIDelegate(
}

#if defined(__ANDROID__)
Interpreter::TfLiteDelegatePtr CreateGPUDelegate(
    TfLiteGpuDelegateOptionsV2* options) {
  return Interpreter::TfLiteDelegatePtr(TfLiteGpuDelegateV2Create(options),
                                        &TfLiteGpuDelegateV2Delete);
TfLiteDelegatePtr CreateGPUDelegate(TfLiteGpuDelegateOptionsV2* options) {
  return TfLiteDelegatePtr(TfLiteGpuDelegateV2Create(options),
                           &TfLiteGpuDelegateV2Delete);
}
#endif  // defined(__ANDROID__)

Interpreter::TfLiteDelegatePtr CreateGPUDelegate() {
TfLiteDelegatePtr CreateGPUDelegate() {
#if defined(__ANDROID__)
  TfLiteGpuDelegateOptionsV2 options = TfLiteGpuDelegateOptionsV2Default();
  options.inference_priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY;
@@ -138,7 +136,7 @@ Interpreter::TfLiteDelegatePtr CreateGPUDelegate() {
#endif  // defined(__ANDROID__)
}

Interpreter::TfLiteDelegatePtr CreateHexagonDelegate(
TfLiteDelegatePtr CreateHexagonDelegate(
    const std::string& library_directory_path, bool profiling) {
#if defined(__ANDROID__) && (defined(__arm__) || defined(__aarch64__))
  if (library_directory_path.empty()) {
@@ -155,7 +153,7 @@ Interpreter::TfLiteDelegatePtr CreateHexagonDelegate(
    TfLiteHexagonTearDown();
    return CreateNullDelegate();
  }
  return Interpreter::TfLiteDelegatePtr(delegate, [](TfLiteDelegate* delegate) {
  return TfLiteDelegatePtr(delegate, [](TfLiteDelegate* delegate) {
    TfLiteHexagonDelegateDelete(delegate);
    TfLiteHexagonTearDown();
  });
@@ -164,5 +162,25 @@ Interpreter::TfLiteDelegatePtr CreateHexagonDelegate(
#endif  // defined(__ANDROID__)
}

TfLiteDelegatePtr CreateXNNPACKDelegate() {
  TfLiteXNNPackDelegateOptions xnnpack_options =
      TfLiteXNNPackDelegateOptionsDefault();
  return CreateXNNPACKDelegate(&xnnpack_options);
}

TfLiteDelegatePtr CreateXNNPACKDelegate(
    const TfLiteXNNPackDelegateOptions* xnnpack_options) {
  auto xnnpack_delegate = TfLiteXNNPackDelegateCreate(xnnpack_options);
  return TfLiteDelegatePtr(xnnpack_delegate, [](TfLiteDelegate* delegate) {
    TfLiteXNNPackDelegateDelete(delegate);
  });
}

TfLiteDelegatePtr CreateXNNPACKDelegate(int num_threads) {
  auto opts = TfLiteXNNPackDelegateOptionsDefault();
  // Note that we don't want to use the thread pool for num_threads == 1.
  opts.num_threads = num_threads > 1 ? num_threads : 0;
  return CreateXNNPACKDelegate(&opts);
}
}  // namespace evaluation
}  // namespace tflite
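A short, hedged sketch of calling the two new XNNPACK entry points above (the function name and values here are assumptions); note that the num_threads overload maps values <= 1 to 0, i.e. single-threaded execution without a thread pool.

#include "tensorflow/lite/delegates/xnnpack/xnnpack_delegate.h"
#include "tensorflow/lite/tools/evaluation/utils.h"

// Hypothetical call sites for the two new XNNPACK helpers.
void XnnpackDelegateExamples() {
  // Explicit options, mirroring what the convenience overloads do internally.
  TfLiteXNNPackDelegateOptions opts = TfLiteXNNPackDelegateOptionsDefault();
  opts.num_threads = 4;  // 0 (the default) means no thread pool.
  tflite::evaluation::TfLiteDelegatePtr from_options =
      tflite::evaluation::CreateXNNPACKDelegate(&opts);

  // Convenience overload; values <= 1 are mapped to 0 (single-threaded).
  tflite::evaluation::TfLiteDelegatePtr from_threads =
      tflite::evaluation::CreateXNNPACKDelegate(/*num_threads=*/4);
}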
@@ -27,12 +27,18 @@ limitations under the License.
#endif
#endif

#include "tensorflow/lite/context.h"
#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/delegates/nnapi/nnapi_delegate.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/delegates/xnnpack/xnnpack_delegate.h"

namespace tflite {
namespace evaluation {

// Same w/ Interpreter::TfLiteDelegatePtr to avoid pulling
// tensorflow/lite/interpreter.h dependency
using TfLiteDelegatePtr =
    std::unique_ptr<TfLiteDelegate, void (*)(TfLiteDelegate*)>;

std::string StripTrailingSlashes(const std::string& path);

bool ReadFileLines(const std::string& file_path,
@@ -50,20 +56,22 @@ inline TfLiteStatus GetSortedFileNames(const std::string& directory,
                            std::unordered_set<std::string>());
}

Interpreter::TfLiteDelegatePtr CreateNNAPIDelegate();
TfLiteDelegatePtr CreateNNAPIDelegate();

Interpreter::TfLiteDelegatePtr CreateNNAPIDelegate(
    StatefulNnApiDelegate::Options options);
TfLiteDelegatePtr CreateNNAPIDelegate(StatefulNnApiDelegate::Options options);

Interpreter::TfLiteDelegatePtr CreateGPUDelegate();
TfLiteDelegatePtr CreateGPUDelegate();
#if defined(__ANDROID__)
Interpreter::TfLiteDelegatePtr CreateGPUDelegate(
    TfLiteGpuDelegateOptionsV2* options);
TfLiteDelegatePtr CreateGPUDelegate(TfLiteGpuDelegateOptionsV2* options);
#endif

Interpreter::TfLiteDelegatePtr CreateHexagonDelegate(
TfLiteDelegatePtr CreateHexagonDelegate(
    const std::string& library_directory_path, bool profiling);

TfLiteDelegatePtr CreateXNNPACKDelegate();
TfLiteDelegatePtr CreateXNNPACKDelegate(
    const TfLiteXNNPackDelegateOptions* options);
TfLiteDelegatePtr CreateXNNPACKDelegate(int num_threads);
}  // namespace evaluation
}  // namespace tflite
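Finally, a hedged illustration of why the lightweight alias matters: helpers that used to return Interpreter::TfLiteDelegatePtr (which dragged in interpreter.h) now return the same underlying type via utils.h alone, so delegates from different backends can be owned uniformly. The function name below is hypothetical.

#include <vector>

#include "tensorflow/lite/tools/evaluation/utils.h"

// Collect delegates without pulling in tensorflow/lite/interpreter.h.
std::vector<tflite::evaluation::TfLiteDelegatePtr> CollectDelegates() {
  std::vector<tflite::evaluation::TfLiteDelegatePtr> delegates;
  // Each pointer carries its own deleter: a no-op for the NnApiDelegate()
  // singleton, TfLiteXNNPackDelegateDelete for XNNPACK, and so on.
  delegates.push_back(tflite::evaluation::CreateNNAPIDelegate());
  delegates.push_back(
      tflite::evaluation::CreateXNNPACKDelegate(/*num_threads=*/2));
  return delegates;
}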