/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <cstdio>
#include <cstdlib>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/optional_debug_tools.h"
|
// This is an example that is minimal to read a model
|
|
// from disk and perform inference. There is no data being loaded
|
|
// that is up to you to add as a user.
|
|
//
|
|
// NOTE: Do not add any dependencies to this that cannot be built with
|
|
// the minimal makefile. This example must remain trivial to build with
|
|
// the minimal build tool.
|
|
//
|
|
// Usage: minimal <tflite model>
|
|
|
|
using namespace tflite;
#define TFLITE_MINIMAL_CHECK(x) \
|
|
if (!(x)) { \
|
|
fprintf(stderr, "Error at %s:%d\n", __FILE__, __LINE__); \
|
|
exit(1); \
|
|
}
|
|
|
|
int main(int argc, char* argv[]) {
|
|
if (argc != 2) {
|
|
fprintf(stderr, "minimal <tflite model>\n");
|
|
return 1;
|
|
}
|
|
const char* filename = argv[1];
|
|
|
|
// Load model
|
|
std::unique_ptr<tflite::FlatBufferModel> model =
|
|
tflite::FlatBufferModel::BuildFromFile(filename);
|
|
TFLITE_MINIMAL_CHECK(model != nullptr);
|
|
|
|
// Build the interpreter
|
|
tflite::ops::builtin::BuiltinOpResolver resolver;
|
|
InterpreterBuilder builder(*model, resolver);
|
|
std::unique_ptr<Interpreter> interpreter;
|
|
builder(&interpreter);
|
|
TFLITE_MINIMAL_CHECK(interpreter != nullptr);
|
|
|
|
// Allocate tensor buffers.
|
|
TFLITE_MINIMAL_CHECK(interpreter->AllocateTensors() == kTfLiteOk);
|
|
printf("=== Pre-invoke Interpreter State ===\n");
|
|
tflite::PrintInterpreterState(interpreter.get());
|
|
|
|
// Fill input buffers
|
|
// TODO(user): Insert code to fill input tensors
|
|
|
|
// Run inference
|
|
TFLITE_MINIMAL_CHECK(interpreter->Invoke() == kTfLiteOk);
|
|
printf("\n\n=== Post-invoke Interpreter State ===\n");
|
|
tflite::PrintInterpreterState(interpreter.get());
|
|
|
|
// Read output buffers
|
|
// TODO(user): Insert getting data out code.
|
|
|
|
return 0;
|
|
}
|