Add default values in MicroInterpreter constructors.

The MicroInterpreter uses a few member values to check its state, but in some scenarios these values are never initialized to known defaults. This can cause an error when the interpreter tries to run. To ensure things work properly, default these values in both constructors.
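In miniature, the fix is the usual C++ pattern of giving state members explicit defaults in every constructor's member-initializer list. Below is a toy sketch of why the missing defaults matter (hypothetical TinyInterpreter class, not the real MicroInterpreter API):

#include <cstdio>

// Toy illustration only: a state-checking member that never receives a
// default holds an indeterminate value, so a later state check can misfire.
enum class Status { kError, kOk };

class TinyInterpreter {
 public:
  // Mirrors the change below: default the state members in the initializer
  // list so Invoke() always starts from a known "not yet ready" state.
  TinyInterpreter() : tensors_allocated_(false), status_(Status::kError) {}

  Status Invoke() const {
    // Relies on the defaults above; without them this read is indeterminate.
    if (status_ != Status::kOk || !tensors_allocated_) return Status::kError;
    return Status::kOk;
  }

 private:
  bool tensors_allocated_;
  Status status_;
};

int main() {
  TinyInterpreter interp;
  // Prints "invoke ok: no": the interpreter correctly reports it is not ready.
  std::printf("invoke ok: %s\n",
              interp.Invoke() == Status::kOk ? "yes" : "no");
  return 0;
}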

I also updated the MicroInterpreter test to use the new RecordingMicroAllocator. Two new tests have been added:

1.) Ensure that the interpreter fails to allocate when the arena is too small at Invoke() (verified via the recording allocation APIs)

2.) Ensure that the interpreter does not allocate anything at construction time - only at Invoke() (or manually with AllocateTensors()).

This will give us better coverage when we add more multi-tenant use cases.
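For reference, here is a condensed sketch of how application code could combine a MicroInterpreter with the RecordingMicroAllocator to audit arena usage, using the same calls the new tests in the diff below exercise. The helper name AuditTensorArrayBytes and the use of MicroErrorReporter with a caller-supplied MicroOpResolver are illustrative assumptions, not part of this change:

#include <cstddef>
#include <cstdint>

#include "tensorflow/lite/micro/micro_error_reporter.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/micro/micro_op_resolver.h"
#include "tensorflow/lite/micro/recording_micro_allocator.h"

// Runs one Invoke() of |model| against |arena| and returns the recorded size
// of the TfLiteTensor array, or 0 if allocation or invocation fails.
size_t AuditTensorArrayBytes(const tflite::Model* model,
                             const tflite::MicroOpResolver& op_resolver,
                             uint8_t* arena, size_t arena_size) {
  tflite::MicroErrorReporter error_reporter;
  tflite::RecordingMicroAllocator* allocator =
      tflite::RecordingMicroAllocator::Create(arena, arena_size,
                                              &error_reporter);
  if (allocator == nullptr) return 0;

  // Construction performs no arena allocations; everything is deferred to
  // AllocateTensors()/Invoke(), which is what the second new test verifies.
  tflite::MicroInterpreter interpreter(model, &op_resolver, allocator,
                                       &error_reporter);
  if (interpreter.Invoke() != kTfLiteOk) return 0;

  // Dump a per-category breakdown to the error reporter, then return one of
  // the recorded figures.
  allocator->PrintAllocations();
  return allocator
      ->GetRecordedAllocation(
          tflite::RecordedAllocationType::kTfLiteTensorArray)
      .used_bytes;
}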

PiperOrigin-RevId: 316877994
Change-Id: I0582080a1fb649276076371be991a13392324801
Nick Kreeger 2020-06-17 06:43:11 -07:00 committed by TensorFlower Gardener
parent 9eafb72689
commit 401dad16ea
3 changed files with 116 additions and 0 deletions

tensorflow/lite/micro/BUILD

@@ -268,6 +268,7 @@ tflite_micro_cc_test(
":micro_framework",
":micro_utils",
":op_resolvers",
":recording_allocators",
":test_helpers",
"//tensorflow/lite/core/api",
"//tensorflow/lite/micro/testing:micro_test",

tensorflow/lite/micro/micro_interpreter.cc

@@ -83,6 +83,8 @@ MicroInterpreter::MicroInterpreter(const Model* model,
error_reporter_(error_reporter),
allocator_(*MicroAllocator::Create(tensor_arena, tensor_arena_size,
error_reporter)),
tensors_allocated_(false),
initialization_status_(kTfLiteError),
context_helper_(error_reporter_, &allocator_) {
Init(profiler);
}
@@ -96,6 +98,8 @@ MicroInterpreter::MicroInterpreter(const Model* model,
op_resolver_(*op_resolver),
error_reporter_(error_reporter),
allocator_(*allocator),
tensors_allocated_(false),
initialization_status_(kTfLiteError),
context_helper_(error_reporter_, &allocator_) {
Init(profiler);
}

tensorflow/lite/micro/micro_interpreter_test.cc

@@ -21,6 +21,7 @@ limitations under the License.
#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
#include "tensorflow/lite/micro/micro_optional_debug_tools.h"
#include "tensorflow/lite/micro/micro_utils.h"
#include "tensorflow/lite/micro/recording_micro_allocator.h"
#include "tensorflow/lite/micro/test_helpers.h"
#include "tensorflow/lite/micro/testing/micro_test.h"
@@ -244,6 +245,7 @@ TF_LITE_MICRO_TEST(TestIncompleteInitialization) {
tflite::testing::MockOpResolver mock_resolver;
constexpr size_t allocator_buffer_size = 2048;
uint8_t allocator_buffer[allocator_buffer_size];
tflite::MicroInterpreter interpreter(model, mock_resolver, allocator_buffer,
allocator_buffer_size,
micro_test::reporter);
@@ -276,4 +278,113 @@ TF_LITE_MICRO_TEST(InterpreterWithProfilerShouldProfileOps) {
#endif
}
TF_LITE_MICRO_TEST(TestIncompleteInitializationAllocationsWithSmallArena) {
const tflite::Model* model = tflite::testing::GetComplexMockModel();
TF_LITE_MICRO_EXPECT_NE(nullptr, model);
tflite::testing::MockOpResolver mock_resolver;
// 1kb is too small for the ComplexMockModel:
constexpr size_t allocator_buffer_size = 1048;
uint8_t allocator_buffer[allocator_buffer_size];
tflite::RecordingMicroAllocator* allocator =
tflite::RecordingMicroAllocator::Create(
allocator_buffer, allocator_buffer_size, micro_test::reporter);
TF_LITE_MICRO_EXPECT_NE(nullptr, allocator);
tflite::MicroInterpreter interpreter(model, &mock_resolver, allocator,
micro_test::reporter);
// Interpreter fails because arena is too small:
TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteError);
// Ensure allocations are zero (ignore tail since some internal structs are
// initialized with this space):
TF_LITE_MICRO_EXPECT_EQ(
0, allocator->GetSimpleMemoryAllocator()->GetHeadUsedBytes());
TF_LITE_MICRO_EXPECT_EQ(
0, allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorArray)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(
0, allocator
->GetRecordedAllocation(tflite::RecordedAllocationType::
kTfLiteTensorArrayQuantizationData)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(
0,
allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(
0,
allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData)
.used_bytes);
}
TF_LITE_MICRO_TEST(TestInterpreterDoesNotAllocateUntilInvoke) {
const tflite::Model* model = tflite::testing::GetComplexMockModel();
TF_LITE_MICRO_EXPECT_NE(nullptr, model);
tflite::testing::MockOpResolver mock_resolver;
constexpr size_t allocator_buffer_size = 1024 * 4;
uint8_t allocator_buffer[allocator_buffer_size];
tflite::RecordingMicroAllocator* allocator =
tflite::RecordingMicroAllocator::Create(
allocator_buffer, allocator_buffer_size, micro_test::reporter);
TF_LITE_MICRO_EXPECT_NE(nullptr, allocator);
tflite::MicroInterpreter interpreter(model, &mock_resolver, allocator,
micro_test::reporter);
// Ensure allocations are zero (ignore tail since some internal structs are
// initialized with this space):
TF_LITE_MICRO_EXPECT_EQ(
0, allocator->GetSimpleMemoryAllocator()->GetHeadUsedBytes());
TF_LITE_MICRO_EXPECT_EQ(
0, allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorArray)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(
0,
allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(
0,
allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData)
.used_bytes);
TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteOk);
allocator->PrintAllocations();
// Allocation sizes vary based on platform - check that allocations are now
// non-zero:
TF_LITE_MICRO_EXPECT_GT(
allocator->GetSimpleMemoryAllocator()->GetHeadUsedBytes(), 0);
TF_LITE_MICRO_EXPECT_GT(
allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorArray)
.used_bytes,
0);
TF_LITE_MICRO_EXPECT_GT(
allocator
->GetRecordedAllocation(
tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData)
.used_bytes,
0);
TF_LITE_MICRO_EXPECT_GT(
allocator->GetRecordedAllocation(tflite::RecordedAllocationType::kOpData)
.used_bytes,
0);
}
TF_LITE_MICRO_TESTS_END