diff --git a/tensorflow/lite/BUILD b/tensorflow/lite/BUILD
index e50cd4549b2..d08a2e2fbfa 100644
--- a/tensorflow/lite/BUILD
+++ b/tensorflow/lite/BUILD
@@ -242,6 +242,7 @@ cc_library(
         ":arena_planner",
         ":external_cpu_backend_context",
         ":graph_info",
+        ":kernel_api",
         ":memory_planner",
         ":minimal_logging",
         ":shared_library",
@@ -469,6 +470,7 @@ cc_test(
         "testdata/add_shared_tensors.bin",
         "testdata/empty_model.bin",
         "testdata/multi_add_flex.bin",
+        "testdata/segment_sum_invalid_buffer.bin",
         "testdata/sparse_tensor.bin",
         "testdata/test_min_runtime.bin",
         "testdata/test_model.bin",
diff --git a/tensorflow/lite/core/subgraph.cc b/tensorflow/lite/core/subgraph.cc
index 47d0fddb12b..235a776c56f 100644
--- a/tensorflow/lite/core/subgraph.cc
+++ b/tensorflow/lite/core/subgraph.cc
@@ -19,6 +19,7 @@ limitations under the License.
 #include
 
 #include "tensorflow/lite/arena_planner.h"
+#include "tensorflow/lite/builtin_ops.h"
 #include "tensorflow/lite/c/common.h"
 #include "tensorflow/lite/context_util.h"
 #include "tensorflow/lite/core/api/tensor_utils.h"
@@ -1030,6 +1031,19 @@ TfLiteStatus Subgraph::Invoke() {
           tensor->data_is_stale) {
         TF_LITE_ENSURE_STATUS(EnsureTensorDataIsReadable(tensor_index));
       }
+      if (tensor->data.raw == nullptr && tensor->bytes > 0) {
+        if (registration.builtin_code == kTfLiteBuiltinReshape && i == 1) {
+          // In general, having a tensor here with no buffer will be an error.
+          // However, for the reshape operator, the second input tensor is only
+          // used for the shape, not for the data. Thus, null buffer is ok.
+          continue;
+        } else {
+          // In all other cases, we need to return an error as otherwise we will
+          // trigger a null pointer dereference (likely).
+          ReportError("Input tensor %d lacks data", tensor_index);
+          return kTfLiteError;
+        }
+      }
     }
 
     if (check_cancelled_func_ != nullptr &&
diff --git a/tensorflow/lite/model_test.cc b/tensorflow/lite/model_test.cc
index 6993e350a48..110c54aa571 100644
--- a/tensorflow/lite/model_test.cc
+++ b/tensorflow/lite/model_test.cc
@@ -438,24 +438,48 @@ TEST(BasicFlatBufferModel, TestParseModelWithSparseTensor) {
 }
 
 // TODO(b/150072943): Add malformed model with sparse tensor tests.
-TEST(BasicFlatBufferModel, TestHandleMalformedModel) {
-  const auto model_paths = {
-      // These models use the same tensor as both input and ouput of a node
-      "tensorflow/lite/testdata/add_shared_tensors.bin",
-  };
-
-  for (const auto& model_path : model_paths) {
-    std::unique_ptr<FlatBufferModel> model =
-        FlatBufferModel::BuildFromFile(model_path);
-    ASSERT_NE(model, nullptr);
+
+// The models here have at least a node that uses the same tensor as input and
+// output. This causes segfaults when trying to eval the operator, hence we try
+// to prevent this scenario. The earliest place we can check this is in
+// `AllocateTensors`, hence the test checks that `interpreter->AllocateTensors`
+// detects these bad models.
+TEST(BasicFlatBufferModel, TestHandleMalformedModelReuseTensor) {
+  const auto model_path =
+      "tensorflow/lite/testdata/add_shared_tensors.bin";
 
-    tflite::ops::builtin::BuiltinOpResolver resolver;
-    InterpreterBuilder builder(*model, resolver);
-    std::unique_ptr<Interpreter> interpreter;
-    ASSERT_EQ(builder(&interpreter), kTfLiteOk);
-    ASSERT_NE(interpreter, nullptr);
-    ASSERT_NE(interpreter->AllocateTensors(), kTfLiteOk);
-  }
+  std::unique_ptr<FlatBufferModel> model =
+      FlatBufferModel::BuildFromFile(model_path);
+  ASSERT_NE(model, nullptr);
+
+  tflite::ops::builtin::BuiltinOpResolver resolver;
+  InterpreterBuilder builder(*model, resolver);
+  std::unique_ptr<Interpreter> interpreter;
+  ASSERT_EQ(builder(&interpreter), kTfLiteOk);
+  ASSERT_NE(interpreter, nullptr);
+  ASSERT_NE(interpreter->AllocateTensors(), kTfLiteOk);
+}
+
+// The models here have a buffer index for a tensor pointing to a null buffer.
+// This results in the tensor being interpreted as read-write, but the model
+// assumes the tensor is read-only. As such, `interpreter->Invoke()` would
+// segfault if no precondition check is added. The test checks that the
+// precondition check exists.
+TEST(BasicFlatBufferModel, TestHandleMalformedModelInvalidBuffer) {
+  const auto model_path =
+      "tensorflow/lite/testdata/segment_sum_invalid_buffer.bin";
+
+  std::unique_ptr<FlatBufferModel> model =
+      FlatBufferModel::BuildFromFile(model_path);
+  ASSERT_NE(model, nullptr);
+
+  tflite::ops::builtin::BuiltinOpResolver resolver;
+  InterpreterBuilder builder(*model, resolver);
+  std::unique_ptr<Interpreter> interpreter;
+  ASSERT_EQ(builder(&interpreter), kTfLiteOk);
+  ASSERT_NE(interpreter, nullptr);
+  ASSERT_EQ(interpreter->AllocateTensors(), kTfLiteOk);
+  ASSERT_NE(interpreter->Invoke(), kTfLiteOk);
 }
 
 // TODO(aselle): Add tests for serialization of builtin op data types.
diff --git a/tensorflow/lite/testdata/segment_sum_invalid_buffer.bin b/tensorflow/lite/testdata/segment_sum_invalid_buffer.bin
new file mode 100644
index 00000000000..b7b81cde6e7
Binary files /dev/null and b/tensorflow/lite/testdata/segment_sum_invalid_buffer.bin differ
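Note (not part of the patch): the subgraph.cc hunk above boils down to a precondition on every non-optional op input: if a tensor advertises a non-zero byte size but has no backing buffer, evaluation must stop with an error rather than dereference a null pointer, with the sole exception of the second input of RESHAPE, which is read only for its shape. The standalone sketch below illustrates that logic under invented names; `FakeTensor`, `FakeStatus`, `CheckInputHasData`, and `kFakeBuiltinReshape` are placeholders for this sketch and are not TensorFlow Lite APIs (the real check lives inline in `Subgraph::Invoke()` and uses `TfLiteTensor`, `kTfLiteBuiltinReshape`, and `ReportError` as shown in the diff).

```cpp
#include <cstddef>
#include <cstdio>

// Hypothetical, simplified stand-ins for the TFLite types used by the patch.
struct FakeTensor {
  void* data;    // backing buffer; null for the malformed models above
  size_t bytes;  // advertised size in bytes
};

enum class FakeStatus { kOk, kError };

constexpr int kFakeBuiltinReshape = 22;  // placeholder op code for this sketch

// Mirrors the guard added to Subgraph::Invoke(): an input that advertises a
// non-zero size must have a backing buffer, except the second input of
// RESHAPE, which is consulted only for its shape and never dereferenced.
FakeStatus CheckInputHasData(const FakeTensor& tensor, int builtin_code,
                             int input_index) {
  if (tensor.data == nullptr && tensor.bytes > 0) {
    if (builtin_code == kFakeBuiltinReshape && input_index == 1) {
      return FakeStatus::kOk;  // shape-only input: a null buffer is fine
    }
    std::fprintf(stderr, "Input tensor %d lacks data\n", input_index);
    return FakeStatus::kError;
  }
  return FakeStatus::kOk;
}

int main() {
  FakeTensor missing_buffer{nullptr, 16};
  // A SEGMENT_SUM-style op (not RESHAPE) with a data-less input must fail,
  // which is exactly what TestHandleMalformedModelInvalidBuffer checks.
  bool rejected = CheckInputHasData(missing_buffer, /*builtin_code=*/99,
                                    /*input_index=*/1) == FakeStatus::kError;
  return rejected ? 0 : 1;
}
```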