From 89910c62d6a8d2a7554fa7ae7f8e8edf6413e876 Mon Sep 17 00:00:00 2001
From: Raman Sarokin
Date: Thu, 11 Jun 2020 15:37:35 -0700
Subject: [PATCH] Added handling of
 TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE.

PiperOrigin-RevId: 315992058
Change-Id: I7b7bc99bad69eb24cd79a17d6ccbbf5d895a148f
---
 .../lite/delegates/gpu/cl/gpu_api_delegate.cc | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.cc b/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.cc
index a2357c558d2..fc8fcde439b 100644
--- a/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.cc
+++ b/tensorflow/lite/delegates/gpu/cl/gpu_api_delegate.cc
@@ -159,20 +159,32 @@ class Delegate {
           options.priority2 = InferencePriority::MIN_LATENCY;
           options.priority3 = InferencePriority::MIN_MEMORY_USAGE;
           break;
+        case TfLiteGpuInferencePriority::
+            TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE:
+          options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
+          options.priority3 = InferencePriority::MIN_LATENCY;
+          break;
       }
     } else {
-      options.priority1 = InferencePriority::MIN_LATENCY;
       switch (options_.compile_options.inference_priority) {
         case TfLiteGpuInferencePriority::
            TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION:
+          options.priority1 = InferencePriority::MIN_LATENCY;
           options.priority2 = InferencePriority::MAX_PRECISION;
           options.priority3 = InferencePriority::MIN_MEMORY_USAGE;
           break;
         case TfLiteGpuInferencePriority::
            TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY:
+          options.priority1 = InferencePriority::MIN_LATENCY;
           options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
           options.priority3 = InferencePriority::MAX_PRECISION;
           break;
+        case TfLiteGpuInferencePriority::
+            TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE:
+          options.priority1 = InferencePriority::MIN_MEMORY_USAGE;
+          options.priority2 = InferencePriority::MIN_LATENCY;
+          options.priority3 = InferencePriority::MAX_PRECISION;
+          break;
       }
     }
     std::unique_ptr<InferenceBuilder> builder;
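
Context (not part of the patch): the new switch cases let this OpenCL delegate map the
TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE request onto its internal priority ordering
instead of silently falling back to the latency-first ordering. Below is a minimal sketch of
how an application might ask for that priority through the public GPU delegate V2 API in
tensorflow/lite/delegates/gpu/delegate.h; that is a related but different entry point from the
gpu_api_delegate.cc patched above, and the model path and fallback handling are placeholders
for illustration only.

    // Sketch only: request memory-saving priority via the GPU delegate V2 API.
    #include <memory>

    #include "tensorflow/lite/delegates/gpu/delegate.h"
    #include "tensorflow/lite/interpreter.h"
    #include "tensorflow/lite/kernels/register.h"
    #include "tensorflow/lite/model.h"

    int main() {
      // "model.tflite" is a placeholder path.
      auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
      tflite::ops::builtin::BuiltinOpResolver resolver;
      std::unique_ptr<tflite::Interpreter> interpreter;
      tflite::InterpreterBuilder(*model, resolver)(&interpreter);

      // Favor low memory usage; the delegate orders the remaining priorities,
      // analogous to the switch handled in the patch above.
      TfLiteGpuDelegateOptionsV2 options = TfLiteGpuDelegateOptionsV2Default();
      options.inference_priority1 = TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE;

      TfLiteDelegate* delegate = TfLiteGpuDelegateV2Create(&options);
      if (interpreter->ModifyGraphWithDelegate(delegate) != kTfLiteOk) {
        // Fall back to CPU execution if the GPU delegate cannot be applied.
      }

      // ... fill inputs and call interpreter->Invoke() ...

      TfLiteGpuDelegateV2Delete(delegate);
      return 0;
    }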