Added handling of TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE to the GPU delegate's inference-priority switch statements.

PiperOrigin-RevId: 315992058
Change-Id: I7b7bc99bad69eb24cd79a17d6ccbbf5d895a148f
This commit is contained in:
Raman Sarokin 2020-06-11 15:37:35 -07:00 committed by TensorFlower Gardener
parent dbb8dba311
commit 89910c62d6

View File

@ -159,20 +159,32 @@ class Delegate {
options.priority2 = InferencePriority::MIN_LATENCY;
options.priority3 = InferencePriority::MIN_MEMORY_USAGE;
break;
case TfLiteGpuInferencePriority::
TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE:
options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
options.priority3 = InferencePriority::MIN_LATENCY;
break;
}
} else {
options.priority1 = InferencePriority::MIN_LATENCY;
switch (options_.compile_options.inference_priority) {
case TfLiteGpuInferencePriority::
TFLITE_GPU_INFERENCE_PRIORITY_MAX_PRECISION:
options.priority1 = InferencePriority::MIN_LATENCY;
options.priority2 = InferencePriority::MAX_PRECISION;
options.priority3 = InferencePriority::MIN_MEMORY_USAGE;
break;
case TfLiteGpuInferencePriority::
TFLITE_GPU_INFERENCE_PRIORITY_MIN_LATENCY:
options.priority1 = InferencePriority::MIN_LATENCY;
options.priority2 = InferencePriority::MIN_MEMORY_USAGE;
options.priority3 = InferencePriority::MAX_PRECISION;
break;
case TfLiteGpuInferencePriority::
TFLITE_GPU_INFERENCE_PRIORITY_MIN_MEMORY_USAGE:
options.priority1 = InferencePriority::MIN_MEMORY_USAGE;
options.priority2 = InferencePriority::MIN_LATENCY;
options.priority3 = InferencePriority::MAX_PRECISION;
break;
}
}
std::unique_ptr<InferenceBuilder> builder;