Fix bug in tpu_on_demand_compiler.cc

Don't free the result on the failure path.

PiperOrigin-RevId: 353349148
Change-Id: I26308f70662d3cc56b6350f904195b752c81799e
A. Unique TensorFlower 2021-01-22 17:24:09 -08:00 committed by TensorFlower Gardener
parent 9165dbad09
commit 9304b8af0e

@@ -220,15 +220,14 @@ class TpuCompiler : public Compiler {
       stream_executor::StreamExecutor* executor,
       const CompileOptions& options) override {
     XLA_HloModule hlo_module;
-    XLA_HloModule result;
-    auto cleanup = xla::MakeCleanup([&hlo_module, &result]() {
+    auto cleanup = xla::MakeCleanup([&hlo_module]() {
       stream_executor::tpu::SerializedProto_Free(hlo_module.proto);
-      stream_executor::tpu::SerializedProto_Free(result.proto);
       ApiConverter::Free(&hlo_module.module_config);
     });
     hlo_module.module_config = ApiConverter::ToC(module->config());
     hlo_module.proto = stream_executor::tpu::SerializeProto(module->ToProto());
     auto allocator = ApiConverter::ToC(options.device_allocator);
+    XLA_HloModule result;
     StatusHelper status;
     ExecutorApiFn()->TpuCompiler_RunHloPassesFn(
         compiler_, &hlo_module,
@@ -240,6 +239,7 @@ class TpuCompiler : public Compiler {
     }
     HloModuleProto result_proto =
         stream_executor::tpu::DeserializeProto<HloModuleProto>(result.proto);
+    stream_executor::tpu::SerializedProto_Free(result.proto);
     return HloModule::CreateFromProto(result_proto, module->config());
   }
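
For readers outside the TensorFlow tree, the sketch below illustrates the pattern this fix targets; it is not the actual XLA/TPU code, and all names in it (SerializedProto, RunPasses, CompileSketch) are hypothetical stand-ins. Before the change, `result` was registered with the scope-exit cleanup before the C API call that fills it in, so returning early on a failed TpuCompiler_RunHloPassesFn call would free a result proto that was never produced. The fix narrows the cleanup to the input and frees the result explicitly on the success path only; the sketch mirrors that shape under those assumptions.

// Minimal, self-contained sketch (hypothetical names; not the real TPU
// compiler code). The fixed shape: the scope-exit cleanup owns only the
// input, and the output is freed explicitly after it has been produced.
#include <cstdlib>
#include <functional>
#include <utility>

struct SerializedProto {
  char* bytes = nullptr;  // stand-in for a serialized HLO module proto
};

void SerializedProto_Free(SerializedProto* p) {
  std::free(p->bytes);
  p->bytes = nullptr;
}

// Tiny scope-exit helper in the spirit of xla::MakeCleanup.
class Cleanup {
 public:
  explicit Cleanup(std::function<void()> f) : f_(std::move(f)) {}
  ~Cleanup() { f_(); }

 private:
  std::function<void()> f_;
};

// Simulated pass runner: fails on an empty input, otherwise allocates an
// output proto that the caller must free.
bool RunPasses(const SerializedProto& input, SerializedProto* result) {
  if (input.bytes == nullptr) return false;
  result->bytes = static_cast<char*>(std::malloc(16));
  return true;
}

bool CompileSketch(SerializedProto input) {
  // The cleanup owns only the input; `result` is declared near the call and
  // freed explicitly on the success path, so a failure return never touches
  // an unproduced result.
  Cleanup cleanup([&input]() { SerializedProto_Free(&input); });
  SerializedProto result;
  if (!RunPasses(input, &result)) {
    return false;  // failure path: `result` was never produced, nothing to free
  }
  // ... deserialize and use result.bytes here ...
  SerializedProto_Free(&result);  // success path: free after use
  return true;
}

int main() {
  SerializedProto empty;  // triggers the failure path
  SerializedProto filled;
  filled.bytes = static_cast<char*>(std::malloc(16));
  bool failed_ok = !CompileSketch(empty);   // no free of an unproduced result
  bool success_ok = CompileSketch(filled);  // result freed on the success path
  return (failed_ok && success_ok) ? 0 : 1;
}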