Disable using MLIR for VarIsInitializedOp via XlaCompilationCache::CompileSingleOp.

TPU on-demand mode can trigger for VarIsInitializedOp, which is not yet supported for compilation via MLIR.

PiperOrigin-RevId: 337588657
Change-Id: Ia67cac14a80fd89e2d9a0de66b3b4770b88df184
This commit is contained in:
Andy Ly 2020-10-16 15:25:29 -07:00 committed by TensorFlower Gardener
parent c60ced548c
commit 7eefc3c951

View File

@@ -289,8 +289,10 @@ Status XlaCompilationCache::CompileSingleOp(
return arg.kind == XlaCompiler::Argument::kTensorList;
});
const ConfigProto* config = ctx->function_library()->config_proto();
// TODO(b/171039585): Support tf.VarIsInitializedOp using MLIR.
bool use_mlir = config && config->experimental().enable_mlir_bridge() &&
!has_tensor_list_arg;
!has_tensor_list_arg &&
node_def.op() != "VarIsInitializedOp";
#ifdef LIBTPU_ON_GCE
if (use_mlir) {
LOG(WARNING) << "MLIR is not supported in this environment.";