Set up the crash reproducer on the MLIR pass manager

PiperOrigin-RevId: 332969417
Change-Id: I71dd28699631fc2ffc34add4df7afc7a2a5c75f2
Mehdi Amini 2020-09-21 17:09:53 -07:00 committed by TensorFlower Gardener
parent 4c27c20349
commit e90dd8abe7
11 changed files with 72 additions and 0 deletions
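Every pass pipeline touched in the diffs below follows the same pattern: construct an mlir::PassManager and hand it to the new tensorflow::SetCrashReproducer helper before adding passes. A minimal sketch of that pattern, assuming the dump_mlir_util.h header added in this change (the RunPipeline wrapper is illustrative, not part of the commit):

#include "mlir/IR/Module.h"  // ModuleOp lived here at the time of this commit
#include "mlir/Pass/PassManager.h"
#include "mlir/Support/LogicalResult.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"

// Illustrative wrapper: attach the crash reproducer right after constructing
// the pass manager, before any passes are added or run.
mlir::LogicalResult RunPipeline(mlir::ModuleOp module) {
  mlir::PassManager pm(module.getContext());
  tensorflow::SetCrashReproducer(pm);  // directory resolved from the environment
  // ... add passes here ...
  return pm.run(module);
}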

View File

@@ -901,6 +901,7 @@ cc_library(
":decompose_resource_ops",
":decompose_resource_ops_inc_gen",
":device_util",
":dump_mlir_util",
":error_util",
":export_tf_dialect_op",
":lower_tf_lib",
@@ -982,6 +983,7 @@ cc_library(
srcs = ["transforms/graph_optimization_pass.cc"],
hdrs = ["transforms/graph_optimization_pass.h"],
deps = [
":dump_mlir_util",
":error_util",
":tensorflow_passes",
"//tensorflow/compiler/mlir:mlir_graph_optimization_pass",
@@ -1017,6 +1019,7 @@ cc_library(
deps = [
":convert_tensor",
":convert_type",
":dump_mlir_util",
":error_util",
":export_tf_dialect_op",
":export_utils",
@@ -1838,6 +1841,7 @@ cc_library(
"//tensorflow/core/platform:logging",
"@llvm-project//llvm:Support",
"@llvm-project//mlir:IR",
"@llvm-project//mlir:Pass",
],
)

View File

@@ -35,6 +35,7 @@ tf_cuda_library(
"//tensorflow/compiler/mlir/tensorflow",
"//tensorflow/compiler/mlir/tensorflow:convert_graphdef",
"//tensorflow/compiler/mlir/tensorflow:convert_type",
"//tensorflow/compiler/mlir/tensorflow:dump_mlir_util",
"//tensorflow/compiler/mlir/tensorflow:error_util",
"//tensorflow/compiler/mlir/tensorflow:tensorflow_types",
"//tensorflow/core:framework",

View File

@@ -51,6 +51,7 @@ limitations under the License.
#include "tensorflow/compiler/mlir/tensorflow/transforms/passes.h"
#include "tensorflow/compiler/mlir/tensorflow/translate/export_graphdef.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/convert_type.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/error_util.h"
#include "tensorflow/core/framework/node_def_util.h"
#include "tensorflow/core/framework/types.pb.h"
@@ -511,6 +512,7 @@ Status MlirFunction::GetFunctionDef(tensorflow::FunctionDef** f) {
return Status::OK();
}
PassManager pm(func_.getContext());
::tensorflow::SetCrashReproducer(pm);
pm.addNestedPass<FuncOp>(CreateFunctionalToExecutorDialectConversionPass());
pm.addPass(CreateBreakUpIslandsPass());

View File

@@ -22,6 +22,7 @@ limitations under the License.
#include "mlir/Transforms/Passes.h" // from @llvm-project
#include "tensorflow/compiler/mlir/tensorflow/transforms/passes.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/bridge_logger.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/error_util.h"
namespace mlir {
@@ -57,6 +58,7 @@ tensorflow::Status RunTPUBridge(
ModuleOp module, bool enable_logging,
llvm::function_ref<void(OpPassManager &pm)> pipeline_builder) {
PassManager bridge(module.getContext());
::tensorflow::SetCrashReproducer(bridge);
if (enable_logging) EnableLogging(&bridge);
// Populate a passmanager with the list of passes that implement the bridge.

View File

@@ -20,6 +20,7 @@ limitations under the License.
#include "mlir/Support/LogicalResult.h" // from @llvm-project
#include "mlir/Transforms/Passes.h" // from @llvm-project
#include "tensorflow/compiler/mlir/tensorflow/transforms/passes.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/error_util.h"
namespace mlir {
@@ -39,6 +40,7 @@ Status MlirGraphOptimizationPass::Run(const ConfigProto& config_proto,
VLOG(1) << "Run MLIR Graph Optimization Passes";
PassManager pm(module.getContext());
::tensorflow::SetCrashReproducer(pm);
// Run island coarsening before shape inference to allow more exact shape
// inference using constant folding within islands.

View File

@@ -75,6 +75,7 @@ limitations under the License.
#include "tensorflow/compiler/mlir/tensorflow/translate/mlir_roundtrip_flags.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/convert_tensor.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/convert_type.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/error_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/mangling_util.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/translate_utils.h"
@@ -3568,6 +3569,7 @@ Status SavedModelSignatureDefImporter::LiftVariables() {
mlir::StatusScopedDiagnosticHandler diag_handler(module_->getContext());
mlir::PassManager pm(module_->getContext());
SetCrashReproducer(pm);
pm.addPass(mlir::tf_executor::CreateTFExecutorGraphPruningPass());
pm.addPass(mlir::CreateExecutorDialectToFunctionalConversionPass());
pm.addPass(

View File

@@ -324,6 +324,7 @@ Status ConvertMLIRToXlaComputation(
llvm::MutableArrayRef<std::unique_ptr<mlir::Pass>>
custom_legalization_passes) {
mlir::PassManager tf2xla(module_op.getContext());
SetCrashReproducer(tf2xla);
CreateConvertMlirToXlaHloPipeline(tf2xla, device_type,
custom_legalization_passes);
@@ -512,6 +513,7 @@ Status CompileGraphToXlaHlo(
}
mlir::PassManager pm(module_op.getContext());
SetCrashReproducer(pm);
mlir::TF::StandardPipelineOptions tf_options;
mlir::TF::CreateTFStandardPipeline(pm, tf_options);
{

View File

@@ -182,4 +182,49 @@ std::string DumpRawStringToFile(llvm::StringRef name, llvm::StringRef content,
  return filepath;
}

void SetCrashReproducer(mlir::PassManager& pm, llvm::StringRef dir_path) {
  std::string path = dir_path.str();
  if (path.empty()) {
    if (getenv("MLIR_CRASH_REPRODUCER_DIRECTORY"))
      path = getenv("MLIR_CRASH_REPRODUCER_DIRECTORY");
    else if (getenv("TEST_UNDECLARED_OUTPUTS_DIR"))
      path = "sponge";
  }
  if (path.empty()) {
    LOG_FIRST_N(INFO, 1) << "disabling MLIR crash reproducer, set env var "
                            "`MLIR_CRASH_REPRODUCER_DIRECTORY` to enable.";
    return;
  }

  // Output dirs "sponge" (case-insensitive) have a special meaning: Dump into
  // the directory specified by the environment variable
  // TEST_UNDECLARED_OUTPUTS_DIR.
  string lower_path = absl::AsciiStrToLower(path);
  if (lower_path == "sponge") {
    if (!tensorflow::io::GetTestUndeclaredOutputsDir(&path)) {
      LOG(ERROR) << "MLIR crash reproducer is set to '" << dir_path.str()
                 << "', but environment variable TEST_UNDECLARED_OUTPUTS_DIR "
                    "is not set, so cannot dump anywhere.";
      return;
    }
  }

  auto* env = tensorflow::Env::Default();
  auto status = env->RecursivelyCreateDir(path);
  if (!status.ok()) {
    LOG(WARNING) << "cannot create directory '" + path +
                        "': " + status.error_message();
    return;
  }

  path += "/mlir_reproducer_";

  if (!tensorflow::Env::Default()->CreateUniqueFileName(&path, ".mlir")) {
    LOG(WARNING)
        << "cannot create unique filename, won't enable MLIR crash reproducer.";
    return;
  }
  pm.enableCrashReproducerGeneration(path, /*genLocalReproducer=*/false);
}
} // namespace tensorflow
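
The directory resolution implemented above is: an explicit dir_path argument first, then MLIR_CRASH_REPRODUCER_DIRECTORY, then the special value "sponge" when TEST_UNDECLARED_OUTPUTS_DIR is present; otherwise the reproducer stays disabled. A hedged sketch of the two call styles (the AttachReproducer helper and the /tmp path are placeholders, not part of this change):

#include "mlir/Pass/PassManager.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"

void AttachReproducer(mlir::PassManager& pm, bool use_explicit_dir) {
  if (use_explicit_dir) {
    // Explicit directory: the reproducer is written to a file named
    // mlir_reproducer_<unique>.mlir under the given path, regardless of the
    // environment.
    tensorflow::SetCrashReproducer(pm, "/tmp/mlir_repro");
  } else {
    // Empty dir_path: fall back to MLIR_CRASH_REPRODUCER_DIRECTORY, then to
    // "sponge" when TEST_UNDECLARED_OUTPUTS_DIR is set; otherwise log once
    // and leave the reproducer disabled.
    tensorflow::SetCrashReproducer(pm);
  }
}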

View File

@@ -20,6 +20,7 @@ limitations under the License.
#include "llvm/ADT/StringRef.h"
#include "mlir/IR/Operation.h" // from @llvm-project
#include "mlir/Pass/PassManager.h" // from @llvm-project
#include "tensorflow/core/platform/status.h"
namespace tensorflow {
@@ -64,6 +65,13 @@ std::string GetDumpDirFromEnvVar();
std::string DumpRawStringToFile(llvm::StringRef name, llvm::StringRef content,
                                llvm::StringRef dirname = "");

// Enables the crash reproducer on the provided PassManager, writing the
// reproducer file into the provided directory path. If the provided path is
// empty, it is retrieved from the environment variable
// `MLIR_CRASH_REPRODUCER_DIRECTORY`. If the provided path is the string
// "sponge", the file is recorded in the test's "Output Files" by inferring the
// directory path from the TEST_UNDECLARED_OUTPUTS_DIR environment variable.
void SetCrashReproducer(mlir::PassManager& pm, llvm::StringRef dir_path = "");
} // namespace tensorflow
#endif // TENSORFLOW_COMPILER_MLIR_TENSORFLOW_UTILS_DUMP_MLIR_UTIL_H_
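
Per the comment above, a caller can also pass the literal string "sponge" so the reproducer lands in the test's undeclared outputs; a minimal sketch assuming a test-only pipeline (BuildTestPipeline is an illustrative name):

#include "mlir/Pass/PassManager.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"

void BuildTestPipeline(mlir::PassManager& pm) {
  // "sponge" resolves to TEST_UNDECLARED_OUTPUTS_DIR; if that variable is not
  // set, SetCrashReproducer logs an error and leaves the reproducer disabled.
  tensorflow::SetCrashReproducer(pm, "sponge");
  // ... add passes here ...
}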

View File

@@ -41,6 +41,7 @@ cc_library(
"//tensorflow/compiler/mlir/hlo:transform_unranked_hlo", # buildcleaner: keep
"//tensorflow/compiler/mlir/hlo:unfuse_batch_norm", # buildcleaner: keep
"//tensorflow/compiler/mlir/tensorflow",
"//tensorflow/compiler/mlir/tensorflow:dump_mlir_util",
"//tensorflow/compiler/mlir/tools/kernel_gen/transforms:passes",
"//tensorflow/compiler/mlir/xla:xla_legalize_tf",
"//tensorflow/compiler/xla:debug_options_flags",

View File

@@ -48,6 +48,7 @@ limitations under the License.
#include "tensorflow/compiler/mlir/hlo/include/mlir-hlo/Dialect/mhlo/transforms/passes.h"
#include "tensorflow/compiler/mlir/tensorflow/dialect_registration.h"
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"
#include "tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.h"
#include "tensorflow/compiler/mlir/tools/kernel_gen/transforms/passes.h"
#include "tensorflow/compiler/mlir/xla/transforms/passes.h"
#include "tensorflow/compiler/xla/service/mlir_gpu/kernel_lowering.h"
@@ -71,6 +72,7 @@ Status LowerTFtoGPU(mlir::ModuleOp module, bool gpu_binary_only,
llvm::ArrayRef<uint32_t> unroll_factors) {
mlir::PassManager pm(module.getContext());
applyPassManagerCLOptions(pm);
SetCrashReproducer(pm);
pm.addPass(mlir::mhlo::createLegalizeTFPass(false));
if (gpu_binary_only) {
@@ -177,6 +179,7 @@ Status LowerGPUToLLVM(mlir::ModuleOp module, bool gpu_binary_only,
int32_t architecture) {
mlir::PassManager pm(module.getContext());
applyPassManagerCLOptions(pm);
SetCrashReproducer(pm);
auto& kernel_pm = pm.nest<mlir::gpu::GPUModuleOp>();
if (gpu_binary_only) {