Add mechanism to dump JIT-compiled objects to files
This commit introduces the bits to be able to dump JIT-compiled objects to external files by passing an object cache to OrcJit. The new functionality is tested in mlir-cpu-runner under the flag `dump-object-file`.

Closes #95
PiperOrigin-RevId: 266439265
Parent: 636971a59d
Commit: d72aca428b
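For orientation, here is a rough sketch (not part of the change itself) of how a client could drive the new API once it is in place. The entry-point name "main" and the output path "main.o" are illustrative, and the ModuleOp is assumed to already be lowered to the LLVM dialect:

    // Sketch only: exercise the new object-cache plumbing from C++.
    #include "mlir/ExecutionEngine/ExecutionEngine.h"

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/Support/Error.h"

    llvm::Error runAndDumpObject(mlir::ModuleOp module) {
      // With the defaults used here, the engine owns a SimpleObjectCache, so
      // there is a cached object to write out after JIT compilation.
      auto expectedEngine = mlir::ExecutionEngine::create(module);
      if (!expectedEngine)
        return expectedEngine.takeError();
      auto &engine = *expectedEngine;

      // Invoke the packed-argument entry point (no arguments in this sketch),
      // then dump the JIT-compiled object to disk.
      llvm::SmallVector<void *, 1> args;
      if (llvm::Error error = engine->invoke("main", args))
        return error;
      engine->dumpToObjectFile("main.o");
      return llvm::Error::success();
    }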
third_party/mlir/include/mlir/ExecutionEngine/ExecutionEngine.h

@@ -49,8 +49,11 @@ public:
                             llvm::MemoryBufferRef ObjBuffer) override;
   std::unique_ptr<llvm::MemoryBuffer> getObject(const llvm::Module *M) override;

+  /// Dump cached object to output file `filename`.
+  void dumpToObjectFile(llvm::StringRef filename);
+
 private:
-  llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> CachedObjects;
+  llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> cachedObjects;
 };

 /// JIT-backed execution engine for MLIR modules. Assumes the module can be
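Taken together, the hunk above leaves the cache class looking roughly like this. It is reconstructed here only for readability: the llvm::ObjectCache base is implied by the overridden methods, and the class name comes from the implementation hunks further below.

    class SimpleObjectCache : public llvm::ObjectCache {
    public:
      void notifyObjectCompiled(const llvm::Module *M,
                                llvm::MemoryBufferRef ObjBuffer) override;
      std::unique_ptr<llvm::MemoryBuffer> getObject(const llvm::Module *M) override;

      /// Dump cached object to output file `filename`.
      void dumpToObjectFile(llvm::StringRef filename);

    private:
      llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> cachedObjects;
    };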
@@ -65,15 +68,18 @@ private:
 /// be used to invoke the JIT-compiled function.
 class ExecutionEngine {
 public:
+  ExecutionEngine(bool enableObjectCache);
+
   /// Creates an execution engine for the given module. If `transformer` is
   /// provided, it will be called on the LLVM module during JIT-compilation and
   /// can be used, e.g., for reporting or optimization.
   /// If `sharedLibPaths` are provided, the underlying JIT-compilation will open
   /// and link the shared libraries for symbol resolution.
-  static llvm::Expected<std::unique_ptr<ExecutionEngine>>
-  create(ModuleOp m,
-         std::function<llvm::Error(llvm::Module *)> transformer = {},
-         ArrayRef<StringRef> sharedLibPaths = {});
+  /// If `objectCache` is provided, JIT compiler will use it to store the object
+  /// generated for the given module.
+  static llvm::Expected<std::unique_ptr<ExecutionEngine>> create(
+      ModuleOp m, std::function<llvm::Error(llvm::Module *)> transformer = {},
+      ArrayRef<StringRef> sharedLibPaths = {}, bool enableObjectCache = false);

   /// Looks up a packed-argument function with the given name and returns a
   /// pointer to it. Propagates errors in case of failure.
@@ -94,6 +100,9 @@ public:
   /// the engine.
   static bool setupTargetTriple(llvm::Module *llvmModule);

+  /// Dump object code to output file `filename`.
+  void dumpToObjectFile(llvm::StringRef filename);
+
 private:
   // Ordering of llvmContext and jit is important for destruction purposes: the
   // jit must be destroyed before the context.
third_party/mlir/lib/ExecutionEngine/ExecutionEngine.cpp

@@ -22,6 +22,7 @@
 #include "mlir/ExecutionEngine/ExecutionEngine.h"
 #include "mlir/IR/Function.h"
 #include "mlir/IR/Module.h"
+#include "mlir/Support/FileUtilities.h"
 #include "mlir/Target/LLVMIR.h"

 #include "llvm/Bitcode/BitcodeReader.h"

@@ -37,6 +38,7 @@
 #include "llvm/IR/IRBuilder.h"
 #include "llvm/Support/Error.h"
 #include "llvm/Support/TargetRegistry.h"
+#include "llvm/Support/ToolOutputFile.h"

 using namespace mlir;
 using llvm::dbgs;
@@ -68,13 +70,13 @@ namespace mlir {

 void SimpleObjectCache::notifyObjectCompiled(const Module *M,
                                              MemoryBufferRef ObjBuffer) {
-  CachedObjects[M->getModuleIdentifier()] = MemoryBuffer::getMemBufferCopy(
+  cachedObjects[M->getModuleIdentifier()] = MemoryBuffer::getMemBufferCopy(
       ObjBuffer.getBuffer(), ObjBuffer.getBufferIdentifier());
 }

 std::unique_ptr<MemoryBuffer> SimpleObjectCache::getObject(const Module *M) {
-  auto I = CachedObjects.find(M->getModuleIdentifier());
-  if (I == CachedObjects.end()) {
+  auto I = cachedObjects.find(M->getModuleIdentifier());
+  if (I == cachedObjects.end()) {
     dbgs() << "No object for " << M->getModuleIdentifier()
            << " in cache. Compiling.\n";
     return nullptr;
@@ -84,6 +86,26 @@ std::unique_ptr<MemoryBuffer> SimpleObjectCache::getObject(const Module *M) {
   return MemoryBuffer::getMemBuffer(I->second->getMemBufferRef());
 }

+void SimpleObjectCache::dumpToObjectFile(llvm::StringRef outputFilename) {
+  // Set up the output file.
+  std::string errorMessage;
+  auto file = openOutputFile(outputFilename, &errorMessage);
+  if (!file) {
+    llvm::errs() << errorMessage << "\n";
+    return;
+  }
+
+  // Dump the object generated for a single module to the output file.
+  assert(cachedObjects.size() == 1 && "Expected only one object entry.");
+  auto &cachedObject = cachedObjects.begin()->second;
+  file->os() << cachedObject->getBuffer();
+  file->keep();
+}
+
+void ExecutionEngine::dumpToObjectFile(llvm::StringRef filename) {
+  cache->dumpToObjectFile(filename);
+}
+
 // Setup LLVM target triple from the current machine.
 bool ExecutionEngine::setupTargetTriple(Module *llvmModule) {
   // Setup the machine properties from the current architecture.
@@ -168,11 +190,13 @@ void packFunctionArguments(Module *module) {
   }
 }

-Expected<std::unique_ptr<ExecutionEngine>>
-ExecutionEngine::create(ModuleOp m,
-                        std::function<Error(llvm::Module *)> transformer,
-                        ArrayRef<StringRef> sharedLibPaths) {
-  auto engine = std::make_unique<ExecutionEngine>();
+ExecutionEngine::ExecutionEngine(bool enableObjectCache)
+    : cache(enableObjectCache ? nullptr : new SimpleObjectCache()) {}
+
+Expected<std::unique_ptr<ExecutionEngine>> ExecutionEngine::create(
+    ModuleOp m, std::function<Error(llvm::Module *)> transformer,
+    ArrayRef<StringRef> sharedLibPaths, bool enableObjectCache) {
+  auto engine = std::make_unique<ExecutionEngine>(enableObjectCache);

   std::unique_ptr<llvm::LLVMContext> ctx(new llvm::LLVMContext);
   auto llvmModule = translateModuleToLLVMIR(m);
@@ -280,5 +304,4 @@ Error ExecutionEngine::invoke(StringRef name, MutableArrayRef<void *> args) {

   return Error::success();
 }

 } // end namespace mlir
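The hunk that actually hands the cache to OrcJit inside ExecutionEngine::create is not part of this excerpt. With the ORC APIs of this LLVM era, that wiring might look roughly like the sketch below; LLJITBuilder, TMOwningSimpleCompiler, and the compile-function-creator callback are LLVM APIs, and the exact code in the commit may differ.

    // Sketch only: plumb an llvm::ObjectCache into an ORC LLJIT instance.
    #include "llvm/ExecutionEngine/Orc/CompileUtils.h"
    #include "llvm/ExecutionEngine/Orc/IRCompileLayer.h"
    #include "llvm/ExecutionEngine/Orc/LLJIT.h"

    static llvm::Expected<std::unique_ptr<llvm::orc::LLJIT>>
    createJitWithCache(llvm::ObjectCache *cache) {
      using namespace llvm::orc;
      // SimpleCompiler accepts an optional ObjectCache: it calls
      // notifyObjectCompiled after each compile and consults getObject before
      // recompiling, which is what lets dumpToObjectFile find an object.
      auto compileFunctionCreator = [cache](JITTargetMachineBuilder jtmb)
          -> llvm::Expected<IRCompileLayer::CompileFunction> {
        auto tm = jtmb.createTargetMachine();
        if (!tm)
          return tm.takeError();
        return IRCompileLayer::CompileFunction(
            TMOwningSimpleCompiler(std::move(*tm), cache));
      };
      return LLJITBuilder()
          .setCompileFunctionCreator(std::move(compileFunctionCreator))
          .create();
    }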
third_party/mlir/lib/Support/JitRunner.cpp (vendored, 15 lines changed)
@@ -95,6 +95,16 @@ static llvm::cl::list<std::string>
                    llvm::cl::ZeroOrMore, llvm::cl::MiscFlags::CommaSeparated,
                    llvm::cl::cat(clOptionsCategory));

+// CLI variables for debugging.
+static llvm::cl::opt<bool> dumpObjectFile(
+    "dump-object-file",
+    llvm::cl::desc("Dump JITted-compiled object to file specified with "
+                   "-object-filename (<input file>.o by default)."));
+
+static llvm::cl::opt<std::string> objectFilename(
+    "object-filename",
+    llvm::cl::desc("Dump JITted-compiled object to file <input file>.o"));
+
 static OwningModuleRef parseMLIRInput(StringRef inputFilename,
                                       MLIRContext *context) {
   // Set up the input file.
@@ -181,6 +191,11 @@ compileAndExecute(ModuleOp module, StringRef entryPoint,
   auto expectedFPtr = engine->lookup(entryPoint);
   if (!expectedFPtr)
     return expectedFPtr.takeError();
+
+  if (dumpObjectFile)
+    engine->dumpToObjectFile(objectFilename.empty() ? inputFilename + ".o"
+                                                    : objectFilename);
+
   void (*fptr)(void **) = *expectedFPtr;
   (*fptr)(args);