llvm::MemoryBufferRef ObjBuffer) override;
std::unique_ptr<llvm::MemoryBuffer> getObject(const llvm::Module *M) override;
+ /// Dump cached object to output file `filename`.
+ void dumpToObjectFile(llvm::StringRef filename);
+
private:
- llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> CachedObjects;
+ llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> cachedObjects;
};
/// JIT-backed execution engine for MLIR modules. Assumes the module can be
/// used to invoke the JIT-compiled function.
class ExecutionEngine {
public:
+ ExecutionEngine(bool enableObjectCache);
+
/// Creates an execution engine for the given module. If `transformer` is
/// provided, it will be called on the LLVM module during JIT-compilation and
/// can be used, e.g., for reporting or optimization.
/// If `sharedLibPaths` are provided, the underlying JIT-compilation will open
/// and link the shared libraries for symbol resolution.
- static llvm::Expected<std::unique_ptr<ExecutionEngine>>
- create(ModuleOp m,
- std::function<llvm::Error(llvm::Module *)> transformer = {},
- ArrayRef<StringRef> sharedLibPaths = {});
+ /// If `objectCache` is provided, JIT compiler will use it to store the object
+ /// generated for the given module.
+ static llvm::Expected<std::unique_ptr<ExecutionEngine>> create(
+ ModuleOp m, std::function<llvm::Error(llvm::Module *)> transformer = {},
+ ArrayRef<StringRef> sharedLibPaths = {}, bool enableObjectCache = false);
/// Looks up a packed-argument function with the given name and returns a
/// pointer to it. Propagates errors in case of failure.
/// the engine.
static bool setupTargetTriple(llvm::Module *llvmModule);
+ /// Dump object code to output file `filename`.
+ void dumpToObjectFile(llvm::StringRef filename);
+
private:
// Ordering of llvmContext and jit is important for destruction purposes: the
// jit must be destroyed before the context.
#include "mlir/ExecutionEngine/ExecutionEngine.h"
#include "mlir/IR/Function.h"
#include "mlir/IR/Module.h"
+#include "mlir/Support/FileUtilities.h"
#include "mlir/Target/LLVMIR.h"
#include "llvm/Bitcode/BitcodeReader.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/TargetRegistry.h"
+#include "llvm/Support/ToolOutputFile.h"
using namespace mlir;
using llvm::dbgs;
/// Records the object generated for module `M` so a later lookup through
/// getObject() can reuse it instead of recompiling.
void SimpleObjectCache::notifyObjectCompiled(const Module *M,
                                             MemoryBufferRef ObjBuffer) {
  // Key the cache on the module identifier. Keep a private copy of the
  // buffer: `ObjBuffer` is only guaranteed valid for this callback.
  auto moduleId = M->getModuleIdentifier();
  auto copy = MemoryBuffer::getMemBufferCopy(ObjBuffer.getBuffer(),
                                             ObjBuffer.getBufferIdentifier());
  cachedObjects[moduleId] = std::move(copy);
}
std::unique_ptr<MemoryBuffer> SimpleObjectCache::getObject(const Module *M) {
- auto I = CachedObjects.find(M->getModuleIdentifier());
- if (I == CachedObjects.end()) {
+ auto I = cachedObjects.find(M->getModuleIdentifier());
+ if (I == cachedObjects.end()) {
dbgs() << "No object for " << M->getModuleIdentifier()
<< " in cache. Compiling.\n";
return nullptr;
return MemoryBuffer::getMemBuffer(I->second->getMemBufferRef());
}
+void SimpleObjectCache::dumpToObjectFile(llvm::StringRef outputFilename) {
+ // Set up the output file.
+ std::string errorMessage;
+ auto file = openOutputFile(outputFilename, &errorMessage);
+ if (!file) {
+ llvm::errs() << errorMessage << "\n";
+ return;
+ }
+
+ // Dump the object generated for a single module to the output file.
+ assert(cachedObjects.size() == 1 && "Expected only one object entry.");
+ auto &cachedObject = cachedObjects.begin()->second;
+ file->os() << cachedObject->getBuffer();
+ file->keep();
+}
+
+void ExecutionEngine::dumpToObjectFile(llvm::StringRef filename) {
+ cache->dumpToObjectFile(filename);
+}
+
// Setup LLVM target triple from the current machine.
bool ExecutionEngine::setupTargetTriple(Module *llvmModule) {
// Setup the machine properties from the current architecture.
}
}
-Expected<std::unique_ptr<ExecutionEngine>>
-ExecutionEngine::create(ModuleOp m,
- std::function<Error(llvm::Module *)> transformer,
- ArrayRef<StringRef> sharedLibPaths) {
- auto engine = std::make_unique<ExecutionEngine>();
+ExecutionEngine::ExecutionEngine(bool enableObjectCache)
+ : cache(enableObjectCache ? nullptr : new SimpleObjectCache()) {}
+
+Expected<std::unique_ptr<ExecutionEngine>> ExecutionEngine::create(
+ ModuleOp m, std::function<Error(llvm::Module *)> transformer,
+ ArrayRef<StringRef> sharedLibPaths, bool enableObjectCache) {
+ auto engine = std::make_unique<ExecutionEngine>(enableObjectCache);
std::unique_ptr<llvm::LLVMContext> ctx(new llvm::LLVMContext);
auto llvmModule = translateModuleToLLVMIR(m);
return Error::success();
}
-
} // end namespace mlir
llvm::cl::ZeroOrMore, llvm::cl::MiscFlags::CommaSeparated,
llvm::cl::cat(clOptionsCategory));
+// CLI variables for debugging.
+static llvm::cl::opt<bool> dumpObjectFile(
+ "dump-object-file",
+ llvm::cl::desc("Dump JITted-compiled object to file specified with "
+ "-object-filename (<input file>.o by default)."));
+
+static llvm::cl::opt<std::string> objectFilename(
+ "object-filename",
+ llvm::cl::desc("Dump JITted-compiled object to file <input file>.o"));
+
static OwningModuleRef parseMLIRInput(StringRef inputFilename,
MLIRContext *context) {
// Set up the input file.
auto expectedFPtr = engine->lookup(entryPoint);
if (!expectedFPtr)
return expectedFPtr.takeError();
+
+ if (dumpObjectFile)
+ engine->dumpToObjectFile(objectFilename.empty() ? inputFilename + ".o"
+ : objectFilename);
+
void (*fptr)(void **) = *expectedFPtr;
(*fptr)(args);