//===- ExecutionEngine.h - MLIR Execution engine and utils -----*- C++ -*--===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file provides a JIT-backed execution engine for MLIR modules.
//
//===----------------------------------------------------------------------===//

#ifndef MLIR_EXECUTIONENGINE_EXECUTIONENGINE_H_
#define MLIR_EXECUTIONENGINE_EXECUTIONENGINE_H_

#include "mlir/Support/LLVM.h"
#include "llvm/ExecutionEngine/ObjectCache.h"
#include "llvm/ExecutionEngine/Orc/LLJIT.h"
#include "llvm/ExecutionEngine/SectionMemoryManager.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/Error.h"

#include <functional>
#include <memory>
#include <optional>

namespace llvm {
template <typename T>
class Expected;
class Module;
class ExecutionEngine;
class JITEventListener;
class MemoryBuffer;
} // namespace llvm

namespace mlir {

class Operation;

/// A simple object cache following Lang's LLJITWithObjectCache example.
class SimpleObjectCache : public llvm::ObjectCache {
public:
  void notifyObjectCompiled(const llvm::Module *m,
                            llvm::MemoryBufferRef objBuffer) override;
  std::unique_ptr<llvm::MemoryBuffer> getObject(const llvm::Module *m) override;

  /// Dump cached object to output file `filename`.
  void dumpToObjectFile(StringRef filename);

  /// Returns `true` if cache hasn't been populated yet.
  bool isEmpty();

private:
  llvm::StringMap<std::unique_ptr<llvm::MemoryBuffer>> cachedObjects;
};

struct ExecutionEngineOptions {
  /// If `llvmModuleBuilder` is provided, it will be used to create an LLVM
  /// module from the given MLIR IR. Otherwise, a default
  /// `translateModuleToLLVMIR` function will be used to translate to LLVM IR.
  llvm::function_ref<std::unique_ptr<llvm::Module>(Operation *,
                                                   llvm::LLVMContext &)>
      llvmModuleBuilder = nullptr;

  /// If `transformer` is provided, it will be called on the LLVM module during
  /// JIT-compilation and can be used, e.g., for reporting or optimization.
  llvm::function_ref<llvm::Error(llvm::Module *)> transformer = {};

  /// `jitCodeGenOptLevel`, when provided, is used as the optimization level for
  /// target code generation.
  std::optional<llvm::CodeGenOptLevel> jitCodeGenOptLevel;

  /// If `sharedLibPaths` are provided, the underlying JIT-compilation will
  /// open and link the shared libraries for symbol resolution. Libraries that
  /// are designed to be used with the `ExecutionEngine` may implement a
  /// loading and unloading protocol: if they implement the two functions with
  /// the names defined in `kLibraryInitFnName` and `kLibraryDestroyFnName`,
  /// these functions will be called upon loading the library and upon
  /// destruction of the `ExecutionEngine`. In the init function, the library
  /// may provide a list of symbols that it wants to make available to code
  /// run by the `ExecutionEngine`. If the two functions are not defined, only
  /// symbols with public visibility are available to the executed code.
  ArrayRef<StringRef> sharedLibPaths = {};

  /// Specifies an existing `sectionMemoryMapper` to be associated with the
  /// compiled code. If none is provided, a default memory mapper that directly
  /// calls into the operating system is used.
  llvm::SectionMemoryManager::MemoryMapper *sectionMemoryMapper = nullptr;

  /// If `enableObjectDump` is set, the JIT compiler will create an object
  /// cache to store the object generated for the given module. The contents
  /// of the cache can be dumped to a file via the `dumpToObjectFile` method.
  bool enableObjectDump = false;

  /// If `enableGDBNotificationListener` is set, the JIT compiler will notify
  /// LLVM's global GDB notification listener.
  bool enableGDBNotificationListener = true;

  /// If `enablePerfNotificationListener` is set, the JIT compiler will notify
  /// LLVM's global Perf notification listener.
  bool enablePerfNotificationListener = true;
};
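
// A configuration sketch (not part of this header): the fields above are
// typically populated before constructing an engine. `optPipeline` and
// `libPaths` are assumed to exist in the caller's code; `optPipeline` would
// be a module transformer such as one produced by
// `mlir::makeOptimizingTransformer` from ExecutionEngine/OptUtils.h.
//
//     mlir::ExecutionEngineOptions engineOptions;
//     engineOptions.transformer = optPipeline;
//     engineOptions.jitCodeGenOptLevel = llvm::CodeGenOptLevel::Aggressive;
//     engineOptions.sharedLibPaths = libPaths;   // paths to runtime libraries
//     engineOptions.enableObjectDump = true;     // allow dumpToObjectFile()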

/// JIT-backed execution engine for MLIR. Assumes the IR can be converted to
/// LLVM IR. For each function, creates a wrapper function with the fixed
/// interface
///
///     void _mlir_funcName(void **)
///
/// where the only argument is interpreted as a list of pointers to the actual
/// arguments of the function, followed by a pointer to the result. This allows
/// the engine to provide the caller with a generic function pointer that can
/// be used to invoke the JIT-compiled function.
class ExecutionEngine {
public:
  /// Name of the init function of shared libraries. If a library provides both
  /// a function with this name and one with the destroy name below, this
  /// function is called upon loading the library.
  static constexpr const char *const kLibraryInitFnName =
      "__mlir_execution_engine_init";

  /// Name of the destroy function of shared libraries. If a library provides
  /// both a function with this name and one with the init name above, this
  /// function is called upon destruction of the `ExecutionEngine`.
  static constexpr const char *const kLibraryDestroyFnName =
      "__mlir_execution_engine_destroy";

  /// Function type for init functions of shared libraries. The library may
  /// provide a list of symbols that it wants to make available to code run by
  /// the `ExecutionEngine`. If the two functions are not defined, only symbols
  /// with public visibility are available to the executed code.
  using LibraryInitFn = void (*)(llvm::StringMap<void *> &);

  /// Function type for destroy functions of shared libraries.
  using LibraryDestroyFn = void (*)();
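
  // A library-side sketch (not part of this header) of the loading protocol
  // described above, assuming a hypothetical `hypothetical_callback` function
  // the library wants to expose to JIT-compiled code:
  //
  //     extern "C" void __mlir_execution_engine_init(
  //         llvm::StringMap<void *> &exportSymbols) {
  //       exportSymbols["hypothetical_callback"] =
  //           reinterpret_cast<void *>(&hypothetical_callback);
  //     }
  //
  //     extern "C" void __mlir_execution_engine_destroy() { /* cleanup */ }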

  ExecutionEngine(bool enableObjectDump, bool enableGDBNotificationListener,
                  bool enablePerfNotificationListener);

  ~ExecutionEngine();

  /// Creates an execution engine for the given MLIR IR. If no TargetMachine is
  /// provided, a default one is created (i.e. ignoring any command-line flags
  /// that could affect the set-up).
  static llvm::Expected<std::unique_ptr<ExecutionEngine>>
  create(Operation *op, const ExecutionEngineOptions &options = {},
         std::unique_ptr<llvm::TargetMachine> tm = nullptr);
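
  // A creation sketch (not part of this header), assuming `module` is an MLIR
  // module already lowered to the LLVM dialect and `engineOptions` is an
  // `ExecutionEngineOptions` such as the one sketched above:
  //
  //     llvm::Expected<std::unique_ptr<mlir::ExecutionEngine>> expectedEngine =
  //         mlir::ExecutionEngine::create(module, engineOptions);
  //     if (!expectedEngine)
  //       return expectedEngine.takeError();
  //     std::unique_ptr<mlir::ExecutionEngine> engine =
  //         std::move(*expectedEngine);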

  /// Looks up a packed-argument function wrapping the function with the given
  /// name and returns a pointer to it. Propagates errors in case of failure.
  llvm::Expected<void (*)(void **)> lookupPacked(StringRef name) const;

  /// Looks up the original function with the given name and returns a
  /// pointer to it. This is not necessarily a packed function. Propagates
  /// errors in case of failure.
  llvm::Expected<void *> lookup(StringRef name) const;
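
  // A lookup sketch (not part of this header): the raw address can be cast to
  // the original (non-packed) signature, here assuming the JIT-compiled module
  // contains a function `add_one` taking and returning an i32:
  //
  //     llvm::Expected<void *> addr = engine->lookup("add_one");
  //     if (!addr)
  //       return addr.takeError();
  //     auto *addOne = reinterpret_cast<int32_t (*)(int32_t)>(*addr);
  //     int32_t fortyThree = addOne(42);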

  /// Invokes the function with the given name passing it the list of opaque
  /// pointers to the actual arguments.
  llvm::Error invokePacked(StringRef name,
                           MutableArrayRef<void *> args = std::nullopt);
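
  // A packed-invocation sketch (not part of this header), calling the
  // `_mlir_foo` wrapper generated for a hypothetical function `foo(i32) -> i32`;
  // the argument and result storage are passed as opaque pointers:
  //
  //     int32_t arg0 = 42;
  //     int32_t res = 0;
  //     llvm::SmallVector<void *> args = {&arg0, &res};
  //     if (llvm::Error err = engine->invokePacked("foo", args))
  //       return err;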

  /// Trait that defines how a given type is passed to the JIT code. This
  /// defaults to passing the address but can be specialized.
  template <typename T>
  struct Argument {
    static void pack(SmallVectorImpl<void *> &args, T &val) {
      args.push_back(&val);
    }
  };

  /// Tag to wrap an output parameter when invoking a jitted function.
  template <typename T>
  struct Result {
    Result(T &result) : value(result) {}
    T &value;
  };

  /// Helper function to wrap an output operand when using
  /// ExecutionEngine::invoke.
  template <typename T>
  static Result<T> result(T &t) {
    return Result<T>(t);
  }

  // Specialization for output parameters: their address is forwarded directly
  // to the native code.
  template <typename T>
  struct Argument<Result<T>> {
    static void pack(SmallVectorImpl<void *> &args, Result<T> &result) {
      args.push_back(&result.value);
    }
  };

  /// Invokes the function with the given name, passing it the list of
  /// arguments by value. The function result can be obtained through an output
  /// parameter using the `Result` wrapper defined above. For example:
  ///
  ///     func @foo(%arg0 : i32) -> i32 attributes { llvm.emit_c_interface }
  ///
  /// can be invoked:
  ///
  ///     int32_t result = 0;
  ///     llvm::Error error = jit->invoke("foo", 42,
  ///                                     result(result));
  template <typename... Args>
  llvm::Error invoke(StringRef funcName, Args... args) {
    const std::string adapterName =
        std::string("_mlir_ciface_") + funcName.str();
    llvm::SmallVector<void *> argsArray;
    // Pack every argument into an array of pointers. Delegate the packing to a
    // trait so that it can be overridden per argument type.
    (Argument<Args>::pack(argsArray, args), ...);
    return invokePacked(adapterName, argsArray);
  }

  /// Set the target triple and the data layout for the input module based on
  /// the input TargetMachine. This is implicitly done when creating the
  /// engine.
  static void setupTargetTripleAndDataLayout(llvm::Module *llvmModule,
                                             llvm::TargetMachine *tm);

  /// Dump object code to output file `filename`.
  void dumpToObjectFile(StringRef filename);

  /// Register symbols with this ExecutionEngine.
  void registerSymbols(
      llvm::function_ref<llvm::orc::SymbolMap(llvm::orc::MangleAndInterner)>
          symbolMap);
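
  // A registration sketch (not part of this header), exposing a host-side
  // `hostCallback` function to JIT-compiled code. The exact ORC symbol types
  // (`ExecutorSymbolDef`, `ExecutorAddr`) vary across LLVM versions:
  //
  //     engine->registerSymbols([&](llvm::orc::MangleAndInterner interner) {
  //       llvm::orc::SymbolMap symbolMap;
  //       symbolMap[interner("hostCallback")] = {
  //           llvm::orc::ExecutorAddr::fromPtr(&hostCallback),
  //           llvm::JITSymbolFlags::Exported};
  //       return symbolMap;
  //     });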

private:
  /// Ordering of llvmContext and jit is important for destruction purposes:
  /// the jit must be destroyed before the context.
  llvm::LLVMContext llvmContext;

  /// Underlying LLJIT.
  std::unique_ptr<llvm::orc::LLJIT> jit;

  /// Underlying cache.
  std::unique_ptr<SimpleObjectCache> cache;

  /// Names of functions that may be looked up.
  std::vector<std::string> functionNames;

  /// GDB notification listener.
  llvm::JITEventListener *gdbListener;

  /// Perf notification listener.
  llvm::JITEventListener *perfListener;

  /// Destroy functions in the libraries loaded by the ExecutionEngine that are
  /// called when this ExecutionEngine is destructed.
  SmallVector<LibraryDestroyFn> destroyFns;
};

} // namespace mlir

#endif // MLIR_EXECUTIONENGINE_EXECUTIONENGINE_H_