//===----- KaleidoscopeJIT.h - A simple JIT for Kaleidoscope ----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Contains a simple JIT definition for use in the kaleidoscope tutorials.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_EXECUTIONENGINE_ORC_KALEIDOSCOPEJIT_H
#define LLVM_EXECUTIONENGINE_ORC_KALEIDOSCOPEJIT_H

#include "llvm/ADT/STLExtras.h"
#include "llvm/ExecutionEngine/ExecutionEngine.h"
#include "llvm/ExecutionEngine/RuntimeDyld.h"
#include "llvm/ExecutionEngine/SectionMemoryManager.h"
#include "llvm/ExecutionEngine/Orc/CompileOnDemandLayer.h"
#include "llvm/ExecutionEngine/Orc/CompileUtils.h"
#include "llvm/ExecutionEngine/Orc/JITSymbol.h"
#include "llvm/ExecutionEngine/Orc/IRCompileLayer.h"
#include "llvm/ExecutionEngine/Orc/IRTransformLayer.h"
#include "llvm/ExecutionEngine/Orc/LambdaResolver.h"
#include "llvm/ExecutionEngine/Orc/ObjectLinkingLayer.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Mangler.h"
#include "llvm/Support/DynamicLibrary.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <memory>
#include <string>
#include <vector>

class PrototypeAST;
class ExprAST;

/// FunctionAST - This class represents a function definition itself.
class FunctionAST {
  std::unique_ptr<PrototypeAST> Proto;
  std::unique_ptr<ExprAST> Body;

public:
  FunctionAST(std::unique_ptr<PrototypeAST> Proto,
              std::unique_ptr<ExprAST> Body)
      : Proto(std::move(Proto)), Body(std::move(Body)) {}
  const PrototypeAST& getProto() const;
  const std::string& getName() const;
  llvm::Function *codegen();
};

/// This will compile FnAST to IR, rename the function to add the given
/// suffix (needed to prevent a name-clash with the function's stub),
/// and then take ownership of the module that the function was compiled
/// into.
std::unique_ptr<llvm::Module>
irgenAndTakeOwnership(FunctionAST &FnAST, const std::string &Suffix);

namespace llvm {
namespace orc {

class KaleidoscopeJIT {
private:
  std::unique_ptr<TargetMachine> TM;
  const DataLayout DL;
  std::unique_ptr<JITCompileCallbackManager> CompileCallbackMgr;
  std::unique_ptr<IndirectStubsManager> IndirectStubsMgr;
  ObjectLinkingLayer<> ObjectLayer;
  IRCompileLayer<decltype(ObjectLayer)> CompileLayer;

  typedef std::function<std::unique_ptr<Module>(std::unique_ptr<Module>)>
      OptimizeFunction;

  IRTransformLayer<decltype(CompileLayer), OptimizeFunction> OptimizeLayer;

public:
  typedef decltype(OptimizeLayer)::ModuleSetHandleT ModuleHandle;

  KaleidoscopeJIT()
      : TM(EngineBuilder().selectTarget()),
        DL(TM->createDataLayout()),
        CompileCallbackMgr(
            orc::createLocalCompileCallbackManager(TM->getTargetTriple(), 0)),
        CompileLayer(ObjectLayer, SimpleCompiler(*TM)),
        OptimizeLayer(CompileLayer,
                      [this](std::unique_ptr<Module> M) {
                        return optimizeModule(std::move(M));
                      }) {
    auto IndirectStubsMgrBuilder =
        orc::createLocalIndirectStubsManagerBuilder(TM->getTargetTriple());
    IndirectStubsMgr = IndirectStubsMgrBuilder();
    llvm::sys::DynamicLibrary::LoadLibraryPermanently(nullptr);
  }

  TargetMachine &getTargetMachine() { return *TM; }

  ModuleHandle addModule(std::unique_ptr<Module> M) {

    // Build our symbol resolver:
    // Lambda 1: Look back into the JIT itself to find symbols that are part of
    //           the same "logical dylib".
    // Lambda 2: Search for external symbols in the host process.
    auto Resolver = createLambdaResolver(
        [&](const std::string &Name) {
          if (auto Sym = IndirectStubsMgr->findStub(Name, false))
            return Sym.toRuntimeDyldSymbol();
          if (auto Sym = OptimizeLayer.findSymbol(Name, false))
            return Sym.toRuntimeDyldSymbol();
          return RuntimeDyld::SymbolInfo(nullptr);
        },
        [](const std::string &Name) {
          if (auto SymAddr =
                  RTDyldMemoryManager::getSymbolAddressInProcess(Name))
            return RuntimeDyld::SymbolInfo(SymAddr, JITSymbolFlags::Exported);
          return RuntimeDyld::SymbolInfo(nullptr);
        });

    // Build a singleton module set to hold our module.
    std::vector<std::unique_ptr<Module>> Ms;
    Ms.push_back(std::move(M));

    // Add the set to the JIT with the resolver we created above and a newly
    // created SectionMemoryManager.
    return OptimizeLayer.addModuleSet(std::move(Ms),
                                      make_unique<SectionMemoryManager>(),
                                      std::move(Resolver));
  }
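
  // For reference, a minimal sketch of how the eager path above is typically
  // driven. This is not part of this header; it assumes the tutorial's toy.cpp
  // REPL, i.e. a global TheJIT and a freshly codegen'd TheModule containing an
  // anonymous "__anon_expr" function (findSymbol and removeModule are declared
  // further down in this class):
  //
  //   auto H = TheJIT->addModule(std::move(TheModule));
  //   auto ExprSymbol = TheJIT->findSymbol("__anon_expr");
  //   assert(ExprSymbol && "Function not found");
  //   double (*FP)() = (double (*)())(intptr_t)ExprSymbol.getAddress();
  //   fprintf(stderr, "Evaluated to %f\n", FP());
  //   TheJIT->removeModule(H);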
  Error addFunctionAST(std::unique_ptr<FunctionAST> FnAST) {
    // Create a CompileCallback - this is the re-entry point into the compiler
    // for functions that haven't been compiled yet.
    auto CCInfo = CompileCallbackMgr->getCompileCallback();

    // Create an indirect stub. This serves as the function's "canonical
    // definition" - an unchanging (constant address) entry point to the
    // function implementation.
    // Initially we point the stub's function-pointer at the compile callback
    // that we just created. In the compile action for the callback (see below)
    // we will update the stub's function pointer to point at the function
    // implementation once it has been compiled.
    if (auto Err = IndirectStubsMgr->createStub(mangle(FnAST->getName()),
                                                CCInfo.getAddress(),
                                                JITSymbolFlags::Exported))
      return Err;

    // Move ownership of FnAST to a shared pointer - C++11 lambdas don't
    // support capture-by-move, which would be required for a unique_ptr.
    auto SharedFnAST = std::shared_ptr<FunctionAST>(std::move(FnAST));

    // Set the action to compile our AST. This lambda will be run if/when
    // execution hits the compile callback (via the stub).
    //
    // The steps to compile are:
    // (1) IRGen the function.
    // (2) Add the IR module to the JIT to make it executable like any other
    //     module.
    // (3) Use findSymbol to get the address of the compiled function.
    // (4) Update the stub pointer to point at the implementation so that
    //     subsequent calls go directly to it and bypass the compiler.
    // (5) Return the address of the implementation: this lambda will actually
    //     be run inside an attempted call to the function, and we need to
    //     continue on to the implementation to complete the attempted call.
    //     The JIT runtime (the resolver block) will use the return address of
    //     this function as the address to continue at once it has reset the
    //     CPU state to what it was immediately before the call.
    CCInfo.setCompileAction(
        [this, SharedFnAST]() {
          auto M = irgenAndTakeOwnership(*SharedFnAST, "$impl");
          addModule(std::move(M));
          auto Sym = findSymbol(SharedFnAST->getName() + "$impl");
          assert(Sym && "Couldn't find compiled function?");
          TargetAddress SymAddr = Sym.getAddress();
          if (auto Err = IndirectStubsMgr->updatePointer(
                  mangle(SharedFnAST->getName()), SymAddr)) {
            logAllUnhandledErrors(std::move(Err), errs(),
                                  "Error updating function pointer: ");
            exit(1);
          }

          return SymAddr;
        });

    return Error::success();
  }

  JITSymbol findSymbol(const std::string Name) {
    return OptimizeLayer.findSymbol(mangle(Name), true);
  }

  void removeModule(ModuleHandle H) {
    OptimizeLayer.removeModuleSet(H);
  }

private:

  std::string mangle(const std::string &Name) {
    std::string MangledName;
    raw_string_ostream MangledNameStream(MangledName);
    Mangler::getNameWithPrefix(MangledNameStream, Name, DL);
    return MangledNameStream.str();
  }

  std::unique_ptr<Module> optimizeModule(std::unique_ptr<Module> M) {
    // Create a function pass manager.
    auto FPM = llvm::make_unique<legacy::FunctionPassManager>(M.get());

    // Add some optimizations.
    FPM->add(createInstructionCombiningPass());
    FPM->add(createReassociatePass());
    FPM->add(createGVNPass());
    FPM->add(createCFGSimplificationPass());
    FPM->doInitialization();

    // Run the optimizations over all functions in the module being added to
    // the JIT.
    for (auto &F : *M)
      FPM->run(F);

    return M;
  }
};

} // end namespace orc
} // end namespace llvm

#endif // LLVM_EXECUTIONENGINE_ORC_KALEIDOSCOPEJIT_H
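
// For reference, a minimal sketch of how the lazy path is typically driven
// (again assuming the tutorial's toy.cpp REPL and its global TheJIT): a parsed
// definition is handed straight to addFunctionAST; no IR is generated until
// the function is first called through its stub.
//
//   if (auto FnAST = ParseDefinition()) {
//     if (auto Err = TheJIT->addFunctionAST(std::move(FnAST))) {
//       logAllUnhandledErrors(std::move(Err), errs(),
//                             "Couldn't add function: ");
//       exit(1);
//     }
//   }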