1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "optimizer/code_generator/codegen.h"
17 
18 #include "llvm_ir_constructor.h"
19 
20 #include "gc_barriers.h"
21 #include "irtoc_function_utils.h"
22 #include "llvm_logger.h"
23 #include "llvm_options.h"
24 #include "metadata.h"
25 #include "transforms/runtime_calls.h"
26 
27 #include <llvm/IR/InlineAsm.h>
28 #include <llvm/IR/IntrinsicsAArch64.h>
29 #include <llvm/IR/MDBuilder.h>
30 #include <llvm/IR/Verifier.h>
31 #include <llvm/Transforms/Utils/BasicBlockUtils.h>
32 
33 using panda::llvmbackend::DebugDataBuilder;
34 using panda::llvmbackend::LLVMArkInterface;
35 using panda::llvmbackend::gc_barriers::EmitPostWRB;
36 using panda::llvmbackend::gc_barriers::EmitPreWRB;
37 using panda::llvmbackend::irtoc_function_utils::IsNoAliasIrtocFunction;
38 #ifndef NDEBUG
39 using panda::llvmbackend::irtoc_function_utils::IsPtrIgnIrtocFunction;
40 #endif
41 
42 static constexpr unsigned VECTOR_SIZE_8 = 8;
43 static constexpr unsigned VECTOR_SIZE_16 = 16;
44 
45 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
46 #define ASSERT_TYPE(input, expected_type)                                                                  \
47     ASSERT_DO((input)->getType() == (expected_type),                                                       \
48               std::cerr << "Unexpected data type: " << GetTypeName((input)->getType()) << ". Should be a " \
49                         << GetTypeName(expected_type) << "." << std::endl)
50 
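// Fixed machine-register numbers for the interpreter state (pc, accumulator and its tag,
// frame, dispatch table, ...). They mirror the per-architecture register maps quoted in
// the comments below.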
51 // arm64: { dispatch: 24, pc: 20, frame: 23, acc: 21, acc_tag: 22, moffset: 25, method_ptr: 26 },
52 constexpr auto AARCH64_PC = 20;
53 constexpr auto AARCH64_ACC = 21;
54 constexpr auto AARCH64_ACC_TAG = 22;
55 constexpr auto AARCH64_FP = 23;
56 constexpr auto AARCH64_DISPATCH = 24;
57 constexpr auto AARCH64_MOFFSET = 25;
58 constexpr auto AARCH64_METHOD_PTR = 26;
59 constexpr auto AARCH64_REAL_FP = 29;
60 
61 // x86_64: { dispatch: 8, pc: 4, frame: 5, acc: 11, acc_tag: 3 }
62 constexpr auto X86_64_PC = 4;       // renamed r10
63 constexpr auto X86_64_ACC = 11;     // renamed r3 (rbx)
64 constexpr auto X86_64_ACC_TAG = 3;  // renamed r11
65 constexpr auto X86_64_FP = 5;       // renamed r9
66 constexpr auto X86_64_DISPATCH = 8;
67 constexpr auto X86_64_REAL_FP = 9;  // renamed r5 (rbp)
68 
69 namespace {
70 inline llvm::Function *CreateFunctionDeclaration(llvm::FunctionType *functionType, const std::string &name,
71                                                  llvm::Module *module)
72 {
73     ASSERT(functionType != nullptr);
74     ASSERT(!name.empty());
75     ASSERT(module != nullptr);
76 
77     auto function = module->getFunction(name);
78     if (function != nullptr) {
79         ASSERT(function->getVisibility() == llvm::GlobalValue::ProtectedVisibility);
80         ASSERT(function->doesNotThrow());
81         return function;
82     }
83 
84     function = llvm::Function::Create(functionType, llvm::Function::ExternalLinkage, name, module);
85 
86     // Prevents emitting the `.eh_frame` section
87     function->setDoesNotThrow();
88 
89     function->setVisibility(llvm::GlobalValue::ProtectedVisibility);
90 
91     function->setSectionPrefix(name);
92 
93     return function;
94 }
95 
96 inline void CreateBlackBoxAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm)
97 {
98     auto iasmType = llvm::FunctionType::get(builder->getVoidTy(), {}, false);
99     builder->CreateCall(iasmType, llvm::InlineAsm::get(iasmType, inlineAsm, "", true), {});
100 }
101 
102 inline void CreateInt32ImmAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm, uint32_t imm)
103 {
104     auto oneInt = llvm::FunctionType::get(builder->getVoidTy(), {builder->getInt32Ty()}, false);
105     builder->CreateCall(oneInt, llvm::InlineAsm::get(oneInt, inlineAsm, "i", true), {builder->getInt32(imm)});
106 }
107 
108 inline llvm::AtomicOrdering ToAtomicOrdering(bool isVolatile)
109 {
110     return isVolatile ? LLVMArkInterface::VOLATILE_ORDER : LLVMArkInterface::NOT_ATOMIC_ORDER;
111 }
112 
113 #ifndef NDEBUG
114 inline std::string GetTypeName(llvm::Type *type)
115 {
116     std::string name;
117     auto stream = llvm::raw_string_ostream(name);
118     type->print(stream);
119     return stream.str();
120 }
121 #endif
122 }  // namespace
123 
124 namespace panda::compiler {
125 
126 #include <can_compile_intrinsics_gen.inl>
127 
128 // Use this only to pass the result into a method as an rvalue
129 static inline std::string CreateBasicBlockName(Inst *inst, const std::string &bbName)
130 {
131     std::stringstream name;
132     name << "bb" << std::to_string(inst->GetBasicBlock()->GetId()) << "_i" << std::to_string(inst->GetId()) << ".."
133          << bbName << "..";
134     return name.str();
135 }
136 
137 static inline std::string CreateNameForInst(Inst *inst)
138 {
139     return std::string("v") + std::to_string(inst->GetId());
140 }
141 
142 static inline bool IsInteger(DataType::Type type)
143 {
144     return DataType::IsTypeNumeric(type) && !DataType::IsFloatType(type) && type != DataType::POINTER;
145 }
146 
147 static inline bool IsSignedInteger(const DataType::Type &type)
148 {
149     return IsInteger(type) && DataType::IsTypeSigned(type);
150 }
151 
152 static inline bool IsUnsignedInteger(DataType::Type type)
153 {
154     return IsInteger(type) && !DataType::IsTypeSigned(type);
155 }
156 
157 static inline llvm::ICmpInst::Predicate ICmpCodeConvert(ConditionCode cc)
158 {
159     switch (cc) {
160         case ConditionCode::CC_EQ:
161             return llvm::CmpInst::Predicate::ICMP_EQ;
162         case ConditionCode::CC_NE:
163             return llvm::CmpInst::Predicate::ICMP_NE;
164         case ConditionCode::CC_LT:
165             return llvm::CmpInst::Predicate::ICMP_SLT;
166         case ConditionCode::CC_GT:
167             return llvm::CmpInst::Predicate::ICMP_SGT;
168         case ConditionCode::CC_LE:
169             return llvm::CmpInst::Predicate::ICMP_SLE;
170         case ConditionCode::CC_GE:
171             return llvm::CmpInst::Predicate::ICMP_SGE;
172         case ConditionCode::CC_B:
173             return llvm::CmpInst::Predicate::ICMP_ULT;
174         case ConditionCode::CC_A:
175             return llvm::CmpInst::Predicate::ICMP_UGT;
176         case ConditionCode::CC_BE:
177             return llvm::CmpInst::Predicate::ICMP_ULE;
178         case ConditionCode::CC_AE:
179             return llvm::CmpInst::Predicate::ICMP_UGE;
180         default:
181             UNREACHABLE();
182             return llvm::CmpInst::Predicate::ICMP_NE;
183     }
184 }
185 
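// The unordered FCMP_U* predicates also evaluate to true when either operand is NaN;
// the three-way Fcmpg/Fcmpl NaN handling is done separately in CreateFloatComparison.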
186 static inline llvm::FCmpInst::Predicate FCmpCodeConvert(ConditionCode conditionCode)
187 {
188     switch (conditionCode) {
189         case ConditionCode::CC_EQ:
190             return llvm::FCmpInst::Predicate::FCMP_UEQ;
191         case ConditionCode::CC_NE:
192             return llvm::FCmpInst::Predicate::FCMP_UNE;
193         case ConditionCode::CC_LT:
194             return llvm::FCmpInst::Predicate::FCMP_ULT;
195         case ConditionCode::CC_GT:
196             return llvm::FCmpInst::Predicate::FCMP_UGT;
197         case ConditionCode::CC_LE:
198             return llvm::FCmpInst::Predicate::FCMP_ULE;
199         case ConditionCode::CC_GE:
200             return llvm::FCmpInst::Predicate::FCMP_UGE;
201         case ConditionCode::CC_B:
202             return llvm::FCmpInst::Predicate::FCMP_ULT;
203         case ConditionCode::CC_A:
204             return llvm::FCmpInst::Predicate::FCMP_UGT;
205         case ConditionCode::CC_BE:
206             return llvm::FCmpInst::Predicate::FCMP_ULE;
207         case ConditionCode::CC_AE:
208             return llvm::FCmpInst::Predicate::FCMP_UGE;
209         default:
210             ASSERT_DO(false, (std::cerr << "Unexpected condition_code = " << conditionCode << std::endl));
211             UNREACHABLE();
212     }
213 }
214 
215 static size_t GetRealFrameReg(Arch arch)
216 {
217     switch (arch) {
218         case Arch::AARCH64:
219             return AARCH64_REAL_FP;
220         case Arch::X86_64:
221             return X86_64_REAL_FP;
222         default:
223             UNREACHABLE();
224     }
225 }
226 
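// Returns true when the Ark intrinsic has been lowered here (or deliberately needs no code,
// like the save/restore-registers markers); false means it is not handled by this emitter.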
227 bool LLVMIrConstructor::TryEmitIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId arkId)
228 {
229     switch (arkId) {
230         case RuntimeInterface::IntrinsicId::INTRINSIC_UNREACHABLE:
231             return EmitUnreachable();
232         case RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY:
233             return EmitSlowPathEntry(inst);
234         case RuntimeInterface::IntrinsicId::INTRINSIC_LOAD_ACQUIRE_MARK_WORD_EXCLUSIVE:
235             return EmitExclusiveLoadWithAcquire(inst);
236         case RuntimeInterface::IntrinsicId::INTRINSIC_STORE_RELEASE_MARK_WORD_EXCLUSIVE:
237             return EmitExclusiveStoreWithRelease(inst);
238         // LLVM encodes them using calling conventions
239         case RuntimeInterface::IntrinsicId::INTRINSIC_SAVE_REGISTERS_EP:
240         case RuntimeInterface::IntrinsicId::INTRINSIC_RESTORE_REGISTERS_EP:
241             return true;
242         case RuntimeInterface::IntrinsicId::INTRINSIC_INTERPRETER_RETURN:
243             return EmitInterpreterReturn();
244         case RuntimeInterface::IntrinsicId::INTRINSIC_TAIL_CALL:
245             return EmitTailCall(inst);
246         case RuntimeInterface::IntrinsicId::INTRINSIC_DATA_MEMORY_BARRIER_FULL:
247             return EmitMemoryFence(memory_order::FULL);
248         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPRESS_EIGHT_UTF16_TO_UTF8_CHARS_USING_SIMD:
249             return EmitCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_8>(inst);
250         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPRESS_SIXTEEN_UTF16_TO_UTF8_CHARS_USING_SIMD:
251             return EmitCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_16>(inst);
252         default:
253             return false;
254     }
255     return false;
256 }
257 
258 // Specific intrinsic Emitters
259 
260 bool LLVMIrConstructor::EmitUnreachable()
261 {
262     auto bb = GetCurrentBasicBlock();
263     if (bb->empty() || !llvm::isa<llvm::ReturnInst>(*(bb->rbegin()))) {
264         auto trap = llvm::Intrinsic::getDeclaration(func_->getParent(), llvm::Intrinsic::trap, {});
265         builder_.CreateCall(trap, {});
266         builder_.CreateUnreachable();
267     }
268     return true;
269 }
270 
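// Loads the slow-path entrypoint from the thread structure at the offset encoded in the
// intrinsic's immediates and must-tail-calls it, forwarding the original arguments plus the
// thread and real-frame-pointer registers under the same calling convention.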
271 bool LLVMIrConstructor::EmitSlowPathEntry(Inst *inst)
272 {
273     ASSERT(GetGraph()->GetMode().IsFastPath());
274     ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
275            func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
276            func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
277            func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
278            func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
279            func_->getCallingConv() == llvm::CallingConv::ArkFast5);
280 
281     // Arguments
282     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
283     for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
284         args.push_back(GetInputValue(inst, i));
285     }
286     auto threadRegPtr = builder_.CreateIntToPtr(GetThreadRegValue(), builder_.getPtrTy());
287     auto frameRegPtr = builder_.CreateIntToPtr(GetRealFrameRegValue(), builder_.getPtrTy());
288     args.push_back(threadRegPtr);
289     args.push_back(frameRegPtr);
290 
291     // Types
292     ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
293     for (const auto &input : inst->GetInputs()) {
294         argTypes.push_back(GetExactType(input.GetInst()->GetType()));
295     }
296     argTypes.push_back(builder_.getPtrTy());
297     argTypes.push_back(builder_.getPtrTy());
298     auto ftype = llvm::FunctionType::get(GetType(inst->GetType()), argTypes, false);
299 
300     ASSERT(inst->CastToIntrinsic()->GetImms().size() == 2U);
301     uint32_t offset = inst->CastToIntrinsic()->GetImms()[1];
302 
303     auto addr = builder_.CreateConstInBoundsGEP1_64(builder_.getInt8Ty(), threadRegPtr, offset);
304     auto callee = builder_.CreateLoad(builder_.getPtrTy(), addr);
305 
306     auto call = builder_.CreateCall(ftype, callee, args, "");
307     call->setCallingConv(func_->getCallingConv());
308     call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_MustTail);
309     if (call->getType()->isVoidTy()) {
310         builder_.CreateRetVoid();
311     } else {
312         builder_.CreateRet(call);
313     }
314     return true;
315 }
316 
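// The ldaxr/stlxr intrinsics take an opaque pointer; the width of the exclusive access is
// conveyed through the "elementtype" attribute attached to the pointer argument below.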
317 bool LLVMIrConstructor::EmitExclusiveLoadWithAcquire(Inst *inst)
318 {
319     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
320     ASSERT(inst->GetInputType(0) == DataType::POINTER);
321     auto &ctx = func_->getContext();
322     auto addr = GetInputValue(inst, 0);
323     auto dstType = GetExactType(inst->GetType());
324     auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_ldaxr;
325     auto load = builder_.CreateUnaryIntrinsic(intrinsicId, addr);
326     load->addParamAttr(0, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, dstType));
327     ValueMapAdd(inst, load);
328     return true;
329 }
330 
331 bool LLVMIrConstructor::EmitExclusiveStoreWithRelease(Inst *inst)
332 {
333     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
334     ASSERT(inst->GetInputType(0) == DataType::POINTER);
335     auto &ctx = func_->getContext();
336     auto addr = GetInputValue(inst, 0);
337     auto value = GetInputValue(inst, 1);
338     auto type = value->getType();
339     auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_stlxr;
340     auto stlxr = llvm::Intrinsic::getDeclaration(func_->getParent(), intrinsicId, builder_.getPtrTy());
341     value = builder_.CreateZExtOrBitCast(value, stlxr->getFunctionType()->getParamType(0));
342     auto store = builder_.CreateCall(stlxr, {value, addr});
343     store->addParamAttr(1, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, type));
344     ValueMapAdd(inst, store);
345     return true;
346 }
347 
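// Hand-written epilogue: callee-saved registers are restored and the stack is unwound with
// inline assembly, ending in a plain "ret"/"retq", so LLVM's own epilogue is never reached
// (hence the trailing unreachable).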
348 bool LLVMIrConstructor::EmitInterpreterReturn()
349 {
350     ASSERT(GetGraph()->GetMode().IsInterpreter());
351 
352     // This constant is hardcoded in codegen_interpreter.h and in interpreter.irt
353     constexpr size_t SPILL_SLOTS = 32;
354     CFrameLayout fl(GetGraph()->GetArch(), SPILL_SLOTS);
355     constexpr bool SAVE_UNUSED_CALLEE_REGS = true;
356 
357     // Restore callee-saved registers
358     auto calleeRegsMask = GetCalleeRegsMask(GetGraph()->GetArch(), false, SAVE_UNUSED_CALLEE_REGS);
359     auto calleeVregsMask = GetCalleeRegsMask(GetGraph()->GetArch(), true, SAVE_UNUSED_CALLEE_REGS);
360     if (GetGraph()->GetArch() == Arch::AARCH64) {
361         constexpr bool SAVE_FRAME_AND_LINK_REGS = true;
362 
363         size_t slotSize = fl.GetSlotSize();
364         size_t dslotSize = slotSize * 2U;
365 
366         auto lastCalleeReg = fl.GetRegsSlotsCount() - calleeRegsMask.Count();
367         auto lastCalleeVreg = fl.GetRegsSlotsCount() - fl.GetCalleeRegistersCount(false) - calleeVregsMask.Count();
368         CreateInterpreterReturnRestoreRegs(calleeRegsMask, lastCalleeReg, false);
369         CreateInterpreterReturnRestoreRegs(calleeVregsMask, lastCalleeVreg, true);
370 
371         // Adjust SP
372         auto spToFrameTopSlots = fl.GetRegsSlotsCount() + CFrameRegs::Start() - CFrameReturnAddr::Start();
373         if (SAVE_FRAME_AND_LINK_REGS) {
374             spToFrameTopSlots -= CFrameLayout::GetFpLrSlotsCount();
375         }
376 
377         CreateInt32ImmAsm(&builder_,
378                           std::string("add  sp, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
379                           spToFrameTopSlots * slotSize);
380         CreateInt32ImmAsm(&builder_, "ldp  x29, x30, [sp], $0", dslotSize);
381         CreateBlackBoxAsm(&builder_, "ret");
382     } else {
383         // Currently x86_64 handlers do not use vector registers
384         ASSERT(calleeVregsMask.count() == 0);
385         auto regShift = DOUBLE_WORD_SIZE_BYTES *
386                         (fl.GetSpillsCount() + fl.GetCallerRegistersCount(false) + fl.GetCallerRegistersCount(true));
387         auto fpShift = DOUBLE_WORD_SIZE_BYTES * (2 + CFrameSlots::Start() - CFrameData::Start());
388 
389         std::string iasmStr =
390             std::string("leaq  ${0:c}(%rsp), %rsp").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT);
391         CreateInt32ImmAsm(&builder_, iasmStr, regShift);
392         Target target {GetGraph()->GetArch()};
393         while (calleeRegsMask.count() > 0) {
394             auto reg = calleeRegsMask.GetMinRegister();
395             calleeRegsMask ^= 1U << reg;
396             iasmStr = "pop  %" + target.GetRegName(reg, false);
397             CreateBlackBoxAsm(&builder_, iasmStr);
398         }
399         iasmStr = "leaq  " + std::to_string(fpShift) + "(%rsp), %rsp";
400         CreateBlackBoxAsm(&builder_, iasmStr);
401         CreateBlackBoxAsm(&builder_, "pop  %rbp");
402         CreateBlackBoxAsm(&builder_, "retq");
403     }
404     builder_.CreateUnreachable();
405 
406     return true;
407 }
408 
409 bool LLVMIrConstructor::EmitTailCall(Inst *inst)
410 {
411     ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
412            func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
413            func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
414            func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
415            func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
416            func_->getCallingConv() == llvm::CallingConv::ArkFast5 ||
417            func_->getCallingConv() == llvm::CallingConv::ArkInt);
418     llvm::CallInst *call;
419 
420     if (GetGraph()->GetMode().IsFastPath()) {
421         call = CreateTailCallFastPath(inst);
422     } else if (GetGraph()->GetMode().IsInterpreter()) {
423         call = CreateTailCallInterpreter(inst);
424     } else {
425         UNREACHABLE();
426     }
427     call->setCallingConv(func_->getCallingConv());
428     call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_Tail);
429     if (func_->getReturnType()->isVoidTy()) {
430         builder_.CreateRetVoid();
431     } else {
432         builder_.CreateRet(call);
433     }
434     std::fill(ccValues_.begin(), ccValues_.end(), nullptr);
435     return true;
436 }
437 
438 bool LLVMIrConstructor::EmitMemoryFence(memory_order::Order order)
439 {
440     CreateMemoryFence(order);
441     return true;
442 }
443 
444 template <uint32_t VECTOR_SIZE>
445 bool LLVMIrConstructor::EmitCompressUtf16ToUtf8CharsUsingSimd(Inst *inst)
446 {
447     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
448     ASSERT(inst->GetInputType(0) == DataType::POINTER);
449     ASSERT(inst->GetInputType(1) == DataType::POINTER);
450     static_assert(VECTOR_SIZE == VECTOR_SIZE_8 || VECTOR_SIZE == VECTOR_SIZE_16, "Unexpected vector size");
451     auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_neon_ld2;
452     auto vecTy = llvm::VectorType::get(builder_.getInt8Ty(), VECTOR_SIZE, false);
453 
454     auto u16Ptr = GetInputValue(inst, 0);  // ptr to src array of utf16 chars
455     auto u8Ptr = GetInputValue(inst, 1);   // ptr to dst array of utf8 chars
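    // ld2 loads 2 * VECTOR_SIZE bytes and de-interleaves them: the first result vector holds
    // the low byte of every (little-endian) utf16 unit, which is the compressed char to store.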
456     auto ld2 = llvm::Intrinsic::getDeclaration(func_->getParent(), intrinsicId, {vecTy, u16Ptr->getType()});
457     auto vld2 = builder_.CreateCall(ld2, {u16Ptr});
458     auto u8Vec = builder_.CreateExtractValue(vld2, {0});
459     builder_.CreateStore(u8Vec, u8Ptr);
460     return true;
461 }
462 
463 llvm::Value *LLVMIrConstructor::GetMappedValue(Inst *inst, DataType::Type type)
464 {
465     ASSERT(inputMap_.count(inst) == 1);
466     auto &typeMap = inputMap_.at(inst);
467     ASSERT(typeMap.count(type) == 1);
468     auto result = typeMap.at(type);
469     ASSERT(result != nullptr);
470     return result;
471 }
472 
473 llvm::Value *LLVMIrConstructor::GetInputValue(Inst *inst, size_t index, bool skipCoerce)
474 {
475     auto input = inst->GetInput(index).GetInst();
476     auto type = inst->GetInputType(index);
477     ASSERT(type != DataType::NO_TYPE);
478 
479     if (skipCoerce) {
480         ASSERT(input->GetType() == DataType::UINT64 || input->GetType() == DataType::INT64);
481         type = input->GetType();
482     }
483 
484     if (input->IsConst()) {
485         return GetInputValueFromConstant(input->CastToConstant(), type);
486     }
487     if (input->GetOpcode() == Opcode::NullPtr) {
488         auto llvmType = GetExactType(DataType::REFERENCE);
489         ASSERT(llvmType == builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
490         return llvm::Constant::getNullValue(llvmType);
491     }
492     return GetMappedValue(input, type);
493 }
494 
495 llvm::Value *LLVMIrConstructor::GetInputValueFromConstant(ConstantInst *constant, DataType::Type pandaType)
496 {
497     auto llvmType = GetExactType(pandaType);
498     if (pandaType == DataType::FLOAT64) {
499         double value = constant->GetDoubleValue();
500         return llvm::ConstantFP::get(llvmType, value);
501     }
502     if (pandaType == DataType::FLOAT32) {
503         float value = constant->GetFloatValue();
504         return llvm::ConstantFP::get(llvmType, value);
505     }
506     if (pandaType == DataType::POINTER) {
507         auto cval = static_cast<int64_t>(constant->GetIntValue());
508         auto integer = builder_.getInt64(cval);
509         return builder_.CreateIntToPtr(integer, builder_.getPtrTy());
510     }
511     if (DataType::IsTypeNumeric(pandaType)) {
512         auto isSigned = DataType::IsTypeSigned(pandaType);
513         auto cval = static_cast<int64_t>(constant->GetIntValue());
514         return llvm::ConstantInt::get(llvmType, cval, isSigned);
515     }
516     if (DataType::IsReference(pandaType) && constant->GetRawValue() == 0) {
517         return llvm::Constant::getNullValue(llvmType);
518     }
519     UNREACHABLE();
520 }
521 
522 // Initializers. BuildIr calls them
523 
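// The IR is built in passes: BuildBasicBlocks creates one empty llvm::BasicBlock per Ark
// block, BuildInstructions then fills them, and FillPhiInputs runs afterwards so that every
// predecessor's tail block already exists when phi incomings are wired up.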
524 void LLVMIrConstructor::BuildBasicBlocks()
525 {
526     auto &context = func_->getContext();
527     for (auto block : graph_->GetBlocksRPO()) {
528         if (block->IsEndBlock()) {
529             continue;
530         }
531         auto bb = llvm::BasicBlock::Create(context, llvm::StringRef("bb") + llvm::Twine(block->GetId()), func_);
532         AddBlock(block, bb);
533         // Record irtoc handlers that contain a return instruction
534         if (!graph_->GetMode().IsInterpreter()) {
535             continue;
536         }
537         for (auto inst : block->AllInsts()) {
538             if (inst->IsIntrinsic() && inst->CastToIntrinsic()->GetIntrinsicId() ==
539                                            RuntimeInterface::IntrinsicId::INTRINSIC_INTERPRETER_RETURN) {
540                 arkInterface_->AppendIrtocReturnHandler(func_->getName());
541             }
542         }
543     }
544 }
545 
546 void LLVMIrConstructor::BuildInstructions()
547 {
548     for (auto block : graph_->GetBlocksRPO()) {
549         if (block->IsEndBlock()) {
550             continue;
551         }
552         SetCurrentBasicBlock(GetTailBlock(block));
553         for (auto inst : block->AllInsts()) {
554             auto bb = GetCurrentBasicBlock();
555             if (!bb->empty() && llvm::isa<llvm::UnreachableInst>(*(bb->rbegin()))) {
556                 break;
557             }
558             VisitInstruction(inst);
559         }
560 
561         if ((block->GetSuccsBlocks().size() == 1 && !block->GetSuccessor(0)->IsEndBlock())) {
562             builder_.CreateBr(GetHeadBlock(block->GetSuccessor(0)));
563         }
564         ReplaceTailBlock(block, GetCurrentBasicBlock());
565     }
566 }
567 
568 void LLVMIrConstructor::FillPhiInputs()
569 {
570     for (auto block : graph_->GetBlocksRPO()) {
571         if (block->IsStartBlock() || block->IsEndBlock()) {
572             continue;
573         }
574         for (auto inst : block->PhiInsts()) {
575             auto phi = llvm::cast<llvm::PHINode>(GetMappedValue(inst, inst->GetType()));
576             for (size_t i = 0; i < inst->GetInputsCount(); i++) {
577                 auto inputBlock = block->GetPredBlockByIndex(i);
578                 auto input = GetInputValue(inst, i);
579                 phi->addIncoming(input, GetTailBlock(inputBlock));
580             }
581         }
582     }
583 }
584 
585 // Creator functions for internal usage
586 
587 void LLVMIrConstructor::CreateInterpreterReturnRestoreRegs(RegMask &regMask, size_t offset, bool fp)
588 {
589     int32_t slotSize = PointerSize(GetGraph()->GetArch());
590     int32_t dslotSize = slotSize * 2U;
591     int32_t totalSize = regMask.count() * slotSize;
592     auto startRegOffset = offset * DOUBLE_WORD_SIZE_BYTES;
593     auto endRegOffset = startRegOffset + std::max(0, totalSize - dslotSize);
594 
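    // ldp/ldr immediates used below must be 8-byte aligned and at most 504;
    // larger offsets are rebased onto the x16 scratch register first.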
595     constexpr uint32_t MAX_REPR_VAL = 504U;
596     bool representable = startRegOffset <= MAX_REPR_VAL && (startRegOffset & 0x7U) == 0 &&
597                          endRegOffset <= MAX_REPR_VAL && (endRegOffset & 0x7U) == 0;
598 
599     std::string baseReg = representable ? "sp" : "x16";
600     if (!representable) {
601         CreateInt32ImmAsm(&builder_,
602                           std::string("add  x16, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
603                           startRegOffset);
604         startRegOffset = 0;
605     }
606 
607     while (regMask.count() > 0) {
608         std::string asmString = regMask.count() / 2U > 0 ? "ldp " : "ldr ";
609         auto first = regMask.GetMinRegister();
610         asmString += (fp ? "d" : "x") + std::to_string(first);
611         regMask ^= 1U << first;
612         if (regMask.count() > 0) {
613             asmString += ", ";
614             auto second = regMask.GetMinRegister();
615             asmString += (fp ? "d" : "x") + std::to_string(second);
616             regMask ^= 1U << second;
617         }
618         asmString += ", [";
619         asmString += baseReg;
620         asmString += ", $0]";
621         if (representable) {
622             asmString += LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT;
623         }
624         CreateInt32ImmAsm(&builder_, asmString, startRegOffset);
625         startRegOffset += dslotSize;
626     }
627 }
628 
629 llvm::Value *LLVMIrConstructor::CreateBinaryOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
630 {
631     llvm::Value *x = GetInputValue(inst, 0);
632     llvm::Value *y = GetInputValue(inst, 1);
633 
634     if (x->getType()->isPointerTy()) {
635         if (y->getType()->isPointerTy()) {
636             ASSERT(opcode == llvm::Instruction::Sub);
637             x = builder_.CreatePtrToInt(x, builder_.getInt64Ty());
638             y = builder_.CreatePtrToInt(y, builder_.getInt64Ty());
639             return builder_.CreateBinOp(opcode, x, y);
640         }
641         if (y->getType()->isIntegerTy()) {
642             ASSERT(opcode == llvm::Instruction::Add);
643             ASSERT(x->getType()->isPointerTy());
644             return builder_.CreateInBoundsGEP(builder_.getInt8Ty(), x, y);
645         }
646         UNREACHABLE();
647     }
648     return builder_.CreateBinOp(opcode, x, y);
649 }
650 
651 llvm::Value *LLVMIrConstructor::CreateBinaryImmOp(Inst *inst, llvm::Instruction::BinaryOps opcode, uint64_t c)
652 {
653     ASSERT(IsTypeNumeric(inst->GetType()));
654     llvm::Value *x = GetInputValue(inst, 0);
655     if (x->getType()->isPointerTy()) {
656         ASSERT(x->getType()->isPointerTy());
657         ASSERT(opcode == llvm::Instruction::Add || opcode == llvm::Instruction::Sub);
658         if (opcode == llvm::Instruction::Sub) {
659             c = -c;
660         }
661         return builder_.CreateConstInBoundsGEP1_64(builder_.getInt8Ty(), x, c);
662     }
663     llvm::Value *y = CoerceValue(builder_.getInt64(c), DataType::INT64, inst->GetType());
664     return builder_.CreateBinOp(opcode, x, y);
665 }
666 
667 llvm::Value *LLVMIrConstructor::CreateShiftOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
668 {
669     llvm::Value *x = GetInputValue(inst, 0);
670     llvm::Value *y = GetInputValue(inst, 1);
671     auto targetType = inst->GetType();
672     bool target64 = (targetType == DataType::UINT64) || (targetType == DataType::INT64);
673     auto constexpr SHIFT32_RANGE = 0x1f;
674     auto constexpr SHIFT64_RANGE = 0x3f;
675 
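    // LLVM shifts yield a poison value when the amount is >= the operand width,
    // so mask the shift amount into range first.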
676     y = builder_.CreateBinOp(llvm::Instruction::And, y,
677                              llvm::ConstantInt::get(y->getType(), target64 ? SHIFT64_RANGE : SHIFT32_RANGE));
678 
679     return builder_.CreateBinOp(opcode, x, y);
680 }
681 
682 llvm::Value *LLVMIrConstructor::CreateSignDivMod(Inst *inst, llvm::Instruction::BinaryOps opcode)
683 {
684     ASSERT(opcode == llvm::Instruction::SDiv || opcode == llvm::Instruction::SRem);
685     llvm::Value *x = GetInputValue(inst, 0);
686     llvm::Value *y = GetInputValue(inst, 1);
687     auto arch = GetGraph()->GetArch();
688     if (arch == Arch::AARCH64 && !llvm::isa<llvm::Constant>(y)) {
689         return CreateAArch64SignDivMod(inst, opcode, x, y);
690     }
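    // LLVM's sdiv/srem are undefined for INT_MIN / -1, so the divisor == -1 case is split
    // out: x / -1 becomes -x and x % -1 becomes 0, everything else takes the normal opcode.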
691     auto &ctx = func_->getContext();
692     auto type = y->getType();
693     auto eqM1 = builder_.CreateICmpEQ(y, llvm::ConstantInt::get(type, -1));
694     auto m1Bb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_minus1"), func_);
695     auto notM1Bb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_normal"), func_);
696     auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_cont"), func_);
697     builder_.CreateCondBr(eqM1, m1Bb, notM1Bb);
698 
699     SetCurrentBasicBlock(m1Bb);
700     llvm::Value *m1Result;
701     if (opcode == llvm::Instruction::SDiv) {
702         m1Result = builder_.CreateNeg(x);
703     } else {
704         m1Result = llvm::ConstantInt::get(type, 0);
705     }
706     builder_.CreateBr(contBb);
707 
708     SetCurrentBasicBlock(notM1Bb);
709     auto result = builder_.CreateBinOp(opcode, x, y);
710     builder_.CreateBr(contBb);
711 
712     SetCurrentBasicBlock(contBb);
713     auto resultPhi = builder_.CreatePHI(y->getType(), 2U);
714     resultPhi->addIncoming(m1Result, m1Bb);
715     resultPhi->addIncoming(result, notM1Bb);
716     return resultPhi;
717 }
718 
719 llvm::Value *LLVMIrConstructor::CreateAArch64SignDivMod(Inst *inst, llvm::Instruction::BinaryOps opcode, llvm::Value *x,
720                                                         llvm::Value *y)
721 {  // LLVM opcodes SDiv/SRem don't fully suit our needs (int_min/-1 case is UB),
722     // but for now inline assembly is emitted only for the arm64 sdiv
723     auto targetType = inst->GetType();
724     bool target64 = (targetType == DataType::INT64);
725     llvm::Value *quotient {nullptr};
726     llvm::Value *result {nullptr};
727     llvm::Type *type {nullptr};
728     {
729         std::string itext;
730         if (targetType == DataType::INT8) {
731             type = builder_.getInt8Ty();
732             itext += "sxtb ${1:w}, ${1:w}\nsxtb ${2:w}, ${2:w}\n";
733         } else if (targetType == DataType::INT16) {
734             type = builder_.getInt16Ty();
735             itext += "sxth ${1:w}, ${1:w}\nsxth ${2:w}, ${2:w}\n";
736         } else {
737             ASSERT(target64 || targetType == DataType::INT32);
738             type = target64 ? builder_.getInt64Ty() : builder_.getInt32Ty();
739         }
740         itext += target64 ? "sdiv ${0:x}, ${1:x}, ${2:x}" : "sdiv ${0:w}, ${1:w}, ${2:w}";
741         auto itype = llvm::FunctionType::get(type, {type, type}, false);  // no var args
742         quotient = builder_.CreateCall(itype, llvm::InlineAsm::get(itype, itext, "=r,r,r", false), {x, y});
743         result = quotient;
744     }
745 
746     if (opcode == llvm::Instruction::SRem) {
747         auto modAsmType = llvm::FunctionType::get(type, {type, type, type}, false);  // no var args
748         // Inline asm "sdiv q, x, y" yields q = x / y
749         // Inline asm "msub r, x, y, q" yields r = x - y * q
750         std::string_view modAsm =
751             target64 ? "msub ${0:x}, ${3:x}, ${2:x}, ${1:x}" : "msub ${0:w}, ${3:w}, ${2:w}, ${1:w}";
752         auto remainder = builder_.CreateCall(modAsmType, llvm::InlineAsm::get(modAsmType, modAsm, "=r,r,r,r", false),
753                                              {x, y, result});
754         result = remainder;
755     }
756     return result;
757 }
758 
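// Three-way float comparison returning -1, 0 or 1; the g/l flavours differ only in which
// result NaN collapses to, e.g. Fcmpg(NaN, 1.0) yields 1 while Fcmpl(NaN, 1.0) yields -1.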
759 llvm::Value *LLVMIrConstructor::CreateFloatComparison(CmpInst *cmpInst, llvm::Value *x, llvm::Value *y)
760 {
761     // if x is less than y then return -1
762     // else return zero extend of (x > y)
763     llvm::CmpInst::Predicate greaterThanPredicate;
764     llvm::CmpInst::Predicate lessThanPredicate;
765     if (cmpInst->IsFcmpg()) {
766         // if x or y is nan then greater_than_predicate yields true
767         greaterThanPredicate = llvm::CmpInst::FCMP_UGT;
768         lessThanPredicate = llvm::CmpInst::FCMP_OLT;
769     } else if (cmpInst->IsFcmpl()) {
770         greaterThanPredicate = llvm::CmpInst::FCMP_OGT;
771         // if x or y is nan then less_than_predicate yields true
772         lessThanPredicate = llvm::CmpInst::FCMP_ULT;
773     } else {
774         ASSERT_PRINT(false, "cmp_inst must be either Fcmpg, or Fcmpl");
775         UNREACHABLE();
776     }
777     // x > y || (inst == Fcmpg && (x == NaN || y == NaN))
778     auto greaterThan = builder_.CreateFCmp(greaterThanPredicate, x, y);
779     // x < y || (inst == Fcmpl && (x == NaN || y == NaN))
780     auto lessThan = builder_.CreateFCmp(lessThanPredicate, x, y);
781     auto comparison = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
782     auto negativeOne = builder_.getInt32(-1);
783     return builder_.CreateSelect(lessThan, negativeOne, comparison);
784 }
785 
786 llvm::Value *LLVMIrConstructor::CreateIntegerComparison(CmpInst *inst, llvm::Value *x, llvm::Value *y)
787 {
788     ASSERT(x->getType() == y->getType());
789     llvm::Value *greaterThan;
790     llvm::Value *lessThan;
791 
792     if (DataType::IsTypeSigned(inst->GetOperandsType())) {
793         greaterThan = builder_.CreateICmpSGT(x, y);
794         lessThan = builder_.CreateICmpSLT(x, y);
795     } else {
796         greaterThan = builder_.CreateICmpUGT(x, y);
797         lessThan = builder_.CreateICmpULT(x, y);
798     }
799     auto castComparisonResult = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
800     auto negativeOne = builder_.getInt32(-1);
801     return builder_.CreateSelect(lessThan, negativeOne, castComparisonResult);
802 }
803 
804 llvm::Value *LLVMIrConstructor::CreateCastToInt(Inst *inst)
805 {
806     ASSERT(inst->GetInputsCount() == 1);
807 
808     llvm::Value *input = GetInputValue(inst, 0);
809     auto sourceType = input->getType();
810     auto targetType = inst->GetType();
811 
812     ASSERT_DO(sourceType->isFloatTy() || sourceType->isDoubleTy(),
813               std::cerr << "Unexpected source type: " << GetTypeName(sourceType) << ". Should be a float or double."
814                         << std::endl);
815 
816     auto llvmId = DataType::IsTypeSigned(targetType) ? llvm::Intrinsic::fptosi_sat : llvm::Intrinsic::fptoui_sat;
817     ArenaVector<llvm::Type *> intrinsicTypes(GetGraph()->GetLocalAllocator()->Adapter());
818     intrinsicTypes.assign({GetExactType(targetType), sourceType});
819     return builder_.CreateIntrinsic(llvmId, intrinsicTypes, {input}, nullptr);
820 }
821 
822 llvm::Value *LLVMIrConstructor::CreateLoadWithOrdering(Inst *inst, llvm::Value *value, llvm::AtomicOrdering ordering,
823                                                        const llvm::Twine &name)
824 {
825     auto pandaType = inst->GetType();
826     llvm::Type *type = GetExactType(pandaType);
827 
828     auto load = builder_.CreateLoad(type, value, false, name);  // C-like volatile is not applied
829     if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
830         auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(type);
831         load->setOrdering(ordering);
832         load->setAlignment(llvm::Align(alignment));
833     }
834 
835     return load;
836 }
837 
838 llvm::Value *LLVMIrConstructor::CreateStoreWithOrdering(llvm::Value *value, llvm::Value *ptr,
839                                                         llvm::AtomicOrdering ordering)
840 {
841     auto store = builder_.CreateStore(value, ptr, false);  // C-like volatile is not applied
842     if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
843         auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(value->getType());
844         store->setAlignment(llvm::Align(alignment));
845         store->setOrdering(ordering);
846     }
847     return store;
848 }
849 
850 void LLVMIrConstructor::CreatePreWRB(Inst *inst, llvm::Value *mem)
851 {
852     ASSERT(GetGraph()->GetRuntime()->GetPreType() == panda::mem::BarrierType::PRE_SATB_BARRIER);
853 
854     auto &ctx = func_->getContext();
855     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "pre_wrb_out"), func_);
856     EmitPreWRB(&builder_, mem, IsVolatileMemInst(inst), outBb, arkInterface_, GetThreadRegValue());
857 }
858 
859 void LLVMIrConstructor::CreatePostWRB(Inst *inst, llvm::Value *mem, llvm::Value *offset, llvm::Value *value)
860 {
861     ASSERT(GetGraph()->GetRuntime()->GetPostType() == panda::mem::BarrierType::POST_INTERGENERATIONAL_BARRIER ||
862            GetGraph()->GetRuntime()->GetPostType() == panda::mem::BarrierType::POST_INTERREGION_BARRIER);
863 
864     Inst *secondValue;
865     Inst *val = InstStoredValue(inst, &secondValue);
866     ASSERT(secondValue == nullptr);
867 
868     if (val->GetOpcode() == Opcode::NullPtr) {
869         return;
870     }
871 
872     ASSERT(GetGraph()->GetMode().IsInterpreter());
873     llvm::Value *frameRegValue = nullptr;
874     if (GetGraph()->GetArch() == Arch::X86_64) {
875         frameRegValue = GetRealFrameRegValue();
876     }
877     EmitPostWRB(&builder_, mem, offset, value, arkInterface_, GetThreadRegValue(), frameRegValue);
878 }
879 
880 llvm::Value *LLVMIrConstructor::CreateMemoryFence(memory_order::Order order)
881 {
882     llvm::AtomicOrdering ordering;
883     switch (order) {
884         case memory_order::RELEASE:
885             ordering = llvm::AtomicOrdering::Release;
886             break;
887         case memory_order::ACQUIRE:
888             ordering = llvm::AtomicOrdering::Acquire;
889             break;
890         case memory_order::FULL:
891             ordering = llvm::AtomicOrdering::SequentiallyConsistent;
892             break;
893         default:
894             UNREACHABLE();
895     }
896     return builder_.CreateFence(ordering);
897 }
898 
899 llvm::Value *LLVMIrConstructor::CreateCondition(ConditionCode cc, llvm::Value *x, llvm::Value *y)
900 {
901     if (cc == CC_TST_EQ || cc == CC_TST_NE) {
902         auto tst = builder_.CreateBinOp(llvm::Instruction::And, x, y);
903         return (cc == CC_TST_EQ) ? builder_.CreateIsNull(tst) : builder_.CreateIsNotNull(tst);
904     }
905     return builder_.CreateICmp(ICmpCodeConvert(cc), x, y);
906 }
907 
908 void LLVMIrConstructor::CreateIf(Inst *inst, llvm::Value *cond, bool likely, bool unlikely)
909 {
910     llvm::MDNode *weights = nullptr;
911     auto constexpr LIKELY = llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT;
912     auto constexpr UNLIKELY = llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT;
913     if (likely) {
914         weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(LIKELY, UNLIKELY);
915     } else if (unlikely) {
916         weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(UNLIKELY, LIKELY);
917     }
918     builder_.CreateCondBr(cond, GetHeadBlock(inst->GetBasicBlock()->GetTrueSuccessor()),
919                           GetHeadBlock(inst->GetBasicBlock()->GetFalseSuccessor()), weights);
920 }
921 
922 llvm::CallInst *LLVMIrConstructor::CreateTailCallFastPath(Inst *inst)
923 {
924     ASSERT(inst->GetInputs().Size() == 0);
925     // Arguments
926     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
927     ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
928     ASSERT(ccValues_.size() == func_->arg_size());
929     for (size_t i = 0; i < func_->arg_size(); ++i) {
930         args.push_back(i < ccValues_.size() && ccValues_.at(i) != nullptr ? ccValues_.at(i) : func_->getArg(i));
931         argTypes.push_back(args.at(i)->getType());
932     }
933     ASSERT(inst->CastToIntrinsic()->HasImms() && inst->CastToIntrinsic()->GetImms().size() == 1U);
934     uint32_t offset = inst->CastToIntrinsic()->GetImms()[0];
935 
936     auto ftype = llvm::FunctionType::get(func_->getReturnType(), argTypes, false);
937 
938     auto threadRegPtr = builder_.CreateIntToPtr(GetThreadRegValue(), builder_.getPtrTy());
939     auto addr = builder_.CreateConstInBoundsGEP1_64(builder_.getInt8Ty(), threadRegPtr, offset);
940     auto callee = builder_.CreateLoad(builder_.getPtrTy(), addr);
941 
942     return builder_.CreateCall(ftype, callee, args);
943 }
944 
945 llvm::CallInst *LLVMIrConstructor::CreateTailCallInterpreter(Inst *inst)
946 {
947     auto ptr = GetInputValue(inst, 0);
948     ASSERT_TYPE(ptr, builder_.getPtrTy());
949     ASSERT(ccValues_.size() == (GetGraph()->GetArch() == Arch::AARCH64 ? 8U : 7U));
950     ASSERT(ccValues_.at(0) != nullptr);  // pc
951     static constexpr unsigned ACC = 1U;
952     static constexpr unsigned ACC_TAG = 2U;
953     ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
954     for (size_t i = 0; i < cc_.size(); i++) {
955         if (ccValues_.at(i) != nullptr) {
956             argTypes.push_back(ccValues_.at(i)->getType());
957         } else {
958             argTypes.push_back(func_->getFunctionType()->getParamType(i));
959         }
960     }
961     if (ccValues_.at(ACC) == nullptr) {
962         ccValues_[ACC] = llvm::Constant::getNullValue(argTypes[ACC]);
963     }
964     if (ccValues_.at(ACC_TAG) == nullptr) {
965         ccValues_[ACC_TAG] = llvm::Constant::getNullValue(argTypes[ACC_TAG]);
966     }
967     ASSERT(ccValues_.at(3U) != nullptr);  // frame
968     ASSERT(ccValues_.at(4U) != nullptr);  // dispatch
969     if (GetGraph()->GetArch() == Arch::AARCH64) {
970         ASSERT(ccValues_.at(5U) != nullptr);  // moffset
971         ASSERT(ccValues_.at(6U) != nullptr);  // method_ptr
972         ASSERT(ccValues_.at(7U) != nullptr);  // thread
973     } else {
974         static constexpr unsigned REAL_FRAME_POINER = 6U;
975         ASSERT(ccValues_.at(5U) != nullptr);                 // thread
976         ASSERT(ccValues_.at(REAL_FRAME_POINER) == nullptr);  // real frame pointer
977         ccValues_[REAL_FRAME_POINER] = func_->getArg(REAL_FRAME_POINER);
978     }
979 
980     auto functionType = llvm::FunctionType::get(func_->getReturnType(), argTypes, false);
981     return builder_.CreateCall(functionType, ptr, ccValues_);
982 }
983 
984 // Getters
985 
986 llvm::FunctionType *LLVMIrConstructor::GetEntryFunctionType()
987 {
988     ArenaVector<llvm::Type *> argTypes(graph_->GetLocalAllocator()->Adapter());
989 
990     // ArkInt functions have fake parameters
991     if (graph_->GetMode().IsInterpreter()) {
992         for (size_t i = 0; i < cc_.size(); ++i) {
993             argTypes.push_back(builder_.getPtrTy());
994         }
995     }
996 
997     // Actual function arguments
998     auto method = graph_->GetMethod();
999     for (size_t i = 0; i < graph_->GetRuntime()->GetMethodTotalArgumentsCount(method); i++) {
1000         ASSERT(!graph_->GetMode().IsInterpreter());
1001         auto type = graph_->GetRuntime()->GetMethodTotalArgumentType(method, i);
1002         if (graph_->GetMode().IsFastPath()) {
1003             argTypes.push_back(GetExactType(type));
1004         } else {
1005             argTypes.push_back(GetType(type));
1006         }
1007     }
1008 
1009     // ThreadReg and RealFP for FastPaths
1010     if (graph_->GetMode().IsFastPath()) {
1011         argTypes.push_back(builder_.getPtrTy());
1012         argTypes.push_back(builder_.getPtrTy());
1013     }
1014 
1015     auto retType = graph_->GetRuntime()->GetMethodReturnType(method);
1016     ASSERT(graph_->GetMode().IsInterpreter() || retType != DataType::NO_TYPE);
1017     retType = retType == DataType::NO_TYPE ? DataType::VOID : retType;
1018     return llvm::FunctionType::get(GetType(retType), makeArrayRef(argTypes.data(), argTypes.size()), false);
1019 }
1020 
1021 llvm::Value *LLVMIrConstructor::GetThreadRegValue()
1022 {
1023     auto regInput = std::find(cc_.begin(), cc_.end(), GetThreadReg(GetGraph()->GetArch()));
1024     ASSERT(regInput != cc_.end());
1025     auto threadRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
1026     return threadRegValue;
1027 }
1028 
1029 llvm::Value *LLVMIrConstructor::GetRealFrameRegValue()
1030 {
1031     ASSERT(GetGraph()->GetMode().IsFastPath() || GetGraph()->GetArch() == Arch::X86_64);
1032     auto regInput = std::find(cc_.begin(), cc_.end(), GetRealFrameReg(GetGraph()->GetArch()));
1033     ASSERT(regInput != cc_.end());
1034     auto frameRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
1035     return frameRegValue;
1036 }
1037 
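// GetType returns the ABI-level LLVM type (integers narrower than 32 bits are widened to
// i32), whereas GetExactType below preserves the exact bit width.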
1038 llvm::Type *LLVMIrConstructor::GetType(DataType::Type pandaType)
1039 {
1040     switch (pandaType) {
1041         case DataType::VOID:
1042             return builder_.getVoidTy();
1043         case DataType::POINTER:
1044             return builder_.getPtrTy();
1045         case DataType::REFERENCE:
1046             return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
1047         case DataType::BOOL:
1048         case DataType::UINT8:
1049         case DataType::INT8:
1050         case DataType::UINT16:
1051         case DataType::INT16:
1052         case DataType::UINT32:
1053         case DataType::INT32:
1054             return builder_.getInt32Ty();
1055         case DataType::UINT64:
1056         case DataType::INT64:
1057             return builder_.getInt64Ty();
1058         case DataType::FLOAT32:
1059             return builder_.getFloatTy();
1060         case DataType::FLOAT64:
1061             return builder_.getDoubleTy();
1062         default:
1063             ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(pandaType)
1064                                         << "' to llvm type conversion."));
1065             UNREACHABLE();
1066     }
1067 }
1068 
1069 llvm::Type *LLVMIrConstructor::GetExactType(DataType::Type targetType)
1070 {
1071     switch (targetType) {
1072         case DataType::VOID:
1073             return builder_.getVoidTy();
1074         case DataType::POINTER:
1075             return builder_.getPtrTy();
1076         case DataType::REFERENCE:
1077             return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
1078         case DataType::BOOL:
1079         case DataType::UINT8:
1080         case DataType::INT8:
1081             return builder_.getInt8Ty();
1082         case DataType::UINT16:
1083         case DataType::INT16:
1084             return builder_.getInt16Ty();
1085         case DataType::UINT32:
1086         case DataType::INT32:
1087             return builder_.getInt32Ty();
1088         case DataType::UINT64:
1089         case DataType::INT64:
1090             return builder_.getInt64Ty();
1091         case DataType::FLOAT32:
1092             return builder_.getFloatTy();
1093         case DataType::FLOAT64:
1094             return builder_.getDoubleTy();
1095         default:
1096             ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(targetType)
1097                                         << "' to llvm type conversion."));
1098             UNREACHABLE();
1099     }
1100 }
1101 
1102 llvm::Instruction::CastOps LLVMIrConstructor::GetCastOp(DataType::Type from, DataType::Type to)
1103 {
1104     Arch arch = GetGraph()->GetArch();
1105     if (IsInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) > DataType::GetTypeSize(to, arch)) {
1106         // narrowing, e.g. U32TOU8, I64TOI32
1107         return llvm::Instruction::Trunc;
1108     }
1109     if (IsSignedInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
1110         // signed int widening, e.g. I32TOI64, I32TOU64
1111         return llvm::Instruction::SExt;
1112     }
1113     if (IsUnsignedInteger(from) && IsInteger(to) &&
1114         DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
1115         // unsigned int widening, e.g. U32TOI64, U8TOU64
1116         return llvm::Instruction::ZExt;
1117     }
1118     if (IsUnsignedInteger(from) && DataType::IsFloatType(to)) {
1119         // unsigned int to float, e.g. U32TOF64, U64TOF64
1120         return llvm::Instruction::UIToFP;
1121     }
1122     if (IsSignedInteger(from) && DataType::IsFloatType(to)) {
1123         // signed int to float e.g. I32TOF64, I64TOF64
1124         return llvm::Instruction::SIToFP;
1125     }
1126     if (DataType::IsFloatType(from) && DataType::IsFloatType(to)) {
1127         if (DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
1128             return llvm::Instruction::FPExt;
1129         }
1130         return llvm::Instruction::FPTrunc;
1131     }
1132     if (DataType::IsReference(from) && to == DataType::POINTER) {
1133         return llvm::Instruction::AddrSpaceCast;
1134     }
1135     ASSERT_DO(false, (std::cerr << "Cast from " << DataType::ToString(from) << " to " << DataType::ToString(to))
1136                          << " is not supported");
1137     UNREACHABLE();
1138 }
1139 
1140 // Various other helpers
1141 
1142 llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, DataType::Type sourceType, DataType::Type targetType)
1143 {
1144     ASSERT(value != nullptr);
1145     // Any type mismatch other than integer width is prohibited
1146     ASSERT_DO(!IsInteger(targetType) || value->getType()->isIntegerTy(),
1147               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be an integer."
1148                         << std::endl);
1149     ASSERT_DO(!DataType::IsReference(targetType) || value->getType()->isPointerTy(),
1150               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a pointer."
1151                         << std::endl);
1152     ASSERT_DO(targetType != DataType::FLOAT64 || value->getType()->isDoubleTy(),
1153               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a double."
1154                         << std::endl);
1155     ASSERT_DO(targetType != DataType::FLOAT32 || value->getType()->isFloatTy(),
1156               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a float."
1157                         << std::endl);
1158 
1159     if (!IsInteger(targetType)) {
1160         return value;
1161     }
1162     ASSERT(value->getType()->isIntegerTy());
1163 
1164     auto targetLlvmType = llvm::cast<llvm::IntegerType>(GetExactType(targetType));
1165     auto originalLlvmType = llvm::cast<llvm::IntegerType>(value->getType());
1166     ASSERT(originalLlvmType->getBitWidth() == DataType::GetTypeSize(sourceType, GetGraph()->GetArch()));
1167 
1168     llvm::CastInst::CastOps castOp;
1169     if (originalLlvmType->getBitWidth() > targetLlvmType->getBitWidth()) {
1170         castOp = llvm::Instruction::Trunc;
1171     } else if (originalLlvmType->getBitWidth() < targetLlvmType->getBitWidth()) {
1172         if (IsSignedInteger(sourceType)) {
1173             castOp = llvm::Instruction::SExt;
1174         } else {
1175             castOp = llvm::Instruction::ZExt;
1176         }
1177     } else {
1178         return value;
1179     }
1180     return builder_.CreateCast(castOp, value, targetLlvmType);
1181 }
1182 
1183 llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, llvm::Type *targetType)
1184 {
1185     auto valueType = value->getType();
1186     if (valueType == targetType) {
1187         return value;
1188     }
1189 
1190     if (!valueType->isPointerTy() && targetType->isPointerTy()) {
1191         // DataType::POINTER to target_type.
1192         // Example: i64 -> %"class.panda::Frame"*
1193         return builder_.CreateIntToPtr(value, targetType);
1194     }
1195     if (valueType->isPointerTy() && !targetType->isPointerTy()) {
1196         // value_type to DataType::POINTER
1197         // Example: %"class.panda::coretypes::String"* -> i64
1198         return builder_.CreatePtrToInt(value, targetType);
1199     }
1200 
1201     if (valueType->isIntegerTy() && targetType->isIntegerTy()) {
1202         auto valueWidth = llvm::cast<llvm::IntegerType>(valueType)->getBitWidth();
1203         auto targetWidth = llvm::cast<llvm::IntegerType>(targetType)->getBitWidth();
1204         if (valueWidth > targetWidth) {
1205             return builder_.CreateTrunc(value, targetType);
1206         }
1207         if (valueWidth < targetWidth) {
1208             return builder_.CreateZExt(value, targetType);
1209         }
1210     }
1211     if (valueType->isPointerTy() && targetType->isPointerTy()) {
1212         return builder_.CreateAddrSpaceCast(value, targetType);
1213     }
1214     UNREACHABLE();
1215 }
1216 
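// Records the LLVM value produced for an Ark instruction in inputMap_. Integer values are
// normalized to the exact width of the instruction's Ark type, and values registered for
// DataType::POINTER are additionally stored under REFERENCE and UINT64 (plus INT64 for LiveOut)
// so later lookups find the pointer without an extra conversion.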
1217 void LLVMIrConstructor::ValueMapAdd(Inst *inst, llvm::Value *value, bool setName)
1218 {
1219     auto type = inst->GetType();
1220     auto ltype = GetExactType(type);
1221     ASSERT(inputMap_.count(inst) == 0);
1222     auto it = inputMap_.emplace(inst, GetGraph()->GetLocalAllocator()->Adapter());
1223     ASSERT(it.second);
1224     ArenaUnorderedMap<DataType::Type, llvm::Value *> &typeMap = it.first->second;
1225 
1226     if (value != nullptr && setName) {
1227         value->setName(CreateNameForInst(inst));
1228     }
1229     if (value == nullptr || inst->GetOpcode() == Opcode::LiveOut || !ltype->isIntegerTy()) {
1230         typeMap.insert({type, value});
1231         if (type == DataType::POINTER) {
1232             ASSERT(value != nullptr);
1233             /*
1234              * Ark compiler implicitly converts:
1235              * 1. POINTER to REFERENCE
1236              * 2. POINTER to UINT64
1237              * 3. POINTER to INT64
1238              * Register the value under those types as well, so we get the pointer when requesting it in GetMappedValue.
1239              */
1240             typeMap.insert({DataType::REFERENCE, value});
1241             typeMap.insert({DataType::UINT64, value});
1242             if (inst->GetOpcode() == Opcode::LiveOut) {
1243                 typeMap.insert({DataType::INT64, value});
1244             }
1245         }
1246         return;
1247     }
1248     ASSERT(value->getType()->isIntegerTy());
1249     if (value->getType()->getIntegerBitWidth() > ltype->getIntegerBitWidth()) {
1250         value = builder_.CreateTrunc(value, ltype);
1251     } else if (value->getType()->getIntegerBitWidth() < ltype->getIntegerBitWidth()) {
1252         value = builder_.CreateZExt(value, ltype);
1253     }
1254     typeMap.insert({type, value});
1255     FillValueMapForUsers(inst, value, type, &typeMap);
1256 }
1257 
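// Pre-populates typeMap with the conversions the users of 'inst' will request: for every user
// input that refers to 'inst' under a different Ark type, a converted value is created up front
// (the LiveIn pointer is reused for references, LiveOut gets an inttoptr, everything else goes
// through CoerceValue).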
1258 void LLVMIrConstructor::FillValueMapForUsers(Inst *inst, llvm::Value *value, DataType::Type type,
1259                                              ArenaUnorderedMap<DataType::Type, llvm::Value *> *typeMap)
1260 {
1261     auto liveIn = inst->GetOpcode() == Opcode::LiveIn ? value : nullptr;
1262     for (auto &userItem : inst->GetUsers()) {
1263         auto user = userItem.GetInst();
1264         for (unsigned i = 0; i < user->GetInputsCount(); i++) {
1265             auto itype = user->GetInputType(i);
1266             auto input = user->GetInput(i).GetInst();
1267             if (input != inst || itype == type || typeMap->count(itype) != 0) {
1268                 continue;
1269             }
1270             llvm::Value *cvalue;
1271             if (liveIn != nullptr && itype == DataType::REFERENCE) {
1272                 cvalue = liveIn;
1273             } else if ((type == DataType::INT64 || type == DataType::UINT64) && itype == DataType::REFERENCE) {
1274                 ASSERT(user->GetOpcode() == Opcode::LiveOut);
1275                 cvalue = builder_.CreateIntToPtr(value, builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
1276             } else {
1277                 cvalue = CoerceValue(value, type, itype);
1278             }
1279             typeMap->insert({itype, cvalue});
1280         }
1281     }
1282 }
1283 
1284 // Instruction Visitors
1285 
1286 // Constant and NullPtr are processed directly in GetInputValue
1287 void LLVMIrConstructor::VisitConstant([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
1288 {
1289     ASSERT(inst->GetBasicBlock()->IsStartBlock());
1290 }
1291 
1292 void LLVMIrConstructor::VisitNullPtr([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
1293 {
1294     ASSERT(inst->GetBasicBlock()->IsStartBlock());
1295 }
1296 
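// LiveIn pins a value to a specific register of the interpreter calling convention (cc_): the
// matching function argument is located by register number and coerced to the instruction's
// exact LLVM type.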
1297 void LLVMIrConstructor::VisitLiveIn(GraphVisitor *v, Inst *inst)
1298 {
1299     auto ctor = static_cast<LLVMIrConstructor *>(v);
1300     ASSERT(inst->GetBasicBlock()->IsStartBlock());
1301 
1302     auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->CastToLiveIn()->GetDstReg());
1303     ASSERT(regInput != ctor->cc_.end());
1304     auto idx = std::distance(ctor->cc_.begin(), regInput);
1305     auto n = ctor->func_->arg_begin() + idx;
1306     ctor->ValueMapAdd(inst, ctor->CoerceValue(n, ctor->GetExactType(inst->GetType())));
1307 }
1308 
1309 void LLVMIrConstructor::VisitParameter(GraphVisitor *v, Inst *inst)
1310 {
1311     ASSERT(inst->GetBasicBlock()->IsStartBlock());
1312     auto ctor = static_cast<LLVMIrConstructor *>(v);
1313     ASSERT(ctor->GetGraph()->GetMode().IsFastPath());
1314     auto n = ctor->GetArgument(inst->CastToParameter()->GetArgNumber());
1315     ctor->ValueMapAdd(inst, n, false);
1316 }
1317 
1318 void LLVMIrConstructor::VisitReturnVoid(GraphVisitor *v, Inst *inst)
1319 {
1320     auto ctor = static_cast<LLVMIrConstructor *>(v);
1321     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
1322         ctor->CreateMemoryFence(memory_order::RELEASE);
1323     }
1324     ctor->builder_.CreateRetVoid();
1325 }
1326 
1327 void LLVMIrConstructor::VisitReturn(GraphVisitor *v, Inst *inst)
1328 {
1329     auto ctor = static_cast<LLVMIrConstructor *>(v);
1330     auto ret = ctor->GetInputValue(inst, 0);
1331 
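    // Integer values narrower than 32 bits are widened to INT32 before being returned
    // (presumably to match the return-value register convention)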
1332     auto type = inst->GetType();
1333     if (DataType::IsLessInt32(type)) {
1334         ret = ctor->CoerceValue(ret, type, DataType::INT32);
1335     }
1336 
1337     ctor->builder_.CreateRet(ret);
1338 }
1339 
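// LiveOut is the converse of LiveIn: the value is coerced to its exact LLVM type and recorded in
// ccValues_ at the position of its destination register, presumably to be materialized into that
// register when control leaves the function.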
1340 void LLVMIrConstructor::VisitLiveOut(GraphVisitor *v, Inst *inst)
1341 {
1342     auto ctor = static_cast<LLVMIrConstructor *>(v);
1343     auto input = ctor->GetInputValue(inst, 0);
1344 
1345     auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->GetDstReg());
1346     ASSERT(regInput != ctor->cc_.end());
1347     size_t idx = std::distance(ctor->cc_.begin(), regInput);
1348     ASSERT(ctor->ccValues_[idx] == nullptr);
1349 
1350     // LiveOut is not allowed for the real frame register
1351     ASSERT(ctor->GetGraph()->GetArch() == Arch::AARCH64 || idx + 1 != ctor->cc_.size());
1352     auto value = ctor->CoerceValue(input, ctor->GetExactType(inst->GetType()));
1353     ctor->ccValues_[idx] = value;
1354     ctor->ValueMapAdd(inst, value, false);
1355 }
1356 
1357 void LLVMIrConstructor::VisitLoad(GraphVisitor *v, Inst *inst)
1358 {
1359     auto ctor = static_cast<LLVMIrConstructor *>(v);
1360     auto srcPtr = ctor->GetInputValue(inst, 0);
1361     ASSERT(srcPtr->getType()->isPointerTy());
1362 
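    // The offset is normally taken in its coerced form, but a 64-bit integer offset is
    // requested raw (the extra 'true' argument to GetInputValue appears to skip coercion)
    // so the full-width value feeds the byte-wise GEP below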
1363     llvm::Value *offset;
1364     auto offsetInput = inst->GetInput(1).GetInst();
1365     auto offsetItype = offsetInput->GetType();
1366     if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
1367         ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
1368         offset = ctor->GetInputValue(inst, 1, true);
1369     } else {
1370         offset = ctor->GetInputValue(inst, 1);
1371     }
1372 
1373     ASSERT(srcPtr->getType()->isPointerTy());
1374     auto ptr = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);
1375 
1376     auto n = ctor->CreateLoadWithOrdering(inst, ptr, ToAtomicOrdering(inst->CastToLoad()->GetVolatile()));
1377     ctor->ValueMapAdd(inst, n);
1378 }
1379 
1380 void LLVMIrConstructor::VisitStore(GraphVisitor *v, Inst *inst)
1381 {
1382     auto ctor = static_cast<LLVMIrConstructor *>(v);
1383     auto srcPtr = ctor->GetInputValue(inst, 0);
1384     auto value = ctor->GetInputValue(inst, 2U);
1385 
1386     llvm::Value *offset;
1387     auto offsetInput = inst->GetInput(1).GetInst();
1388     auto offsetItype = offsetInput->GetType();
1389     if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
1390         ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
1391         offset = ctor->GetInputValue(inst, 1, true);
1392     } else {
1393         offset = ctor->GetInputValue(inst, 1);
1394     }
1395 
1396     auto ptrPlus = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);
1397 
1398     // Pre
1399     if (inst->CastToStore()->GetNeedBarrier()) {
1400         ctor->CreatePreWRB(inst, ptrPlus);
1401     }
1402     // Write
1403     ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStore()->GetVolatile()));
1404     // Post
1405     if (inst->CastToStore()->GetNeedBarrier()) {
1406         ctor->CreatePostWRB(inst, srcPtr, offset, value);
1407     }
1408 }
1409 
1410 void LLVMIrConstructor::VisitLoadI(GraphVisitor *v, Inst *inst)
1411 {
1412     auto ctor = static_cast<LLVMIrConstructor *>(v);
1413     auto srcPtr = ctor->GetInputValue(inst, 0);
1414     auto index = inst->CastToLoadI()->GetImm();
1415 
1416     ASSERT(srcPtr->getType()->isPointerTy());
1417     auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);
1418 
1419     auto n = ctor->CreateLoadWithOrdering(inst, ptrPlus, ToAtomicOrdering(inst->CastToLoadI()->GetVolatile()));
1420     ctor->ValueMapAdd(inst, n);
1421 }
1422 
1423 void LLVMIrConstructor::VisitStoreI(GraphVisitor *v, Inst *inst)
1424 {
1425     auto ctor = static_cast<LLVMIrConstructor *>(v);
1426     auto srcPtr = ctor->GetInputValue(inst, 0);
1427     auto value = ctor->GetInputValue(inst, 1);
1428 
1429     auto index = inst->CastToStoreI()->GetImm();
1430     ASSERT(srcPtr->getType()->isPointerTy());
1431     auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);
1432 
1433     // Pre
1434     if (inst->CastToStoreI()->GetNeedBarrier()) {
1435         ctor->CreatePreWRB(inst, ptrPlus);
1436     }
1437     // Write
1438     ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStoreI()->GetVolatile()));
1439     // Post
1440     if (inst->CastToStoreI()->GetNeedBarrier()) {
1441         ctor->CreatePostWRB(inst, srcPtr, ctor->builder_.getInt32(index), value);
1442     }
1443 }
1444 
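// Ark Bitcast between pointer-like and integer types cannot be a plain LLVM bitcast: POINTER
// sources become ptrtoint, REFERENCE/POINTER results become inttoptr (references land in the GC
// address space), and only the remaining same-width casts fall through to CreateBitCast.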
1445 void LLVMIrConstructor::VisitBitcast(GraphVisitor *v, Inst *inst)
1446 {
1447     auto ctor = static_cast<LLVMIrConstructor *>(v);
1448     auto type = inst->GetType();
1449     auto llvmTargetType = ctor->GetExactType(type);
1450     auto input = ctor->GetInputValue(inst, 0);
1451     auto itype = inst->GetInputType(0);
1452 
1453     llvm::Value *n;
1454     if (itype == DataType::POINTER) {
1455         ASSERT(!llvmTargetType->isPointerTy());
1456         n = ctor->builder_.CreatePtrToInt(input, llvmTargetType);
1457     } else {
1458         if (type == DataType::REFERENCE) {
1459             n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
1460         } else if (type == DataType::POINTER) {
1461             n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy());
1462         } else {
1463             n = ctor->builder_.CreateBitCast(input, llvmTargetType);
1464         }
1465     }
1466     ctor->ValueMapAdd(inst, n);
1467 }
1468 
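// Casts that change nothing at the LLVM level are forwarded as-is. Float-to-int casts go through
// CreateCastToInt (expected to implement Ark's saturating conversion semantics), and casts to
// BOOL are computed as "value != 0" and zero-extended to i8 instead of being truncated.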
1469 void LLVMIrConstructor::VisitCast(GraphVisitor *v, Inst *inst)
1470 {
1471     auto ctor = static_cast<LLVMIrConstructor *>(v);
1472     auto x = ctor->GetInputValue(inst, 0);
1473 
1474     auto type = inst->GetInputType(0);
1475     auto targetType = inst->GetType();
1476     auto llvmTargetType = ctor->GetExactType(targetType);
1477     // Do not cast if either the Ark types or the LLVM types already match
1478     if (type == targetType || x->getType() == llvmTargetType) {
1479         ctor->ValueMapAdd(inst, x, false);
1480         return;
1481     }
1482 
1483     if (DataType::IsFloatType(type) && IsInteger(targetType)) {
1484         // float to int, e.g. F64TOI32, F32TOI64, F64TOU32, F32TOU64
1485         auto n = ctor->CreateCastToInt(inst);
1486         ctor->ValueMapAdd(inst, n);
1487         return;
1488     }
1489     auto op = ctor->GetCastOp(type, targetType);
1490     if (targetType == DataType::BOOL) {
1491         ASSERT(op == llvm::Instruction::Trunc);
1492         auto u1 = ctor->builder_.CreateIsNotNull(x, CreateNameForInst(inst));
1493         auto n = ctor->builder_.CreateZExt(u1, ctor->builder_.getInt8Ty());
1494         ctor->ValueMapAdd(inst, n, false);
1495         return;
1496     }
1497     auto n = ctor->builder_.CreateCast(op, x, llvmTargetType);
1498     ctor->ValueMapAdd(inst, n);
1499 }
1500 
1501 void LLVMIrConstructor::VisitAnd(GraphVisitor *v, Inst *inst)
1502 {
1503     auto ctor = static_cast<LLVMIrConstructor *>(v);
1504     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::And);
1505     ctor->ValueMapAdd(inst, n);
1506 }
1507 
1508 void LLVMIrConstructor::VisitAndI(GraphVisitor *v, Inst *inst)
1509 {
1510     auto ctor = static_cast<LLVMIrConstructor *>(v);
1511     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::And, inst->CastToAndI()->GetImm());
1512     ctor->ValueMapAdd(inst, n);
1513 }
1514 
1515 void LLVMIrConstructor::VisitOr(GraphVisitor *v, Inst *inst)
1516 {
1517     auto ctor = static_cast<LLVMIrConstructor *>(v);
1518     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Or);
1519     ctor->ValueMapAdd(inst, n);
1520 }
1521 
1522 void LLVMIrConstructor::VisitOrI(GraphVisitor *v, Inst *inst)
1523 {
1524     auto ctor = static_cast<LLVMIrConstructor *>(v);
1525     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Or, inst->CastToOrI()->GetImm());
1526     ctor->ValueMapAdd(inst, n);
1527 }
1528 
1529 void LLVMIrConstructor::VisitXor(GraphVisitor *v, Inst *inst)
1530 {
1531     auto ctor = static_cast<LLVMIrConstructor *>(v);
1532     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Xor);
1533     ctor->ValueMapAdd(inst, n);
1534 }
1535 
1536 void LLVMIrConstructor::VisitXorI(GraphVisitor *v, Inst *inst)
1537 {
1538     auto ctor = static_cast<LLVMIrConstructor *>(v);
1539     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Xor, inst->CastToXorI()->GetImm());
1540     ctor->ValueMapAdd(inst, n);
1541 }
1542 
1543 void LLVMIrConstructor::VisitShl(GraphVisitor *v, Inst *inst)
1544 {
1545     auto ctor = static_cast<LLVMIrConstructor *>(v);
1546     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::Shl);
1547     ctor->ValueMapAdd(inst, n);
1548 }
1549 
1550 void LLVMIrConstructor::VisitShlI(GraphVisitor *v, Inst *inst)
1551 {
1552     auto ctor = static_cast<LLVMIrConstructor *>(v);
1553     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Shl, inst->CastToShlI()->GetImm());
1554     ctor->ValueMapAdd(inst, n);
1555 }
1556 
1557 void LLVMIrConstructor::VisitShr(GraphVisitor *v, Inst *inst)
1558 {
1559     auto ctor = static_cast<LLVMIrConstructor *>(v);
1560     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::LShr);
1561     ctor->ValueMapAdd(inst, n);
1562 }
1563 
1564 void LLVMIrConstructor::VisitShrI(GraphVisitor *v, Inst *inst)
1565 {
1566     auto ctor = static_cast<LLVMIrConstructor *>(v);
1567     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::LShr, inst->CastToShrI()->GetImm());
1568     ctor->ValueMapAdd(inst, n);
1569 }
1570 
1571 void LLVMIrConstructor::VisitAShr(GraphVisitor *v, Inst *inst)
1572 {
1573     auto ctor = static_cast<LLVMIrConstructor *>(v);
1574     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::AShr);
1575     ctor->ValueMapAdd(inst, n);
1576 }
1577 
1578 void LLVMIrConstructor::VisitAShrI(GraphVisitor *v, Inst *inst)
1579 {
1580     auto ctor = static_cast<LLVMIrConstructor *>(v);
1581     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::AShr, inst->CastToAShrI()->GetImm());
1582     ctor->ValueMapAdd(inst, n);
1583 }
1584 
1585 void LLVMIrConstructor::VisitAdd(GraphVisitor *v, Inst *inst)
1586 {
1587     auto ctor = static_cast<LLVMIrConstructor *>(v);
1588     llvm::Value *n;
1589     if (IsFloatType(inst->GetType())) {
1590         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FAdd);
1591     } else if (IsTypeNumeric(inst->GetType())) {
1592         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Add);
1593     } else {
1594         UNREACHABLE();
1595     }
1596     ctor->ValueMapAdd(inst, n);
1597 }
1598 
1599 void LLVMIrConstructor::VisitAddI(GraphVisitor *v, Inst *inst)
1600 {
1601     auto ctor = static_cast<LLVMIrConstructor *>(v);
1602     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Add, inst->CastToAddI()->GetImm());
1603     ctor->ValueMapAdd(inst, n);
1604 }
1605 
1606 void LLVMIrConstructor::VisitSub(GraphVisitor *v, Inst *inst)
1607 {
1608     auto ctor = static_cast<LLVMIrConstructor *>(v);
1609     llvm::Value *n;
1610     if (IsFloatType(inst->GetType())) {
1611         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FSub);
1612     } else if (IsTypeNumeric(inst->GetType())) {
1613         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Sub);
1614     } else {
1615         UNREACHABLE();
1616     }
1617     ctor->ValueMapAdd(inst, n);
1618 }
1619 
1620 void LLVMIrConstructor::VisitSubI(GraphVisitor *v, Inst *inst)
1621 {
1622     auto ctor = static_cast<LLVMIrConstructor *>(v);
1623     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Sub, inst->CastToSubI()->GetImm());
1624     ctor->ValueMapAdd(inst, n);
1625 }
1626 
1627 void LLVMIrConstructor::VisitMul(GraphVisitor *v, Inst *inst)
1628 {
1629     auto ctor = static_cast<LLVMIrConstructor *>(v);
1630     llvm::Value *n;
1631     if (IsFloatType(inst->GetType())) {
1632         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FMul);
1633     } else if (IsTypeNumeric(inst->GetType())) {
1634         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Mul);
1635     } else {
1636         UNREACHABLE();
1637     }
1638     ctor->ValueMapAdd(inst, n);
1639 }
1640 
1641 void LLVMIrConstructor::VisitMulI(GraphVisitor *v, Inst *inst)
1642 {
1643     auto ctor = static_cast<LLVMIrConstructor *>(v);
1644     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Mul, inst->CastToMulI()->GetImm());
1645     ctor->ValueMapAdd(inst, n);
1646 }
1647 
1648 void LLVMIrConstructor::VisitDiv(GraphVisitor *v, Inst *inst)
1649 {
1650     auto ctor = static_cast<LLVMIrConstructor *>(v);
1651     auto type = inst->GetType();
1652     llvm::Value *n;
1653     if (IsFloatType(type)) {
1654         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FDiv);
1655     } else if (IsInteger(type)) {
1656         if (IsSignedInteger(type)) {
1657             n = ctor->CreateSignDivMod(inst, llvm::Instruction::SDiv);
1658         } else {
1659             n = ctor->CreateBinaryOp(inst, llvm::Instruction::UDiv);
1660         }
1661     } else {
1662         UNREACHABLE();
1663     }
1664     ctor->ValueMapAdd(inst, n);
1665 }
1666 
1667 void LLVMIrConstructor::VisitMod(GraphVisitor *v, Inst *inst)
1668 {
1669     auto ctor = static_cast<LLVMIrConstructor *>(v);
1670     auto type = inst->GetType();
1671     ASSERT(IsInteger(type));
1672     llvm::Value *n;
1673     if (IsSignedInteger(type)) {
1674         n = ctor->CreateSignDivMod(inst, llvm::Instruction::SRem);
1675     } else {
1676         n = ctor->CreateBinaryOp(inst, llvm::Instruction::URem);
1677     }
1678     ctor->ValueMapAdd(inst, n);
1679 }
1680 
1681 void LLVMIrConstructor::VisitCompare(GraphVisitor *v, Inst *inst)
1682 {
1683     auto ctor = static_cast<LLVMIrConstructor *>(v);
1684     auto compareInst = inst->CastToCompare();
1685     auto operandsType = compareInst->GetOperandsType();
1686 
1687     llvm::Value *x = ctor->GetInputValue(inst, 0);
1688     llvm::Value *y = ctor->GetInputValue(inst, 1);
1689 
1690     llvm::Value *n = nullptr;
1691     if (IsInteger(operandsType) || DataType::IsReference(operandsType)) {
1692         n = ctor->CreateCondition(compareInst->GetCc(), x, y);
1693     } else {
1694         n = ctor->builder_.CreateFCmp(FCmpCodeConvert(compareInst->GetCc()), x, y);
1695     }
1696     ctor->ValueMapAdd(inst, n);
1697 }
1698 
1699 void LLVMIrConstructor::VisitCmp(GraphVisitor *v, Inst *inst)
1700 {
1701     auto ctor = static_cast<LLVMIrConstructor *>(v);
1702     CmpInst *cmpInst = inst->CastToCmp();
1703     DataType::Type operandsType = cmpInst->GetOperandsType();
1704 
1705     auto x = ctor->GetInputValue(inst, 0);
1706     auto y = ctor->GetInputValue(inst, 1);
1707     llvm::Value *n;
1708     if (DataType::IsFloatType(operandsType)) {
1709         n = ctor->CreateFloatComparison(cmpInst, x, y);
1710     } else if (IsInteger(operandsType)) {
1711         n = ctor->CreateIntegerComparison(cmpInst, x, y);
1712     } else {
1713         ASSERT_DO(false, (std::cerr << "Unsupported comparison for operands of type = "
1714                                     << DataType::ToString(operandsType) << std::endl));
1715         UNREACHABLE();
1716     }
1717     ctor->ValueMapAdd(inst, n);
1718 }
1719 
1720 void LLVMIrConstructor::VisitNeg(GraphVisitor *v, Inst *inst)
1721 {
1722     auto ctor = static_cast<LLVMIrConstructor *>(v);
1723     auto inputType = inst->GetInputType(0);
1724     auto toNegate = ctor->GetInputValue(inst, 0);
1725     llvm::Value *n;
1726     if (inputType == DataType::Type::FLOAT64 || inputType == DataType::Type::FLOAT32) {
1727         n = ctor->builder_.CreateFNeg(toNegate);
1728     } else if (IsInteger(inputType)) {
1729         n = ctor->builder_.CreateNeg(toNegate);
1730     } else {
1731         ASSERT_DO(false, (std::cerr << "Negation is not supported for" << DataType::ToString(inputType) << std::endl));
1732         UNREACHABLE();
1733     }
1734     ctor->ValueMapAdd(inst, n);
1735 }
1736 
1737 void LLVMIrConstructor::VisitNot(GraphVisitor *v, Inst *inst)
1738 {
1739     ASSERT(inst->GetInputsCount() == 1);
1740     ASSERT(IsInteger(inst->GetInputType(0)));
1741 
1742     auto ctor = static_cast<LLVMIrConstructor *>(v);
1743     auto input = ctor->GetInputValue(inst, 0);
1744 
1745     auto notOperator = ctor->builder_.CreateNot(input);
1746     ctor->ValueMapAdd(inst, notOperator);
1747 }
1748 
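// Conditional branch against an immediate. The common "x != 0" test on an integer is emitted as
// a plain truncation to i1; otherwise the immediate is materialized as a constant of the input's
// type (a null or casted pointer constant for pointer inputs) and compared via CreateCondition.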
1749 void LLVMIrConstructor::VisitIfImm(GraphVisitor *v, Inst *inst)
1750 {
1751     auto ctor = static_cast<LLVMIrConstructor *>(v);
1752     auto x = ctor->GetInputValue(inst, 0);
1753     auto ifimm = inst->CastToIfImm();
1754 
1755     llvm::Value *cond = nullptr;
1756     if (ifimm->GetCc() == ConditionCode::CC_NE && ifimm->GetImm() == 0 && x->getType()->isIntegerTy()) {
1757         cond = ctor->builder_.CreateTrunc(x, ctor->builder_.getInt1Ty());
1758     } else {
1759         ASSERT(x->getType()->isIntOrPtrTy());
1760         llvm::Constant *immCst;
1761         if (x->getType()->isPointerTy()) {
1762             if (ifimm->GetImm() == 0) {
1763                 immCst = llvm::ConstantPointerNull::get(llvm::cast<llvm::PointerType>(x->getType()));
1764             } else {
1765                 immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
1766                 immCst = llvm::ConstantExpr::getPointerCast(immCst, x->getType());
1767             }
1768         } else {
1769             immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
1770         }
1771         cond = ctor->CreateCondition(ifimm->GetCc(), x, immCst);
1772     }
1773     ctor->CreateIf(inst, cond, ifimm->IsLikely(), ifimm->IsUnlikely());
1774 }
1775 
1776 void LLVMIrConstructor::VisitIf(GraphVisitor *v, Inst *inst)
1777 {
1778     auto ctor = static_cast<LLVMIrConstructor *>(v);
1779     auto x = ctor->GetInputValue(inst, 0);
1780     auto y = ctor->GetInputValue(inst, 1);
1781     ASSERT(x->getType()->isIntOrPtrTy());
1782     ASSERT(y->getType()->isIntOrPtrTy());
1783     auto ifi = inst->CastToIf();
1784     auto cond = ctor->CreateCondition(ifi->GetCc(), x, y);
1785     ctor->CreateIf(inst, cond, ifi->IsLikely(), ifi->IsUnlikely());
1786 }
1787 
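// Indirect calls carry no prototype, so the llvm::FunctionType is rebuilt from the Ark types of
// the remaining inputs (input 0 is the callee pointer) before emitting the call.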
1788 void LLVMIrConstructor::VisitCallIndirect(GraphVisitor *v, Inst *inst)
1789 {
1790     auto ctor = static_cast<LLVMIrConstructor *>(v);
1791     auto ptr = ctor->GetInputValue(inst, 0);
1792     ASSERT_TYPE(ptr, ctor->builder_.getPtrTy());
1793     // Build FunctionType
1794     ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
1795     ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
1796     for (size_t i = 1; i < inst->GetInputs().Size(); ++i) {
1797         argTypes.push_back(ctor->GetType(inst->GetInput(i).GetInst()->GetType()));
1798         args.push_back(ctor->GetInputValue(inst, i));
1799     }
1800     auto retType = ctor->GetType(inst->GetType());
1801     auto funcType = llvm::FunctionType::get(retType, argTypes, false);
1802     auto call = ctor->builder_.CreateCall(funcType, ptr, args);
1803     if (!retType->isVoidTy()) {
1804         ctor->ValueMapAdd(inst, call);
1805     }
1806 }
1807 
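// Managed code is not supported here, so calls target external (Irtoc/runtime) functions: a
// declaration is created lazily from the instruction's Ark types, arguments are coerced to the
// declared parameter types, and known no-alias Irtoc entrypoints get a NoAlias return attribute.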
1808 void LLVMIrConstructor::VisitCall(GraphVisitor *v, Inst *inst)
1809 {
1810     auto ctor = static_cast<LLVMIrConstructor *>(v);
1811     ASSERT(!ctor->GetGraph()->SupportManagedCode());
1812 
1813     // Prepare external call if needed
1814     auto externalId = inst->CastToCall()->GetCallMethodId();
1815     auto runtime = ctor->GetGraph()->GetRuntime();
1816     auto externalName = runtime->GetExternalMethodName(ctor->GetGraph()->GetMethod(), externalId);
1817     auto function = ctor->func_->getParent()->getFunction(externalName);
1818     if (function == nullptr) {
1819         ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
1820         for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
1821             argTypes.push_back(ctor->GetType(inst->GetInputType(i)));
1822         }
1823         auto ftype = llvm::FunctionType::get(ctor->GetType(inst->GetType()), argTypes, false);
1824         function =
1825             llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, ctor->func_->getParent());
1826     }
1827     // Arguments
1828     ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
1829     for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
1830         args.push_back(ctor->CoerceValue(ctor->GetInputValue(inst, i), function->getArg(i)->getType()));
1831     }
1832     // Call
1833     auto call = ctor->builder_.CreateCall(function->getFunctionType(), function, args);
1834 
1835     if (IsNoAliasIrtocFunction(externalName)) {
1836         ASSERT(call->getType()->isPointerTy());
1837         call->addRetAttr(llvm::Attribute::NoAlias);
1838     } else {
1839         ASSERT(call->getType()->isPointerTy() ^ !IsPtrIgnIrtocFunction(externalName));
1840     }
1841 
1842     // Check if function has debug info
1843     if (function->getSubprogram() != nullptr) {
1844         ctor->debugData_->SetLocation(call, inst->GetPc());
1845     }
1846 
1847     if (inst->GetType() != DataType::VOID) {
1848         ctor->ValueMapAdd(inst, ctor->CoerceValue(call, ctor->GetType(inst->GetType())));
1849     }
1850 }
1851 
1852 void LLVMIrConstructor::VisitPhi(GraphVisitor *v, Inst *inst)
1853 {
1854     auto ctor = static_cast<LLVMIrConstructor *>(v);
1855     auto ltype = ctor->GetExactType(inst->GetType());
1856     auto block = ctor->GetCurrentBasicBlock();
1857 
1858     // PHIs need an adjusted insert point when ValueMapAdd has already created coerced values for other PHIs, since LLVM requires all PHI nodes to be grouped at the start of a block
1859     auto nonPhi = block->getFirstNonPHI();
1860     if (nonPhi != nullptr) {
1861         ctor->builder_.SetInsertPoint(nonPhi);
1862     }
1863 
1864     auto phi = ctor->builder_.CreatePHI(ltype, inst->GetInputsCount());
1865     ctor->SetCurrentBasicBlock(block);
1866     ctor->ValueMapAdd(inst, phi);
1867 }
1868 
1869 void LLVMIrConstructor::VisitIntrinsic(GraphVisitor *v, Inst *inst)
1870 {
1871     auto ctor = static_cast<LLVMIrConstructor *>(v);
1872     auto entryId = inst->CastToIntrinsic()->GetIntrinsicId();
1873 
1874     // Some intrinsics are lowered into inline code or LLVM intrinsics. For LLVM intrinsics, the final decision about
1875     // lowering into code or a call is made later in IntrinsicsLowering
1876     if (g_options.IsCompilerEncodeIntrinsics()) {
1877         bool lowered = ctor->TryEmitIntrinsic(inst, entryId);
1878         if (lowered) {
1879             return;
1880         }
1881         // Every intrinsic that Panda can encode should have been lowered by this point
1882         ASSERT(!EncodesBuiltin(ctor->GetGraph()->GetRuntime(), entryId, ctor->GetGraph()->GetArch()));
1883     }
1884 
1885     UNREACHABLE();
1886 }
1887 
1888 void LLVMIrConstructor::VisitDefault([[maybe_unused]] Inst *inst)
1889 {
1890     ASSERT_DO(false, (std::cerr << "Unsupported llvm lowering for \n", inst->Dump(&std::cerr, true)));
1891     UNREACHABLE();
1892 }
1893 
1894 LLVMIrConstructor::LLVMIrConstructor(Graph *graph, llvm::Module *module, llvm::LLVMContext *context,
1895                                      LLVMArkInterface *arkInterface, const std::unique_ptr<DebugDataBuilder> &debugData)
1896     : graph_(graph),
1897       builder_(llvm::IRBuilder<>(*context)),
1898       inputMap_(graph->GetLocalAllocator()->Adapter()),
1899       blockTailMap_(graph->GetLocalAllocator()->Adapter()),
1900       blockHeadMap_(graph->GetLocalAllocator()->Adapter()),
1901       arkInterface_(arkInterface),
1902       debugData_(debugData),
1903       cc_(graph->GetLocalAllocator()->Adapter()),
1904       ccValues_(graph->GetLocalAllocator()->Adapter())
1905 {
1906     // Assign regmaps
1907     if (graph->GetMode().IsInterpreter()) {
1908         if (graph->GetArch() == Arch::AARCH64) {
1909             cc_.assign({AARCH64_PC, AARCH64_ACC, AARCH64_ACC_TAG, AARCH64_FP, AARCH64_DISPATCH, AARCH64_MOFFSET,
1910                         AARCH64_METHOD_PTR, GetThreadReg(Arch::AARCH64)});
1911         } else if (graph->GetArch() == Arch::X86_64) {
1912             cc_.assign({X86_64_PC, X86_64_ACC, X86_64_ACC_TAG, X86_64_FP, X86_64_DISPATCH, GetThreadReg(Arch::X86_64),
1913                         X86_64_REAL_FP});
1914         } else {
1915             LLVM_LOG(FATAL, ENTRY) << "Unsupported architecture for arkintcc";
1916         }
1917     } else if (graph->GetMode().IsFastPath()) {
1918         ASSERT(graph->GetArch() == Arch::AARCH64);
1919         for (size_t i = 0; i < graph->GetRuntime()->GetMethodTotalArgumentsCount(graph->GetMethod()); i++) {
1920             cc_.push_back(i);
1921         }
1922         cc_.push_back(GetThreadReg(Arch::AARCH64));
1923         cc_.push_back(AARCH64_REAL_FP);
1924     }
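    // One ccValues_ slot per calling-convention register; VisitLiveOut fills these slots with
    // the values that must end up in the corresponding registers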
1925     ccValues_.assign(cc_.size(), nullptr);
1926 
1927     // Create function
1928     auto funcProto = GetEntryFunctionType();
1929     auto methodName = arkInterface_->GetUniqMethodName(graph_->GetMethod());
1930     func_ = CreateFunctionDeclaration(funcProto, methodName, module);
1931     arkInterface_->PutFunction(graph_->GetMethod(), func_);
1932 
1933     auto klassId = graph_->GetRuntime()->GetClassIdForMethod(graph_->GetMethod());
1934     auto klassIdMd = llvm::ConstantAsMetadata::get(builder_.getInt32(klassId));
1935     func_->addMetadata(llvmbackend::LLVMArkInterface::FUNCTION_MD_CLASS_ID, *llvm::MDNode::get(*context, {klassIdMd}));
1936 }
1937 
1938 bool LLVMIrConstructor::BuildIr()
1939 {
1940     LLVM_LOG(DEBUG, IR) << "Building IR for LLVM";
1941 
1942     // Set Argument Names
1943     auto it = func_->arg_begin();
1944     auto idx = 0;
1945     while (it != func_->arg_end()) {
1946         std::stringstream name;
1947         name << "a" << idx++;
1948         (it++)->setName(name.str());
1949     }
1950 
1951     auto method = graph_->GetMethod();
1952     auto runtime = graph_->GetRuntime();
1953     arkInterface_->RememberFunctionOrigin(func_, method);
1954     debugData_->BeginSubprogram(func_, runtime->GetFullFileName(method), runtime->GetMethodId(method));
1955 
1956     // First step - create blocks, leaving LLVM EntryBlock untouched
1957     BuildBasicBlocks();
1958     // Second step - visit all instructions, including StartBlock, but not filling PHI inputs
1959     BuildInstructions();
1960     // Third step - fill the PHI inputs
1961     FillPhiInputs();
1962 
1963     debugData_->EndSubprogram(func_);
1964 
1965     // verifyFunction returns false when there are no errors, but we return true when everything is ok.
1966     auto verified = !verifyFunction(*func_, &llvm::errs());
1967     if (!verified) {
1968         func_->print(llvm::errs());
1969     }
1970     return verified;
1971 }
1972 
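// Three outcomes are possible: Unexpected(...) when an opcode has no LLVM lowering at all, false
// when the graph is dynamic or contains SaveState/SaveStateDeoptimize outside interpreter mode,
// and true otherwise.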
1973 Expected<bool, std::string> LLVMIrConstructor::CanCompile(Graph *graph)
1974 {
1975     if (graph->IsDynamicMethod()) {
1976         return false;
1977     }
1978     bool can = true;
1979     for (auto basicBlock : graph->GetBlocksRPO()) {
1980         for (auto inst : basicBlock->AllInsts()) {
1981             bool canCompile = LLVMIrConstructor::CanCompile(inst);
1982             if (!canCompile) {
1983                 LLVM_LOG(ERROR, ENTRY) << GetOpcodeString(inst->GetOpcode())
1984                                        << " unexpected in LLVM lowering. Method = "
1985                                        << graph->GetRuntime()->GetMethodFullName(graph->GetMethod());
1986                 return Unexpected(std::string("Unexpected opcode in CanCompile"));
1987             }
1988             if (graph->GetMode().IsInterpreter()) {
1989                 continue;
1990             }
1991 
1992             auto opcode = inst->GetOpcode();
1993             if (opcode == Opcode::SaveState || opcode == Opcode::SaveStateDeoptimize) {
1994                 can = false;  // no immediate return here - keep checking CanCompile for the remaining instructions
1995             }
1996         }
1997     }
1998     return can;
1999 }
2000 
2001 bool LLVMIrConstructor::CanCompile(Inst *inst)
2002 {
2003     if (inst->IsIntrinsic()) {
2004         auto iid = inst->CastToIntrinsic()->GetIntrinsicId();
2005         // We support only slowpaths where the second immediate is an external function
2006         if (iid == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY) {
2007             return inst->CastToIntrinsic()->GetImms().size() > 1;
2008         }
2009         return CanCompileIntrinsic(iid);
2010     }
2011     // Check if we have a visitor method that can handle the opcode
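    // The macro below compares this class's Visit##OPCODE against GraphVisitor's default
    // implementation: an opcode is compilable exactly when LLVMIrConstructor overrides its visitor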
2012     switch (inst->GetOpcode()) {
2013         default:
2014             UNREACHABLE_CONSTEXPR();
2015             // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
2016 #define INST_DEF(OPCODE, ...)                                                     \
2017     case Opcode::OPCODE: {                                                        \
2018         return &LLVMIrConstructor::Visit##OPCODE != &GraphVisitor::Visit##OPCODE; \
2019     }
2020             OPCODE_LIST(INST_DEF)
2021     }
2022 #undef INST_DEF
2023 }
2024 
2025 }  // namespace panda::compiler
2026