1 /*
2 * Copyright (c) 2023-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "optimizer/code_generator/codegen.h"
17 #include "runtime/include/coretypes/string.h"
18
19 #include "llvm_ir_constructor.h"
20
21 #include "gc_barriers.h"
22 #include "irtoc_function_utils.h"
23 #include "llvm_logger.h"
24 #include "llvm_options.h"
25 #include "metadata.h"
26 #include "utils.h"
27 #include "transforms/builtins.h"
28 #include "transforms/gc_utils.h"
29 #include "transforms/runtime_calls.h"
30
31 namespace ark::compiler {
32 #define ONLY_NEEDSAFEPOINT
33 #include <intrinsics_ir_build.inl>
34 #undef ONLY_NEEDSAFEPOINT
35 } // namespace ark::compiler
36
37 #include <llvm/IR/InlineAsm.h>
38 #include <llvm/IR/IntrinsicsAArch64.h>
39 #include <llvm/IR/MDBuilder.h>
40 #include <llvm/IR/Verifier.h>
41 #include <llvm/Transforms/Utils/BasicBlockUtils.h>
42
43 using ark::llvmbackend::DebugDataBuilder;
44 using ark::llvmbackend::LLVMArkInterface;
45 using ark::llvmbackend::builtins::BarrierReturnVoid;
46 using ark::llvmbackend::builtins::KeepThis;
47 using ark::llvmbackend::builtins::LenArray;
48 using ark::llvmbackend::builtins::LoadClass;
49 using ark::llvmbackend::builtins::LoadInitClass;
50 using ark::llvmbackend::builtins::LoadString;
51 using ark::llvmbackend::builtins::ResolveVirtual;
52 using ark::llvmbackend::irtoc_function_utils::IsNoAliasIrtocFunction;
53 #ifndef NDEBUG
54 using ark::llvmbackend::irtoc_function_utils::IsPtrIgnIrtocFunction;
55 #endif
56 using ark::llvmbackend::utils::CreateLoadClassFromObject;
57
// Lane counts used by the SIMD UTF-16 -> UTF-8 compression emitters below.
static constexpr unsigned VECTOR_SIZE_8 = 8;
static constexpr unsigned VECTOR_SIZE_16 = 16;
60
// Debug-only check that an llvm::Value has the expected LLVM type; prints both types on mismatch.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define ASSERT_TYPE(input, expectedType) \
    ASSERT_DO((input)->getType() == (expectedType), \
              std::cerr << "Unexpected data type: " << GetTypeName((input)->getType()) << ". Should be a " \
                        << GetTypeName(expectedType) << "." << std::endl)
66
67 // Max integer that can be represented in float/double without losing precision
// Largest value of the form 2^digits - 1 that a float stores exactly (all mantissa bits set).
constexpr float MaxIntAsExactFloat()
{
    constexpr auto MANTISSA_BITS = static_cast<unsigned>(std::numeric_limits<float>::digits);
    return static_cast<float>((1U << MANTISSA_BITS) - 1U);
}
72
// Largest value of the form 2^digits - 1 that a double stores exactly (JS MAX_SAFE_INTEGER).
constexpr double MaxIntAsExactDouble()
{
    constexpr auto MANTISSA_BITS = static_cast<unsigned>(std::numeric_limits<double>::digits);
    return static_cast<double>((1ULL << MANTISSA_BITS) - 1ULL);
}
77
// Fixed register assignments that hold interpreter state in Irtoc handlers.
// These numbers must stay in sync with the interpreter's register convention (see interpreter.irt).
// arm64: { dispatch: 24, pc: 20, frame: 23, acc: 21, accTag: 22, moffset: 25, methodPtr: 26 },
static constexpr auto AARCH64_PC = 20;
static constexpr auto AARCH64_ACC = 21;
static constexpr auto AARCH64_ACC_TAG = 22;
static constexpr auto AARCH64_FP = 23;
static constexpr auto AARCH64_DISPATCH = 24;
static constexpr auto AARCH64_MOFFSET = 25;
static constexpr auto AARCH64_METHOD_PTR = 26;
static constexpr auto AARCH64_REAL_FP = 29;  // x29, the native frame pointer

// x86_64: { dispatch: 8, pc: 4, frame: 5, acc: 11, accTag: 3 }
// "renamed rN" maps the logical number to the hardware register it stands for.
static constexpr auto X86_64_PC = 4;       // renamed r10
static constexpr auto X86_64_ACC = 11;     // renamed r3 (rbx)
static constexpr auto X86_64_ACC_TAG = 3;  // renamed r11
static constexpr auto X86_64_FP = 5;       // renamed r9
static constexpr auto X86_64_DISPATCH = 8;
static constexpr auto X86_64_REAL_FP = 9;  // renamed r5 (rbp)
95
96 namespace {
CreateFunctionDeclaration(llvm::FunctionType * functionType,const std::string & name,llvm::Module * module)97 inline llvm::Function *CreateFunctionDeclaration(llvm::FunctionType *functionType, const std::string &name,
98 llvm::Module *module)
99 {
100 ASSERT(functionType != nullptr);
101 ASSERT(!name.empty());
102 ASSERT(module != nullptr);
103
104 auto function = module->getFunction(name);
105 if (function != nullptr) {
106 ASSERT(function->getVisibility() == llvm::GlobalValue::ProtectedVisibility);
107 ASSERT(function->doesNotThrow());
108 return function;
109 }
110
111 function = llvm::Function::Create(functionType, llvm::Function::ExternalLinkage, name, module);
112 function->setDoesNotThrow();
113 function->setVisibility(llvm::GlobalValue::ProtectedVisibility);
114 function->setSectionPrefix(name);
115
116 return function;
117 }
118
CreateBlackBoxAsm(llvm::IRBuilder<> * builder,const std::string & inlineAsm)119 inline void CreateBlackBoxAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm)
120 {
121 auto iasmType = llvm::FunctionType::get(builder->getVoidTy(), {}, false);
122 builder->CreateCall(iasmType, llvm::InlineAsm::get(iasmType, inlineAsm, "", true), {});
123 }
124
CreateInt32ImmAsm(llvm::IRBuilder<> * builder,const std::string & inlineAsm,uint32_t imm)125 inline void CreateInt32ImmAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm, uint32_t imm)
126 {
127 auto oneInt = llvm::FunctionType::get(builder->getVoidTy(), {builder->getInt32Ty()}, false);
128 builder->CreateCall(oneInt, llvm::InlineAsm::get(oneInt, inlineAsm, "i", true), {builder->getInt32(imm)});
129 }
130
ToAtomicOrdering(bool isVolatile)131 inline llvm::AtomicOrdering ToAtomicOrdering(bool isVolatile)
132 {
133 return isVolatile ? LLVMArkInterface::VOLATILE_ORDER : LLVMArkInterface::NOT_ATOMIC_ORDER;
134 }
135
136 #ifndef NDEBUG
GetTypeName(llvm::Type * type)137 inline std::string GetTypeName(llvm::Type *type)
138 {
139 std::string name;
140 auto stream = llvm::raw_string_ostream(name);
141 type->print(stream);
142 return stream.str();
143 }
144 #endif
145 } // namespace
146
147 namespace ark::compiler {
148
149 #include <can_compile_intrinsics_gen.inl>
150
// Marks every block reachable from `block` along normal (non-exceptional) control flow with `normal`.
// Catch successors are skipped; debug asserts validate where catch edges may appear in the successor list.
static void MarkNormalBlocksRecursive(BasicBlock *block, Marker normal)
{
    // Index of the last successor that is allowed to be a normal (non-catch) edge.
    [[maybe_unused]] size_t expected = 0;
    bool processSucc = true;
    auto last = block->GetLastInst();
    if (last != nullptr) {
        // Any successors of blocks with terminators are either TryEnd or Catch blocks
        if (last->GetFlag(inst_flags::TERMINATOR)) {
            processSucc = false;
        }
        if (last->GetOpcode() == Opcode::IfImm || last->GetOpcode() == Opcode::If) {
            // Conditional branches have two normal successors (indices 0 and 1).
            expected = 1;
        }
    }
    for (size_t i = 0; i < block->GetSuccsBlocks().size(); i++) {
        auto succ = block->GetSuccessor(i);
        if (succ->IsCatch()) {
            ASSERT_DO(i > expected,
                      (std::cerr << "Catch block found too early in successors: at index " << i << std::endl));
            continue;
        }
        ASSERT_DO(i <= expected, (std::cerr << "Unexpected non-catch successor block at index " << i << std::endl));
        // SetMarker presumably returns the previous marker state, so we recurse only on first visit
        // (prevents infinite recursion on loops) -- TODO(review): confirm SetMarker's return contract.
        if (processSucc && !succ->SetMarker(normal)) {
            MarkNormalBlocksRecursive(succ, normal);
        }
    }
}
178
179 // Use that only to pass it into method like rvalue
CreateBasicBlockName(Inst * inst,const std::string & bbName)180 static inline std::string CreateBasicBlockName(Inst *inst, const std::string &bbName)
181 {
182 std::stringstream name;
183 name << "bb" << std::to_string(inst->GetBasicBlock()->GetId()) << "_i" << std::to_string(inst->GetId()) << ".."
184 << bbName << "..";
185 return name.str();
186 }
187
CreateNameForInst(Inst * inst)188 static inline std::string CreateNameForInst(Inst *inst)
189 {
190 return std::string("v") + std::to_string(inst->GetId());
191 }
192
IsInteger(DataType::Type type)193 static inline bool IsInteger(DataType::Type type)
194 {
195 return DataType::IsTypeNumeric(type) && !DataType::IsFloatType(type) && type != DataType::POINTER;
196 }
197
IsSignedInteger(const DataType::Type & type)198 static inline bool IsSignedInteger(const DataType::Type &type)
199 {
200 return IsInteger(type) && DataType::IsTypeSigned(type);
201 }
202
IsUnsignedInteger(DataType::Type type)203 static inline bool IsUnsignedInteger(DataType::Type type)
204 {
205 return IsInteger(type) && !DataType::IsTypeSigned(type);
206 }
207
IsAlwaysThrowBasicBlock(Inst * inst)208 static inline bool IsAlwaysThrowBasicBlock(Inst *inst)
209 {
210 if (!g_options.IsCompilerInliningSkipThrowBlocks()) {
211 return false;
212 }
213
214 auto bbLastInst = inst->GetBasicBlock()->GetLastInst();
215 return bbLastInst->GetOpcode() == Opcode::Throw || bbLastInst->GetOpcode() == Opcode::Deoptimize;
216 }
217
ICmpCodeConvert(ConditionCode cc)218 static llvm::ICmpInst::Predicate ICmpCodeConvert(ConditionCode cc)
219 {
220 switch (cc) {
221 case ConditionCode::CC_EQ:
222 return llvm::CmpInst::Predicate::ICMP_EQ;
223 case ConditionCode::CC_NE:
224 return llvm::CmpInst::Predicate::ICMP_NE;
225 case ConditionCode::CC_LT:
226 return llvm::CmpInst::Predicate::ICMP_SLT;
227 case ConditionCode::CC_GT:
228 return llvm::CmpInst::Predicate::ICMP_SGT;
229 case ConditionCode::CC_LE:
230 return llvm::CmpInst::Predicate::ICMP_SLE;
231 case ConditionCode::CC_GE:
232 return llvm::CmpInst::Predicate::ICMP_SGE;
233 case ConditionCode::CC_B:
234 return llvm::CmpInst::Predicate::ICMP_ULT;
235 case ConditionCode::CC_A:
236 return llvm::CmpInst::Predicate::ICMP_UGT;
237 case ConditionCode::CC_BE:
238 return llvm::CmpInst::Predicate::ICMP_ULE;
239 case ConditionCode::CC_AE:
240 return llvm::CmpInst::Predicate::ICMP_UGE;
241 default:
242 UNREACHABLE();
243 return llvm::CmpInst::Predicate::ICMP_NE;
244 }
245 }
246
FCmpCodeConvert(ConditionCode conditionCode)247 static llvm::FCmpInst::Predicate FCmpCodeConvert(ConditionCode conditionCode)
248 {
249 switch (conditionCode) {
250 case ConditionCode::CC_EQ:
251 return llvm::FCmpInst::Predicate::FCMP_UEQ;
252 case ConditionCode::CC_NE:
253 return llvm::FCmpInst::Predicate::FCMP_UNE;
254 case ConditionCode::CC_LT:
255 return llvm::FCmpInst::Predicate::FCMP_ULT;
256 case ConditionCode::CC_GT:
257 return llvm::FCmpInst::Predicate::FCMP_UGT;
258 case ConditionCode::CC_LE:
259 return llvm::FCmpInst::Predicate::FCMP_ULE;
260 case ConditionCode::CC_GE:
261 return llvm::FCmpInst::Predicate::FCMP_UGE;
262 case ConditionCode::CC_B:
263 return llvm::FCmpInst::Predicate::FCMP_ULT;
264 case ConditionCode::CC_A:
265 return llvm::FCmpInst::Predicate::FCMP_UGT;
266 case ConditionCode::CC_BE:
267 return llvm::FCmpInst::Predicate::FCMP_ULE;
268 case ConditionCode::CC_AE:
269 return llvm::FCmpInst::Predicate::FCMP_UGE;
270 default:
271 ASSERT_DO(false, (std::cerr << "Unexpected condition_code = " << conditionCode << std::endl));
272 UNREACHABLE();
273 }
274 }
275
// Maps a check-style IR instruction onto the DeoptimizeType recorded for its deoptimization exit.
// RefTypeCheck deliberately falls through to the failing default: it is not expected here.
static DeoptimizeType GetDeoptimizationType(Inst *inst)
{
    switch (inst->GetOpcode()) {
        case Opcode::NullCheck:
            return DeoptimizeType::NULL_CHECK;
        case Opcode::DeoptimizeIf:
            // DeoptimizeIf carries its own reason; forward it unchanged.
            return inst->CastToDeoptimizeIf()->GetDeoptimizeType();
        case Opcode::BoundsCheck:
            return DeoptimizeType::BOUNDS_CHECK_WITH_DEOPT;
        case Opcode::NegativeCheck:
            return DeoptimizeType::NEGATIVE_CHECK;
        case Opcode::ZeroCheck:
            return DeoptimizeType::ZERO_CHECK;
        case Opcode::SubOverflowCheck:
            return DeoptimizeType::OVERFLOW;
        case Opcode::CheckCast:
            return DeoptimizeType::CHECK_CAST;
        case Opcode::RefTypeCheck:
        default:
            ASSERT_DO(false, (std::cerr << "Unexpected inst to GetDeoptimizationType, inst:" << std::endl,
                              inst->Dump(&std::cerr, true)));
            UNREACHABLE();
    }
}
300
GetFastPathCallingConv(uint32_t numArgs)301 static llvm::CallingConv::ID GetFastPathCallingConv(uint32_t numArgs)
302 {
303 switch (numArgs) {
304 case 0U:
305 return llvm::CallingConv::ArkFast0;
306 case 1U:
307 return llvm::CallingConv::ArkFast1;
308 case 2U:
309 return llvm::CallingConv::ArkFast2;
310 case 3U:
311 return llvm::CallingConv::ArkFast3;
312 case 4U:
313 return llvm::CallingConv::ArkFast4;
314 case 5U:
315 return llvm::CallingConv::ArkFast5;
316 case 6U:
317 return llvm::CallingConv::ArkFast6;
318 default:
319 UNREACHABLE();
320 }
321 }
322
GetAllocateArrayTlabEntrypoint(size_t elementSize)323 static RuntimeInterface::EntrypointId GetAllocateArrayTlabEntrypoint(size_t elementSize)
324 {
325 switch (elementSize) {
326 case sizeof(uint8_t):
327 return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB8;
328 case sizeof(uint16_t):
329 return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB16;
330 case sizeof(uint32_t):
331 return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB32;
332 case sizeof(uint64_t):
333 return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB64;
334 default:
335 UNREACHABLE();
336 }
337 }
338
GetRealFrameReg(Arch arch)339 static size_t GetRealFrameReg(Arch arch)
340 {
341 switch (arch) {
342 case Arch::AARCH64:
343 return AARCH64_REAL_FP;
344 case Arch::X86_64:
345 return X86_64_REAL_FP;
346 default:
347 UNREACHABLE();
348 }
349 }
350
351 /**
352 * Call when we are sure that instruction shouldn't appear for translating but
353 * eventually we've tried to translate it.
354 */
UnexpectedLowering(Inst * inst)355 static void UnexpectedLowering([[maybe_unused]] Inst *inst)
356 {
357 ASSERT_DO(false, (std::cerr << "Unexpected attempt to lower: ", inst->Dump(&std::cerr, true)));
358 UNREACHABLE();
359 }
360
// Returns true when reading input `index` at the instruction's declared input type cannot
// misinterpret the producer's value: either both types agree on signedness, or the declared
// type is at least as wide as the producer's actual type (a widening, value-preserving read).
bool LLVMIrConstructor::IsSafeCast(Inst *inst, unsigned int index)
{
    auto trueType = inst->GetInput(index).GetInst()->GetType();
    auto instType = inst->GetInputType(index);
    bool signTheSame = IsSignedInteger(trueType) == IsSignedInteger(instType);
    // "extending": declared input type is no narrower than the actual producer type.
    bool extending = DataType::GetTypeSize(trueType, GetGraph()->GetArch()) <=
                     DataType::GetTypeSize(instType, GetGraph()->GetArch());
    return signTheSame || extending;
}
370
// Attempts to lower an Ark intrinsic into a direct call to a known LLVM-side function.
// Returns false for intrinsics without a dedicated emitter (handled by the generic path instead).
bool LLVMIrConstructor::TryEmitIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId arkId)
{
    // module/f32Ty/f64Ty are referenced by the generated case bodies included into the switch below.
    auto module = func_->getParent();
    auto f32Ty = builder_.getFloatTy();
    auto f64Ty = builder_.getDoubleTy();
    llvm::Function *llvmId = nullptr;

    switch (arkId) {
        // Generated emitters: a matching case either sets `llvmId` or returns on its own.
#include "intrinsics_llvm_codegen.inl"
#ifndef NDEBUG
        // Must be lowered earlier in IrBuilder, impossible to meet
        case RuntimeInterface::IntrinsicId::INTRINSIC_OBJECT_MONITOR_ENTER:
        case RuntimeInterface::IntrinsicId::INTRINSIC_OBJECT_MONITOR_EXIT:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_I32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_I64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_F32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_F64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_SQRT_F32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_SQRT_F64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_I32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_I64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_F32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_F64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_I32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_I64:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_F32:
        case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_F64:
            UNREACHABLE();
        // Can appear only after LLVM optimizations
        case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_COPY:
        case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_SET:
        case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_MOVE:
            UNREACHABLE();
#include "emit_intrinsic_llvm_ir_constructor_gen.inl"
#endif
        default:
            return false;
    }

    // Any case that fell through must have resolved the callee.
    ASSERT(llvmId != nullptr);
    ASSERT(!inst->CanThrow());

    // Make sure the runtime-call metadata for this intrinsic exists before emitting the call.
    arkInterface_->GetOrCreateRuntimeFunctionType(func_->getContext(), func_->getParent(),
                                                  LLVMArkInterface::RuntimeCallType::INTRINSIC,
                                                  static_cast<LLVMArkInterface::EntrypointId>(arkId));

    auto arguments = GetIntrinsicArguments(llvmId->getFunctionType(), inst->CastToIntrinsic());
    auto result = llvm::CallInst::Create(llvmId, arguments, "", GetCurrentBasicBlock());
    SetIntrinsicParamAttrs(result, inst->CastToIntrinsic(), arguments);
    ValueMapAdd(inst, result);
    return true;
}
423
424 // Specific intrinsic Emitters
425
EmitFastPath(Inst * inst,RuntimeInterface::EntrypointId eid,uint32_t numArgs)426 bool LLVMIrConstructor::EmitFastPath(Inst *inst, RuntimeInterface::EntrypointId eid, uint32_t numArgs)
427 {
428 ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
429 for (uint32_t i = 0; i < numArgs; i++) {
430 args.push_back(GetInputValue(inst, i));
431 }
432 auto call = CreateFastPathCall(inst, eid, args);
433
434 auto retType = GetType(inst->GetType());
435 if (!retType->isVoidTy()) {
436 ValueMapAdd(inst, call);
437 }
438 return true;
439 }
440
// String.equals: FastPath call to STRING_EQUALS_COMPRESSED with both string operands.
bool LLVMIrConstructor::EmitStringEquals(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_EQUALS_COMPRESSED, 2U);
}
445
// StringBuilder.append(bool): FastPath call to STRING_BUILDER_BOOL (builder, value).
bool LLVMIrConstructor::EmitStringBuilderBool(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_BOOL, 2U);
}
450
// StringBuilder.append(char): FastPath call to STRING_BUILDER_CHAR (builder, value).
bool LLVMIrConstructor::EmitStringBuilderChar(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_CHAR, 2U);
}
455
// StringBuilder.append(String): FastPath call to STRING_BUILDER_STRING_COMPRESSED (builder, string).
bool LLVMIrConstructor::EmitStringBuilderString(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_STRING_COMPRESSED, 2U);
}
460
// Two-string concatenation via the TLAB-allocating STRING_CONCAT2_TLAB entrypoint.
bool LLVMIrConstructor::EmitStringConcat2(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT2_TLAB, 2U);
}
465
// Three-string concatenation via the TLAB-allocating STRING_CONCAT3_TLAB entrypoint.
bool LLVMIrConstructor::EmitStringConcat3(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT3_TLAB, 3U);
}
470
// Four-string concatenation via the TLAB-allocating STRING_CONCAT4_TLAB entrypoint.
bool LLVMIrConstructor::EmitStringConcat4(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT4_TLAB, 4U);
}
475
// String.compareTo: FastPath call to STRING_COMPARE_TO with both string operands.
bool LLVMIrConstructor::EmitStringCompareTo(Inst *inst)
{
    return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_COMPARE_TO, 2U);
}
480
// Lowers the isInfinity intrinsic: delegates the check to CreateIsInf on the single FP input.
bool LLVMIrConstructor::EmitIsInf(Inst *inst)
{
    auto result = CreateIsInf(GetInputValue(inst, 0));
    ValueMapAdd(inst, result);
    return true;
}
487
EmitUnreachable(Inst * inst)488 bool LLVMIrConstructor::EmitUnreachable([[maybe_unused]] Inst *inst)
489 {
490 auto bb = GetCurrentBasicBlock();
491 if (bb->empty() || !llvm::isa<llvm::ReturnInst>(*(bb->rbegin()))) {
492 auto trap = llvm::Intrinsic::getDeclaration(func_->getParent(), llvm::Intrinsic::trap, {});
493 builder_.CreateCall(trap, {});
494 builder_.CreateUnreachable();
495 }
496 return true;
497 }
498
EmitNothing(Inst * inst)499 bool LLVMIrConstructor::EmitNothing([[maybe_unused]] Inst *inst)
500 {
501 return true;
502 }
503
504 #ifndef NDEBUG
// Debug-only validation that the bridge chosen for a SlowPath call matches the caller's
// argument layout (the NArgBridge / OddSaved naming encodes how many registers the bridge saves).
// `funcArgsNum` is the FastPath's own arg count (incl. 2 fake thread/frame args),
// `callArgsNum` is the number of arguments actually passed to the bridge.
static void CheckSlowPathName(const std::string &name, size_t funcArgsNum, size_t callArgsNum)
{
    ASSERT_DO(std::string_view {name}.find("SlowPath") == std::string_view::npos,
              std::cerr << "Bad bridge: SlowPath bridge not allowed in LLVM FastPath: " << name << std::endl);
    ASSERT(callArgsNum <= funcArgsNum);
    if (callArgsNum < funcArgsNum) {
        funcArgsNum -= 2U;  // exclude fake arguments for these asserts
        ASSERT(funcArgsNum <= 4U);
        // Each NArgBridge variant must be used exactly when the FastPath takes N real arguments.
        ASSERT_DO((std::string_view {name}.find("1ArgBridge") != std::string_view::npos) == (funcArgsNum == 1U),
                  std::cerr << "Bad bridge: OddSaved1 for FastPath with 1 arguments "
                            << "and SlowPath with zero arguments: " << name << std::endl);
        ASSERT_DO((std::string_view {name}.find("2ArgBridge") != std::string_view::npos) == (funcArgsNum == 2U),
                  std::cerr << "Bad bridge: OddSaved2 for FastPath with 2 arguments "
                            << "and SlowPath with 0-1 arguments: " << name << std::endl);
        ASSERT_DO((std::string_view {name}.find("3ArgBridge") != std::string_view::npos) == (funcArgsNum == 3U),
                  std::cerr << "Bad bridge: OddSaved3 for FastPath with 3 arguments "
                            << "and SlowPath with 0-2 arguments: " << name << std::endl);
        ASSERT_DO((std::string_view {name}.find("4ArgBridge") != std::string_view::npos) == (funcArgsNum == 4U),
                  std::cerr << "Bad bridge: OddSaved4 for FastPath with 4 arguments "
                            << "and SlowPath with 0-3 arguments: " << name << std::endl);
    } else {  // callArgsNum == funcArgsNum
        ASSERT_DO((std::string_view {name}.find("OddSaved") != std::string_view::npos) == (funcArgsNum % 2U == 1U),
                  std::cerr << "Bad bridge: OddSaved <=> amount of arguments is odd: " << name << std::endl);
    }
}
530 #endif
531
// Lowers a SlowPathEntry intrinsic inside a FastPath: tail-calls the external SlowPath bridge,
// forwarding all IR inputs plus the thread register and real frame register as trailing args.
bool LLVMIrConstructor::EmitSlowPathEntry(Inst *inst)
{
    ASSERT(GetGraph()->GetMode().IsFastPath());
    // The enclosing function must itself use one of the ArkFastN conventions.
    ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast5 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast6);

    // Arguments
    ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
    for (size_t i = 0; i < inst->GetInputs().Size(); i++) {
        args.push_back(GetInputValue(inst, i));
    }
    // The two "fake" trailing arguments: current thread and the real frame pointer.
    auto threadRegPtr = builder_.CreateIntToPtr(GetThreadRegValue(), builder_.getPtrTy());
    auto frameRegPtr = builder_.CreateIntToPtr(GetRealFrameRegValue(), builder_.getPtrTy());
    args.push_back(threadRegPtr);
    args.push_back(frameRegPtr);

    // imms[1] holds the external-method id of the bridge to call.
    ASSERT(inst->CastToIntrinsic()->HasImms() && inst->CastToIntrinsic()->GetImms().size() == 2U);
    uint32_t externalId = inst->CastToIntrinsic()->GetImms()[1];
    auto externalName = GetGraph()->GetRuntime()->GetExternalMethodName(GetGraph()->GetMethod(), externalId);
#ifndef NDEBUG
    CheckSlowPathName(externalName, func_->arg_size(), args.size());
#endif
    // Declare the callee on first use; the declaration mirrors the input types plus the two fakes.
    auto callee = func_->getParent()->getFunction(externalName);
    if (callee == nullptr) {
        ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
        for (const auto &input : inst->GetInputs()) {
            argTypes.push_back(GetExactType(input.GetInst()->GetType()));
        }
        argTypes.push_back(builder_.getPtrTy());
        argTypes.push_back(builder_.getPtrTy());
        auto ftype = llvm::FunctionType::get(GetType(inst->GetType()), argTypes, false);
        callee = llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, func_->getParent());
        callee->setCallingConv(GetFastPathCallingConv(inst->GetInputs().Size()));
    }

    // Musttail-style call: the slow path returns directly to our caller.
    auto call = builder_.CreateCall(callee->getFunctionType(), callee, args);
    call->setCallingConv(callee->getCallingConv());
    call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_Tail);
    call->addFnAttr(llvm::Attribute::get(call->getContext(), "ark-tail-call"));
    if (call->getType()->isVoidTy()) {
        builder_.CreateRetVoid();
    } else {
        builder_.CreateRet(call);
    }
    return true;
}
583
// Emits an AArch64 load-acquire-exclusive (ldaxr) from the pointer input. AArch64-only.
bool LLVMIrConstructor::EmitExclusiveLoadWithAcquire(Inst *inst)
{
    ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
    ASSERT(inst->GetInputType(0) == DataType::POINTER);
    auto &ctx = func_->getContext();
    auto addr = GetInputValue(inst, 0);
    auto dstType = GetExactType(inst->GetType());
    auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_ldaxr;
    auto load = builder_.CreateUnaryIntrinsic(intrinsicId, addr);
    // The elementtype attribute tells LLVM the access width of the exclusive load.
    load->addParamAttr(0, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, dstType));
    ValueMapAdd(inst, load);
    return true;
}
597
// Emits an AArch64 store-release-exclusive (stlxr) of input 1 to the pointer input 0.
// The result (0 on success, 1 on failure, per the instruction's semantics as exposed by the
// intrinsic) is mapped as this instruction's value. AArch64-only.
bool LLVMIrConstructor::EmitExclusiveStoreWithRelease(Inst *inst)
{
    ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
    ASSERT(inst->GetInputType(0) == DataType::POINTER);
    auto &ctx = func_->getContext();
    auto addr = GetInputValue(inst, 0);
    auto value = GetInputValue(inst, 1);
    auto type = value->getType();
    auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_stlxr;
    auto stlxr = llvm::Intrinsic::getDeclaration(func_->getParent(), intrinsicId, builder_.getPtrTy());
    // The intrinsic takes the stored value as i64; widen/bitcast to match.
    value = builder_.CreateZExtOrBitCast(value, stlxr->getFunctionType()->getParamType(0));
    auto store = builder_.CreateCall(stlxr, {value, addr});
    // elementtype on the pointer operand records the real access width.
    store->addParamAttr(1, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, type));
    ValueMapAdd(inst, store);
    return true;
}
614
EmitInterpreterReturn(Inst * inst)615 bool LLVMIrConstructor::EmitInterpreterReturn([[maybe_unused]] Inst *inst)
616 {
617 // We only support it for Irtoc interpreters on AArch64
618 ASSERT(GetGraph()->GetMode().IsInterpreter());
619
620 // This constant is hardcoded in codegen_interpreter.h and in interpreter.irt
621 constexpr size_t SPILL_SLOTS = 32;
622 CFrameLayout fl(GetGraph()->GetArch(), SPILL_SLOTS);
623 constexpr bool SAVE_UNUSED_CALLEE_REGS = true;
624
625 // Restore callee-registers
626 auto calleeRegsMask = GetCalleeRegsMask(GetGraph()->GetArch(), false, SAVE_UNUSED_CALLEE_REGS);
627 auto calleeVregsMask = GetCalleeRegsMask(GetGraph()->GetArch(), true, SAVE_UNUSED_CALLEE_REGS);
628 if (GetGraph()->GetArch() == Arch::AARCH64) {
629 constexpr bool SAVE_FRAME_AND_LINK_REGS = true;
630
631 size_t slotSize = fl.GetSlotSize();
632 size_t dslotSize = slotSize * 2U;
633
634 auto lastCalleeReg = fl.GetRegsSlotsCount() - calleeRegsMask.Count();
635 auto lastCalleeVreg = fl.GetRegsSlotsCount() - fl.GetCalleeRegistersCount(false) - calleeVregsMask.Count();
636 CreateInterpreterReturnRestoreRegs(calleeRegsMask, lastCalleeReg, false);
637 CreateInterpreterReturnRestoreRegs(calleeVregsMask, lastCalleeVreg, true);
638
639 // Adjust SP
640 auto spToFrameTopSlots = fl.GetRegsSlotsCount() + CFrameRegs::Start() - CFrameReturnAddr::Start();
641 if (SAVE_FRAME_AND_LINK_REGS) {
642 spToFrameTopSlots -= CFrameLayout::GetFpLrSlotsCount();
643 }
644
645 CreateInt32ImmAsm(&builder_,
646 std::string("add sp, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
647 spToFrameTopSlots * slotSize);
648 CreateInt32ImmAsm(&builder_, "ldp x29, x30, [sp], $0", dslotSize);
649 CreateBlackBoxAsm(&builder_, "ret");
650 } else {
651 // Currently there is no vector regs usage at x86_64 handlers
652 ASSERT(calleeVregsMask.count() == 0);
653 auto regShift = DOUBLE_WORD_SIZE_BYTES *
654 (fl.GetSpillsCount() + fl.GetCallerRegistersCount(false) + fl.GetCallerRegistersCount(true));
655 auto fpShift = DOUBLE_WORD_SIZE_BYTES * (2 + CFrameSlots::Start() - CFrameData::Start());
656
657 std::string iasmStr =
658 std::string("leaq ${0:c}(%rsp), %rsp").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT);
659 CreateInt32ImmAsm(&builder_, iasmStr, regShift);
660 Target target {GetGraph()->GetArch()};
661 while (calleeRegsMask.count() > 0) {
662 auto reg = calleeRegsMask.GetMinRegister();
663 calleeRegsMask ^= 1U << reg;
664 iasmStr = "pop %" + target.GetRegName(reg, false);
665 CreateBlackBoxAsm(&builder_, iasmStr);
666 }
667 iasmStr = "leaq " + std::to_string(fpShift) + "(%rsp), %rsp";
668 CreateBlackBoxAsm(&builder_, iasmStr);
669 CreateBlackBoxAsm(&builder_, "pop %rbp");
670 CreateBlackBoxAsm(&builder_, "retq");
671 }
672 builder_.CreateUnreachable();
673
674 return true;
675 }
676
// Lowers the TailCall intrinsic: builds the mode-specific tail call (FastPath or interpreter
// dispatch), marks it as an Ark tail call, and returns its result from the current function.
bool LLVMIrConstructor::EmitTailCall(Inst *inst)
{
    ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast5 ||
           func_->getCallingConv() == llvm::CallingConv::ArkFast6 ||
           func_->getCallingConv() == llvm::CallingConv::ArkInt);
    llvm::CallInst *call;

    if (GetGraph()->GetMode().IsFastPath()) {
        call = CreateTailCallFastPath(inst);
    } else if (GetGraph()->GetMode().IsInterpreter()) {
        call = CreateTailCallInterpreter(inst);
    } else {
        UNREACHABLE();
    }
    call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_Tail);
    call->addFnAttr(llvm::Attribute::get(call->getContext(), "ark-tail-call"));
    if (func_->getReturnType()->isVoidTy()) {
        builder_.CreateRetVoid();
    } else {
        builder_.CreateRet(call);
    }
    // Nothing is live after a tail call; drop all cached calling-convention values.
    std::fill(ccValues_.begin(), ccValues_.end(), nullptr);
    return true;
}
706
// SIMD compression of 8 UTF-16 chars to UTF-8; delegates to the templated helper with 8 lanes.
bool LLVMIrConstructor::EmitCompressEightUtf16ToUtf8CharsUsingSimd(Inst *inst)
{
    CreateCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_8>(inst);
    return true;
}
712
// SIMD compression of 16 UTF-16 chars to UTF-8; delegates to the templated helper with 16 lanes.
bool LLVMIrConstructor::EmitCompressSixteenUtf16ToUtf8CharsUsingSimd(Inst *inst)
{
    CreateCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_16>(inst);
    return true;
}
718
// Byte-swap intrinsic: lowers to llvm.bswap on the (safely typed, see IsSafeCast) integer input.
bool LLVMIrConstructor::EmitReverseBytes(Inst *inst)
{
    ASSERT(IsSafeCast(inst, 0));
    auto result = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::bswap, GetInputValue(inst, 0), nullptr);
    ValueMapAdd(inst, result);
    return true;
}
726
EmitMemoryFenceFull(Inst * inst)727 bool LLVMIrConstructor::EmitMemoryFenceFull([[maybe_unused]] Inst *inst)
728 {
729 CreateMemoryFence(memory_order::FULL);
730 return true;
731 }
732
EmitMemoryFenceRelease(Inst * inst)733 bool LLVMIrConstructor::EmitMemoryFenceRelease([[maybe_unused]] Inst *inst)
734 {
735 CreateMemoryFence(memory_order::RELEASE);
736 return true;
737 }
738
EmitMemoryFenceAcquire(Inst * inst)739 bool LLVMIrConstructor::EmitMemoryFenceAcquire([[maybe_unused]] Inst *inst)
740 {
741 CreateMemoryFence(memory_order::ACQUIRE);
742 return true;
743 }
744
EmitFround(Inst * inst)745 bool LLVMIrConstructor::EmitFround(Inst *inst)
746 {
747 llvm::Value *input = GetInputValue(inst, 0);
748 ASSERT_TYPE(input, builder_.getDoubleTy());
749 auto isNan = CreateIsNan(input);
750 auto floatCasted = builder_.CreateCast(llvm::Instruction::FPTrunc, input, builder_.getFloatTy());
751 auto casted = builder_.CreateCast(llvm::Instruction::FPExt, floatCasted, builder_.getDoubleTy());
752 llvm::Value *nan = llvm::ConstantFP::getQNaN(builder_.getDoubleTy());
753 auto result = builder_.CreateSelect(isNan, nan, casted);
754 ValueMapAdd(inst, result);
755 return true;
756 }
757
// Count-leading-zeros intrinsic: lowers to llvm.ctlz via the shared helper.
bool LLVMIrConstructor::EmitCtlz(Inst *inst)
{
    auto result = CreateZerosCount(inst, llvm::Intrinsic::ctlz);
    ValueMapAdd(inst, result);
    return true;
}
764
// Count-trailing-zeros intrinsic: lowers to llvm.cttz via the shared helper.
bool LLVMIrConstructor::EmitCttz(Inst *inst)
{
    auto result = CreateZerosCount(inst, llvm::Intrinsic::cttz);
    ValueMapAdd(inst, result);
    return true;
}
771
// Signbit intrinsic: reinterprets the FP input as i64 and tests the sign bit via `< 0`.
// The bitcast to i64 implies the input is a 64-bit float here -- TODO(review): confirm no f32 callers.
bool LLVMIrConstructor::EmitSignbit(Inst *inst)
{
    auto num = GetInputValue(inst, 0);
    auto bitcast = builder_.CreateBitCast(num, builder_.getInt64Ty());
    auto cmp = builder_.CreateICmpSLT(bitcast, builder_.getInt64(0));
    ValueMapAdd(inst, cmp);
    return true;
}
780
// IsInteger intrinsic: delegates the "is this FP value an exact integer" check to CreateIsInteger.
bool LLVMIrConstructor::EmitIsInteger(Inst *inst)
{
    auto result = CreateIsInteger(inst, GetInputValue(inst, 0));
    ValueMapAdd(inst, result);
    return true;
}
787
// IsSafeInteger intrinsic: true iff the FP input is an integer AND |value| <= max exactly
// representable integer for its width. Emitted as a short-circuit CFG with a PHI merge:
// the magnitude check only runs when the integer check already passed.
bool LLVMIrConstructor::EmitIsSafeInteger(Inst *inst)
{
    auto &ctx = func_->getContext();
    auto input = GetInputValue(inst, 0);
    ASSERT(input->getType()->isDoubleTy() || input->getType()->isFloatTy());
    auto isInteger = CreateIsInteger(inst, input);

    // Width-dependent bound: 2^53-1 for double, 2^24-1 for float.
    auto maxSafe = input->getType()->isDoubleTy() ? llvm::ConstantFP::get(builder_.getDoubleTy(), MaxIntAsExactDouble())
                                                  : llvm::ConstantFP::get(builder_.getFloatTy(), MaxIntAsExactFloat());

    auto initialBb = GetCurrentBasicBlock();
    auto isSafeIntegerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_safe_integer"), func_);
    auto continueBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_safe_integer_continue"), func_);

    builder_.CreateCondBr(isInteger, isSafeIntegerBb, continueBb);

    SetCurrentBasicBlock(isSafeIntegerBb);
    // fabs(v) <= MaxSafeInteger
    auto inputAbs = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, input);
    auto cmp = builder_.CreateFCmp(llvm::CmpInst::FCMP_OLE, inputAbs, maxSafe);
    builder_.CreateBr(continueBb);

    SetCurrentBasicBlock(continueBb);
    // Merge: false from the non-integer path, the magnitude check's result otherwise.
    auto result = builder_.CreatePHI(builder_.getInt1Ty(), 2U);
    result->addIncoming(builder_.getInt1(false), initialBb);
    result->addIncoming(cmp, isSafeIntegerBb);

    ValueMapAdd(inst, result);
    return true;
}
818
EmitRawBitcastToInt(Inst * inst)819 bool LLVMIrConstructor::EmitRawBitcastToInt(Inst *inst)
820 {
821 llvm::Value *input = GetInputValue(inst, 0);
822 ASSERT_TYPE(input, builder_.getFloatTy());
823 auto result = builder_.CreateBitCast(input, builder_.getInt32Ty());
824 ValueMapAdd(inst, result);
825 return true;
826 }
827
EmitRawBitcastToLong(Inst * inst)828 bool LLVMIrConstructor::EmitRawBitcastToLong(Inst *inst)
829 {
830 llvm::Value *input = GetInputValue(inst, 0);
831 ASSERT_TYPE(input, builder_.getDoubleTy());
832 auto result = builder_.CreateBitCast(input, builder_.getInt64Ty());
833 ValueMapAdd(inst, result);
834 return true;
835 }
836
EmitRawBitcastFromInt(Inst * inst)837 bool LLVMIrConstructor::EmitRawBitcastFromInt(Inst *inst)
838 {
839 llvm::Value *input = GetInputValue(inst, 0);
840 ASSERT_TYPE(input, builder_.getInt32Ty());
841 auto result = builder_.CreateBitCast(input, builder_.getFloatTy());
842 ValueMapAdd(inst, result);
843 return true;
844 }
845
EmitRawBitcastFromLong(Inst * inst)846 bool LLVMIrConstructor::EmitRawBitcastFromLong(Inst *inst)
847 {
848 llvm::Value *input = GetInputValue(inst, 0);
849 ASSERT_TYPE(input, builder_.getInt64Ty());
850 auto result = builder_.CreateBitCast(input, builder_.getDoubleTy());
851 ValueMapAdd(inst, result);
852 return true;
853 }
854
EmitStringGetCharsTlab(Inst * inst)855 bool LLVMIrConstructor::EmitStringGetCharsTlab(Inst *inst)
856 {
857 auto offset = GetGraph()->GetRuntime()->GetArrayU16ClassPointerTlsOffset(GetGraph()->GetArch());
858 auto klass = llvmbackend::runtime_calls::LoadTLSValue(&builder_, arkInterface_, offset, builder_.getPtrTy());
859 auto eid = RuntimeInterface::EntrypointId::STRING_GET_CHARS_TLAB_COMPRESSED;
860 auto result = CreateEntrypointCall(eid, inst,
861 {GetInputValue(inst, 0), GetInputValue(inst, 1), GetInputValue(inst, 2), klass});
862 ASSERT(result->getCallingConv() == llvm::CallingConv::C);
863 result->setCallingConv(llvm::CallingConv::ArkFast4);
864 result->addRetAttr(llvm::Attribute::NonNull);
865 result->addRetAttr(llvm::Attribute::NoAlias);
866 ValueMapAdd(inst, result);
867 return true;
868 }
869
/// Emits lazy string hash-code computation: load the cached hash field and, only when it
/// is zero (not yet computed), call the runtime entrypoint to compute and cache it.
bool LLVMIrConstructor::EmitStringHashCode(Inst *inst)
{
    ASSERT(GetGraph()->GetRuntime()->IsCompressedStringsEnabled());
    auto string = GetInputValue(inst, 0);
    auto offset = coretypes::String::GetHashcodeOffset();
    auto gep = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), string, offset);
    auto hashCode = builder_.CreateLoad(builder_.getInt32Ty(), gep);
    auto isZero = builder_.CreateICmpEQ(hashCode, llvm::Constant::getNullValue(hashCode->getType()));
    auto fastPath = GetCurrentBasicBlock();
    auto slowPath = llvm::BasicBlock::Create(func_->getContext(), "hash_code_slow_path", func_);
    auto continuation = llvm::BasicBlock::Create(func_->getContext(), "hash_code_continuation", func_);
    // A cached (non-zero) hash is the expected common case
    auto branchWeights = llvm::MDBuilder(func_->getContext())
                             .createBranchWeights(llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,
                                                  llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);
    builder_.CreateCondBr(isZero, slowPath, continuation, branchWeights);
    SetCurrentBasicBlock(slowPath);

    auto newHash = CreateEntrypointCall(RuntimeInterface::EntrypointId::STRING_HASH_CODE_COMPRESSED, inst, {string});
    ASSERT(newHash->getCallingConv() == llvm::CallingConv::C);
    newHash->setCallingConv(llvm::CallingConv::ArkFast1);
    builder_.CreateBr(continuation);
    SetCurrentBasicBlock(continuation);

    // Merge the cached and the freshly computed hash values
    auto result = builder_.CreatePHI(hashCode->getType(), 2U);
    result->addIncoming(hashCode, fastPath);
    result->addIncoming(newHash, slowPath);
    ValueMapAdd(inst, result);

    return true;
}
900
EmitWriteTlabStatsSafe(Inst * inst)901 bool LLVMIrConstructor::EmitWriteTlabStatsSafe(Inst *inst)
902 {
903 auto addr = GetInputValue(inst, 0);
904 auto size = GetInputValue(inst, 1);
905 CreateEntrypointCall(RuntimeInterface::EntrypointId::WRITE_TLAB_STATS_NO_BRIDGE, inst, {addr, size});
906
907 return true;
908 }
909
EmitExpandU8U16(Inst * inst)910 bool LLVMIrConstructor::EmitExpandU8U16(Inst *inst)
911 {
912 auto input = GetInputValue(inst, 0);
913 ASSERT(input->getType()->getScalarSizeInBits() == 32U); // has to be f32
914
915 auto srcTy = llvm::VectorType::get(builder_.getInt8Ty(), 4U, false);
916 auto dstTy = llvm::VectorType::get(builder_.getInt16Ty(), 4U, false);
917
918 auto val = builder_.CreateBitCast(input, srcTy);
919 auto result = builder_.CreateZExt(val, dstTy);
920 ValueMapAdd(inst, result);
921
922 return true;
923 }
924
EmitReverseHalfWords(Inst * inst)925 bool LLVMIrConstructor::EmitReverseHalfWords(Inst *inst)
926 {
927 auto input = GetInputValue(inst, 0);
928 ASSERT(input->getType()->getScalarSizeInBits() == 64U); // has to be f64
929 auto srcTy = llvm::VectorType::get(builder_.getInt16Ty(), 4U, false);
930 auto val = builder_.CreateBitCast(input, srcTy);
931
932 const llvm::SmallVector<int, 4> indices = {3, 2, 1, 0};
933 auto result = builder_.CreateShuffleVector(val, indices);
934 ValueMapAdd(inst, result);
935
936 return true;
937 }
938
EmitAtomicByteOr(Inst * inst)939 bool LLVMIrConstructor::EmitAtomicByteOr(Inst *inst)
940 {
941 auto addr = GetInputValue(inst, 0);
942 auto value = GetInputValue(inst, 1);
943 auto byteVal = builder_.CreateTrunc(value, builder_.getInt8Ty());
944 auto op = llvm::AtomicRMWInst::BinOp::Or;
945 builder_.CreateAtomicRMW(op, addr, byteVal, llvm::MaybeAlign(0), llvm::AtomicOrdering::Monotonic);
946
947 return true;
948 }
949
GetMappedValue(Inst * inst,DataType::Type type)950 llvm::Value *LLVMIrConstructor::GetMappedValue(Inst *inst, DataType::Type type)
951 {
952 ASSERT(inputMap_.count(inst) == 1);
953 auto &typeMap = inputMap_.at(inst);
954 ASSERT(typeMap.count(type) == 1);
955 auto result = typeMap.at(type);
956 ASSERT(result != nullptr);
957 return result;
958 }
959
GetInputValue(Inst * inst,size_t index,bool skipCoerce)960 llvm::Value *LLVMIrConstructor::GetInputValue(Inst *inst, size_t index, bool skipCoerce)
961 {
962 auto input = inst->GetInput(index).GetInst();
963 auto type = inst->GetInputType(index);
964 ASSERT(type != DataType::NO_TYPE);
965
966 if (skipCoerce) {
967 ASSERT(input->GetType() == DataType::UINT64 || input->GetType() == DataType::INT64);
968 type = input->GetType();
969 }
970
971 if (input->IsConst()) {
972 return GetInputValueFromConstant(input->CastToConstant(), type);
973 }
974 if (input->GetOpcode() == Opcode::NullPtr) {
975 auto llvmType = GetExactType(DataType::REFERENCE);
976 ASSERT(llvmType == builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
977 return llvm::Constant::getNullValue(llvmType);
978 }
979 return GetMappedValue(input, type);
980 }
981
GetInputValueFromConstant(ConstantInst * constant,DataType::Type pandaType)982 llvm::Value *LLVMIrConstructor::GetInputValueFromConstant(ConstantInst *constant, DataType::Type pandaType)
983 {
984 auto llvmType = GetExactType(pandaType);
985 if (pandaType == DataType::FLOAT64) {
986 double value = constant->GetDoubleValue();
987 return llvm::ConstantFP::get(llvmType, value);
988 }
989 if (pandaType == DataType::FLOAT32) {
990 float value = constant->GetFloatValue();
991 return llvm::ConstantFP::get(llvmType, value);
992 }
993 if (pandaType == DataType::POINTER) {
994 auto cval = static_cast<int64_t>(constant->GetIntValue());
995 auto integer = builder_.getInt64(cval);
996 return builder_.CreateIntToPtr(integer, builder_.getPtrTy());
997 }
998 if (DataType::IsTypeNumeric(pandaType)) {
999 auto isSigned = DataType::IsTypeSigned(pandaType);
1000 auto cval = static_cast<int64_t>(constant->GetIntValue());
1001 return llvm::ConstantInt::get(llvmType, cval, isSigned);
1002 }
1003 if (DataType::IsReference(pandaType) && constant->GetRawValue() == 0) {
1004 return llvm::Constant::getNullValue(llvmType);
1005 }
1006 UNREACHABLE();
1007 }
1008
1009 // Initializers. BuildIr calls them
1010
/// Create an (empty) LLVM basic block for every compiler BasicBlock reachable on the
/// "normal" (non-exceptional) path, in RPO. For interpreter handlers, additionally
/// registers blocks containing the INTERPRETER_RETURN intrinsic with the ark interface.
void LLVMIrConstructor::BuildBasicBlocks(Marker normal)
{
    auto &context = func_->getContext();
    for (auto block : graph_->GetBlocksRPO()) {
        if (block->IsEndBlock()) {
            continue;
        }
        // Skip blocks not on the normal path (not marked by the caller)
        if (!block->IsMarked(normal)) {
            continue;
        }
        auto bb = llvm::BasicBlock::Create(context, llvm::StringRef("bb") + llvm::Twine(block->GetId()), func_);
        AddBlock(block, bb);
        // Checking that irtoc handler contains a return instruction
        if (!graph_->GetMode().IsInterpreter()) {
            continue;
        }
        for (auto inst : block->AllInsts()) {
            if (inst->IsIntrinsic() && inst->CastToIntrinsic()->GetIntrinsicId() ==
                                           RuntimeInterface::IntrinsicId::INTRINSIC_INTERPRETER_RETURN) {
                arkInterface_->AppendIrtocReturnHandler(func_->getName());
            }
        }
    }
}
1035
/// Translate every instruction of every normal-path block into LLVM IR, then add the
/// fall-through branches that the compiler IR leaves implicit.
void LLVMIrConstructor::BuildInstructions(Marker normal)
{
    for (auto block : graph_->GetBlocksRPO()) {
        if (block->IsEndBlock() || !block->IsMarked(normal)) {
            continue;
        }
        SetCurrentBasicBlock(GetTailBlock(block));
        for (auto inst : block->AllInsts()) {
            auto bb = GetCurrentBasicBlock();
            // Stop once a visitor has terminated the block with `unreachable`:
            // anything after it would be dead and invalid IR
            if (!bb->empty() && llvm::isa<llvm::UnreachableInst>(*(bb->rbegin()))) {
                break;
            }
            VisitInstruction(inst);
        }

        // A try-begin block falls through to its first (normal) successor; the
        // exceptional successor is intentionally not translated
        if (block->IsTryBegin()) {
            ASSERT(block->GetSuccsBlocks().size() > 1);
            ASSERT(block->GetSuccessor(0)->IsMarked(normal) && !block->GetSuccessor(1)->IsMarked(normal));
            ASSERT(!block->GetLastInst()->IsControlFlow());
            builder_.CreateBr(GetHeadBlock(block->GetSuccessor(0)));
        }
        // Single-successor blocks (and try-end blocks) need an explicit branch to the
        // successor's head block, unless the successor is the end block
        if (((block->GetSuccsBlocks().size() == 1 && !block->GetSuccessor(0)->IsEndBlock()) || block->IsTryEnd()) &&
            block->GetSuccessor(0)->IsMarked(normal)) {
            ASSERT(block->IsTryEnd() ? !block->GetSuccessor(1)->IsMarked(normal) : true);
            builder_.CreateBr(GetHeadBlock(block->GetSuccessor(0)));
        }
        // Visitors may have split the block; remember the current tail for PHI wiring
        ReplaceTailBlock(block, GetCurrentBasicBlock());
    }
}
1065
FillPhiInputs(BasicBlock * block,Marker normal)1066 void LLVMIrConstructor::FillPhiInputs(BasicBlock *block, Marker normal)
1067 {
1068 if (block->IsStartBlock() || block->IsEndBlock() || !block->IsMarked(normal)) {
1069 return;
1070 }
1071 for (auto inst : block->PhiInsts()) {
1072 auto phi = llvm::cast<llvm::PHINode>(GetMappedValue(inst, inst->GetType()));
1073 for (size_t i = 0; i < inst->GetInputsCount(); i++) {
1074 auto inputBlock = inst->CastToPhi()->GetPhiInputBb(i);
1075 if (!inputBlock->IsMarked(normal)) {
1076 continue;
1077 }
1078
1079 auto input = GetInputValue(inst, i);
1080 phi->addIncoming(input, GetTailBlock(inputBlock));
1081 }
1082 }
1083 }
1084
1085 // Creator functions for internal usage
1086
/// Build a call to a runtime entrypoint. For managed code that can throw or deoptimize,
/// the call carries a save-state operand bundle so the runtime can reconstruct the frame.
/// `inst` is the IR instruction being lowered; used for save state and ark-call wrapping.
llvm::CallInst *LLVMIrConstructor::CreateEntrypointCall(RuntimeInterface::EntrypointId eid, Inst *inst,
                                                        llvm::ArrayRef<llvm::Value *> args)
{
    // Register the entrypoint's function type so later passes can resolve the call
    arkInterface_->GetOrCreateRuntimeFunctionType(func_->getContext(), func_->getParent(),
                                                  LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
                                                  static_cast<LLVMArkInterface::EntrypointId>(eid));

    // Sanity assert to not misuse this scenario
    ASSERT(inst != nullptr);

    llvm::CallInst *call;
    auto threadReg = GetThreadRegValue();
    if (GetGraph()->SupportManagedCode() && (inst->CanThrow() || inst->CanDeoptimize())) {
        bool noReturn = GetGraph()->GetRuntime()->IsEntrypointNoreturn(eid);
        call = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
            &builder_, threadReg, arkInterface_, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid), args,
            CreateSaveStateBundle(inst, noReturn));
    } else {
        call = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
            &builder_, threadReg, arkInterface_, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid), args);
    }
    if (inst->RequireState()) {
        WrapArkCall(inst, call);
    }
    return call;
}
1113
CreateIntrinsicCall(Inst * inst)1114 llvm::CallInst *LLVMIrConstructor::CreateIntrinsicCall(Inst *inst)
1115 {
1116 auto entryId = inst->CastToIntrinsic()->GetIntrinsicId();
1117 auto rtFunctionTy = arkInterface_->GetOrCreateRuntimeFunctionType(
1118 func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::INTRINSIC,
1119 static_cast<LLVMArkInterface::EntrypointId>(entryId));
1120 auto arguments = GetIntrinsicArguments(rtFunctionTy, inst->CastToIntrinsic());
1121 return CreateIntrinsicCall(inst, entryId, arguments);
1122 }
1123
/// Build a call to a panda runtime intrinsic with explicit arguments. Throwing intrinsics
/// carry a save-state bundle; intrinsics that may trigger GC get a "needs-extra-safepoint"
/// attribute so a later pass can insert a safepoint after the call.
llvm::CallInst *LLVMIrConstructor::CreateIntrinsicCall(Inst *inst, RuntimeInterface::IntrinsicId entryId,
                                                       llvm::ArrayRef<llvm::Value *> arguments)
{
    auto rtFunctionTy = arkInterface_->GetOrCreateRuntimeFunctionType(
        func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::INTRINSIC,
        static_cast<LLVMArkInterface::EntrypointId>(entryId));
    auto rtFunctionName = arkInterface_->GetRuntimeFunctionName(LLVMArkInterface::RuntimeCallType::INTRINSIC,
                                                                static_cast<LLVMArkInterface::EntrypointId>(entryId));
    // The callee is addressed by its slot in the runtime's intrinsic table
    auto intrinsicOffset = static_cast<int>(entryId);
    auto callee = llvmbackend::runtime_calls::GetPandaRuntimeFunctionCallee(intrinsicOffset, rtFunctionTy, &builder_,
                                                                           rtFunctionName);
    llvm::CallInst *result;
    if (inst->CanThrow()) {
        ASSERT_PRINT(inst->GetSaveState() != nullptr, "Intrinsic with can_throw does not have a savestate");
        result = builder_.CreateCall(callee, arguments, CreateSaveStateBundle(inst));
    } else {
        result = builder_.CreateCall(callee, arguments);
    }
    SetIntrinsicParamAttrs(result, inst->CastToIntrinsic(), arguments);

    if (inst->RequireState()) {
        WrapArkCall(inst, result);
    }
    // Mark both the call and the enclosing function so the safepoint pass can find them
    if (NeedSafePointAfterIntrinsic(entryId) && g_options.IsCompilerUseSafepoint()) {
        result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-extra-safepoint"));
        result->getFunction()->addFnAttr("needs-extra-safepoint");
    }

    return result;
}
1154
1155 // Helper function. Regardless of where we use `alloca` to pass args, we want to do all of them in the
1156 // first basic block. This should allow LLVM to combine allocas into prologue
/// Allocate (or reuse) a single entry-block alloca used as an argument buffer.
/// If the entry block already starts with a compatible alloca, it is reused and only
/// grown when `arraySize` exceeds its current element count — so all call sites share
/// one buffer sized for the largest request.
llvm::Value *LLVMIrConstructor::CreateAllocaForArgs(llvm::Type *type, uint32_t arraySize)
{
    auto currentBb = GetCurrentBasicBlock();
    auto &firstBb = func_->getEntryBlock();
    // NOTE(review): assumes the entry block is non-empty (getFirstNonPHI != nullptr) —
    // appears guaranteed by construction order; confirm
    auto inst = firstBb.getFirstNonPHI();
    builder_.SetInsertPoint(inst);
    llvm::AllocaInst *result;

    if (llvm::isa<llvm::AllocaInst>(inst)) {
        auto alloca = llvm::cast<llvm::AllocaInst>(inst);
        ASSERT(alloca->getAllocatedType() == type);
        ASSERT(llvm::isa<llvm::ConstantInt>(alloca->getArraySize()));
        auto allocaSize = llvm::cast<llvm::ConstantInt>(alloca->getArraySize())->getZExtValue();
        // Grow the shared buffer in place if this request is larger
        if (allocaSize < arraySize) {
            alloca->setOperand(0, builder_.getInt32(arraySize));
        }
        result = alloca;
    } else {
        result = builder_.CreateAlloca(type, builder_.getInt32(arraySize), "call_arg_buffer");
    }

    // Restore the caller's insertion point
    SetCurrentBasicBlock(currentBb);
    return result;
}
1181
CreateFastPathCall(Inst * inst,RuntimeInterface::EntrypointId eid,llvm::ArrayRef<llvm::Value * > args)1182 llvm::CallInst *LLVMIrConstructor::CreateFastPathCall(Inst *inst, RuntimeInterface::EntrypointId eid,
1183 llvm::ArrayRef<llvm::Value *> args)
1184 {
1185 auto call = CreateEntrypointCall(eid, inst, args);
1186 ASSERT(call->getCallingConv() == llvm::CallingConv::C);
1187 call->setCallingConv(GetFastPathCallingConv(args.size()));
1188 return call;
1189 }
1190
1191 // IsInstance Helpers
1192
CreateIsInstanceEntrypointCall(Inst * inst)1193 llvm::Value *LLVMIrConstructor::CreateIsInstanceEntrypointCall(Inst *inst)
1194 {
1195 auto object = GetInputValue(inst, 0);
1196 auto klass = GetInputValue(inst, 1);
1197 return CreateEntrypointCall(RuntimeInterface::EntrypointId::IS_INSTANCE, inst, {object, klass});
1198 }
1199
CreateIsInstanceObject(llvm::Value * klassObj)1200 llvm::Value *LLVMIrConstructor::CreateIsInstanceObject(llvm::Value *klassObj)
1201 {
1202 auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1203 auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1204 auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, typeOffset);
1205 auto typeLdr = builder_.CreateLoad(builder_.getInt8Ty(), typePtr);
1206 auto cmpLocal =
1207 builder_.CreateICmpEQ(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdr, builder_.getInt32Ty()));
1208 return builder_.CreateZExt(cmpLocal, builder_.getInt8Ty(), "isinstance_object_out");
1209 }
1210
/// IsInstance for ClassType::OTHER_CLASS: walk the object's class hierarchy via the
/// base-class link, returning 1 if `klassId` is found among the ancestors and 0 when
/// the chain ends at null. Returns the i8 result.
llvm::Value *LLVMIrConstructor::CreateIsInstanceOther(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
{
    auto initialBb = GetCurrentBasicBlock();
    auto &ctx = func_->getContext();
    auto loopHeaderBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop_h"), func_);
    auto loopBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop"), func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_out"), func_);
    builder_.CreateBr(loopHeaderBb);

    SetCurrentBasicBlock(loopHeaderBb);
    auto typeOffset = GetGraph()->GetRuntime()->GetClassBaseOffset(GetGraph()->GetArch());
    // loopPhi: current class in the walk — klassObj on entry, the loaded base class on
    // each back edge from loopBb
    auto loopPhi = builder_.CreatePHI(builder_.getPtrTy(), 2U, "loop_in");
    auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), loopPhi, typeOffset);
    auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
    auto cmpLocal = builder_.CreateIsNotNull(typeLdr);
    loopPhi->addIncoming(klassObj, initialBb);
    loopPhi->addIncoming(typeLdr, loopBb);
    // Null base class means the chain is exhausted: exit with "false"
    builder_.CreateCondBr(cmpLocal, loopBb, outBb);

    SetCurrentBasicBlock(loopBb);
    cmpLocal = builder_.CreateICmpEQ(typeLdr, klassId);
    // Match found: exit with "true"; otherwise continue walking up
    builder_.CreateCondBr(cmpLocal, outBb, loopHeaderBb);

    SetCurrentBasicBlock(outBb);
    auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 2U, "isinstance_other_out");
    outPhi->addIncoming(builder_.getInt8(1), loopBb);
    outPhi->addIncoming(builder_.getInt8(0), loopHeaderBb);
    return outPhi;
}
1240
/// IsInstance for ClassType::ARRAY_CLASS: compare the component types of the object's
/// class and the target class. Non-array object (null component) → 0; equal component
/// types → 1; otherwise fall back to the runtime entrypoint. Returns the i8 result.
llvm::Value *LLVMIrConstructor::CreateIsInstanceArray(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
{
    auto &ctx = func_->getContext();
    auto initialBb = GetCurrentBasicBlock();
    auto secondLoadBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_second_load"), func_);
    auto slowPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_slow_path"), func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_out"), func_);

    // Component type of the object's class; null means it is not an array at all
    auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
    auto typePtrObj = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
    auto typeLdrObj = builder_.CreateLoad(builder_.getPtrTy(), typePtrObj);
    auto cmpLocal = builder_.CreateIsNotNull(typeLdrObj);
    builder_.CreateCondBr(cmpLocal, secondLoadBb, outBb);

    SetCurrentBasicBlock(secondLoadBb);
    auto typePtrKlass = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassId, componentOffset);
    auto typeLdrKlass = builder_.CreateLoad(builder_.getPtrTy(), typePtrKlass);
    cmpLocal = builder_.CreateICmpEQ(typeLdrObj, typeLdrKlass);
    auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
        llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,    // if other comparisons are enough
        llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT); // else
    builder_.CreateCondBr(cmpLocal, outBb, slowPath, branchWeights);

    SetCurrentBasicBlock(slowPath);
    auto slowPathResult = CreateIsInstanceEntrypointCall(inst);
    builder_.CreateBr(outBb);

    SetCurrentBasicBlock(outBb);
    auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 3U, "isinstance_array_out");
    outPhi->addIncoming(builder_.getInt8(0), initialBb);
    outPhi->addIncoming(builder_.getInt8(1), secondLoadBb);
    outPhi->addIncoming(slowPathResult, slowPath);
    return outPhi;
}
1275
/// IsInstance for ClassType::ARRAY_OBJECT_CLASS: the object matches iff its class has a
/// non-null component type (it is an array) whose type byte equals the reference-type
/// mask (elements are references). Returns the i8 result.
llvm::Value *LLVMIrConstructor::CreateIsInstanceArrayObject(Inst *inst, llvm::Value *klassObj)
{
    auto &ctx = func_->getContext();
    auto initialBb = GetCurrentBasicBlock();
    auto checkMaskBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_object_check_mask"), func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_object_out"), func_);

    // Non-array classes (null component type) can never match
    auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
    auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
    auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
    auto cmpLocal = builder_.CreateIsNotNull(typeLdr);
    builder_.CreateCondBr(cmpLocal, checkMaskBb, outBb);

    SetCurrentBasicBlock(checkMaskBb);
    // Check the element class's type byte against the reference-type mask
    auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
    auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
    auto typePtrElem = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), typeLdr, typeOffset);
    auto typeLdrElem = builder_.CreateLoad(builder_.getInt8Ty(), typePtrElem);
    cmpLocal =
        builder_.CreateICmpEQ(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdrElem, builder_.getInt32Ty()));
    auto cmpExt = builder_.CreateZExt(cmpLocal, builder_.getInt8Ty());
    builder_.CreateBr(outBb);

    SetCurrentBasicBlock(outBb);
    auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 2U, "isinstance_array_object_out");
    outPhi->addIncoming(builder_.getInt8(0), initialBb);
    outPhi->addIncoming(cmpExt, checkMaskBb);
    return outPhi;
}
1305
CreateIsInstanceInnerBlock(Inst * inst,llvm::Value * klassObj,llvm::Value * klassId)1306 llvm::Value *LLVMIrConstructor::CreateIsInstanceInnerBlock(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1307 {
1308 auto klassType = inst->CastToIsInstance()->GetClassType();
1309 switch (klassType) {
1310 case ClassType::OBJECT_CLASS:
1311 return CreateIsInstanceObject(klassObj);
1312 case ClassType::OTHER_CLASS:
1313 return CreateIsInstanceOther(inst, klassObj, klassId);
1314 case ClassType::ARRAY_CLASS:
1315 return CreateIsInstanceArray(inst, klassObj, klassId);
1316 case ClassType::ARRAY_OBJECT_CLASS:
1317 return CreateIsInstanceArrayObject(inst, klassObj);
1318 case ClassType::INTERFACE_CLASS:
1319 return CreateIsInstanceEntrypointCall(inst);
1320 default:
1321 UNREACHABLE();
1322 }
1323 }
1324
1325 // IsInstance Helpers End
1326
1327 // CheckCast Helpers
1328
CreateCheckCastEntrypointCall(Inst * inst)1329 void LLVMIrConstructor::CreateCheckCastEntrypointCall(Inst *inst)
1330 {
1331 auto object = GetInputValue(inst, 0);
1332 auto klass = GetInputValue(inst, 1);
1333 if (inst->CanDeoptimize()) {
1334 auto call = CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_CAST_DEOPTIMIZE, inst, {object, klass});
1335 call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
1336 } else {
1337 CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_CAST, inst, {object, klass});
1338 }
1339 }
1340
CreateCheckCastObject(Inst * inst,llvm::Value * klassObj,llvm::Value * klassId)1341 void LLVMIrConstructor::CreateCheckCastObject(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1342 {
1343 auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1344 auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1345 auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, typeOffset);
1346 auto typeLdr = builder_.CreateLoad(builder_.getInt8Ty(), typePtr);
1347 auto src = GetInputValue(inst, 0);
1348 auto zext = builder_.CreateZExt(typeLdr, builder_.getInt32Ty());
1349 auto deoptimize = builder_.CreateICmpNE(builder_.getInt32(typeMask), zext);
1350
1351 auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
1352 CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1353 }
1354
/// CheckCast for ClassType::OTHER_CLASS: walk the object's class hierarchy via the
/// base-class link; exit when `klassId` is found, deoptimize with a class-cast
/// exception when the chain ends at null.
void LLVMIrConstructor::CreateCheckCastOther(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
{
    auto initialBb = GetCurrentBasicBlock();
    auto src = GetInputValue(inst, 0);

    auto &ctx = func_->getContext();
    auto loopHeaderBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop_h"), func_);
    auto loopBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop"), func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_out"), func_);
    builder_.CreateBr(loopHeaderBb);

    SetCurrentBasicBlock(loopHeaderBb);
    auto typeOffset = GetGraph()->GetRuntime()->GetClassBaseOffset(GetGraph()->GetArch());
    // loopPhi: current class in the walk — klassObj on entry, the loaded base class on
    // each back edge from loopBb
    auto loopPhi = builder_.CreatePHI(builder_.getPtrTy(), 2U, "loop_in");
    auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), loopPhi, typeOffset);
    auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
    // Null base class: chain exhausted without a match — this cast must fail
    auto deoptimize = builder_.CreateIsNull(typeLdr);
    loopPhi->addIncoming(klassObj, initialBb);
    loopPhi->addIncoming(typeLdr, loopBb);

    auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
    CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
    builder_.CreateBr(loopBb);

    SetCurrentBasicBlock(loopBb);
    auto cmp = builder_.CreateICmpEQ(typeLdr, klassId);
    // Match found: cast succeeds; otherwise continue walking up
    builder_.CreateCondBr(cmp, outBb, loopHeaderBb);

    SetCurrentBasicBlock(outBb);
}
1385
/// CheckCast for ClassType::ARRAY_CLASS: deoptimize when the object's class has no
/// component type (not an array); succeed when the component types match; otherwise
/// fall back to the runtime check.
void LLVMIrConstructor::CreateCheckCastArray(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
{
    auto src = GetInputValue(inst, 0);

    auto &ctx = func_->getContext();
    auto slowPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_slow_path"), func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_out"), func_);

    auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
    auto typePtrObj = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
    auto typeLdrObj = builder_.CreateLoad(builder_.getPtrTy(), typePtrObj);

    // Null component type: the object's class is not an array — the cast must fail
    auto deoptimize = builder_.CreateIsNull(typeLdrObj);
    auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
    CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});

    auto typePtrKlass = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassId, componentOffset);
    auto typeLdrKlass = builder_.CreateLoad(builder_.getPtrTy(), typePtrKlass);
    auto cmpLocal = builder_.CreateICmpEQ(typeLdrObj, typeLdrKlass);
    auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
        llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,    // if other comparisons are enough
        llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT); // else
    builder_.CreateCondBr(cmpLocal, outBb, slowPath, branchWeights);

    SetCurrentBasicBlock(slowPath);
    CreateCheckCastEntrypointCall(inst);
    builder_.CreateBr(outBb);

    SetCurrentBasicBlock(outBb);
}
1416
/// CheckCast for ClassType::ARRAY_OBJECT_CLASS: deoptimize when the object's class has
/// no component type (not an array), or when the element class's type byte does not
/// match the reference-type mask (elements are not references).
void LLVMIrConstructor::CreateCheckCastArrayObject(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
{
    auto src = GetInputValue(inst, 0);

    auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
    auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
    auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);

    // First check: the object's class must be an array (non-null component type)
    auto deoptimize = builder_.CreateIsNull(typeLdr);
    auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
    CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});

    // Second check: the element class must be a reference type
    auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
    auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
    auto typePtrElem = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), typeLdr, typeOffset);
    auto typeLdrElem = builder_.CreateLoad(builder_.getInt8Ty(), typePtrElem);
    deoptimize =
        builder_.CreateICmpNE(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdrElem, builder_.getInt32Ty()));
    CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
}
1437
CreateCheckCastInner(Inst * inst,llvm::Value * klassObj,llvm::Value * klassId)1438 void LLVMIrConstructor::CreateCheckCastInner(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1439 {
1440 auto klassType = inst->CastToCheckCast()->GetClassType();
1441 switch (klassType) {
1442 case ClassType::OBJECT_CLASS:
1443 CreateCheckCastObject(inst, klassObj, klassId);
1444 break;
1445 case ClassType::OTHER_CLASS:
1446 CreateCheckCastOther(inst, klassObj, klassId);
1447 break;
1448 case ClassType::ARRAY_CLASS:
1449 CreateCheckCastArray(inst, klassObj, klassId);
1450 break;
1451 case ClassType::ARRAY_OBJECT_CLASS:
1452 CreateCheckCastArrayObject(inst, klassObj, klassId);
1453 break;
1454 case ClassType::INTERFACE_CLASS:
1455 default:
1456 UNREACHABLE();
1457 }
1458 }
1459
1460 // CheckCast Helpers End
1461
/// Emits AArch64 inline assembly restoring the registers in `regMask` (GP "x" or FP "d"
/// registers, chosen by `fp`) from the stack before an interpreter return. Registers are
/// restored in pairs with `ldp` where possible, a trailing single with `ldr`.
/// `offset` is in double-word slots from SP.
void LLVMIrConstructor::CreateInterpreterReturnRestoreRegs(RegMask &regMask, size_t offset, bool fp)
{
    int32_t slotSize = PointerSize(GetGraph()->GetArch());
    int32_t dslotSize = slotSize * 2U;
    int32_t totalSize = regMask.count() * slotSize;
    auto startRegOffset = offset * DOUBLE_WORD_SIZE_BYTES;
    auto endRegOffset = startRegOffset + std::max(0, totalSize - dslotSize);

    // Offsets must be 8-byte aligned and at most 504 to be encodable as ldp/ldr
    // immediates; 504 is the limit used here (NOTE(review): matches the AArch64 ldp
    // scaled-offset range for 8-byte slots — confirm)
    constexpr uint32_t MAX_REPR_VAL = 504U;
    bool representable = startRegOffset <= MAX_REPR_VAL && (startRegOffset & 0x7U) == 0 &&
                         endRegOffset <= MAX_REPR_VAL && (endRegOffset & 0x7U) == 0;

    // If not representable, materialize the base address in x16 and index from zero
    std::string baseReg = representable ? "sp" : "x16";
    if (!representable) {
        CreateInt32ImmAsm(&builder_,
                          std::string("add x16, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
                          startRegOffset);
        startRegOffset = 0;
    }

    // Consume the mask two registers at a time (ldp), or one (ldr) when a single remains
    while (regMask.count() > 0) {
        std::string asmString = regMask.count() / 2U > 0 ? "ldp " : "ldr ";
        auto first = regMask.GetMinRegister();
        asmString += (fp ? "d" : "x") + std::to_string(first);
        regMask ^= 1U << first;
        if (regMask.count() > 0) {
            asmString += ", ";
            auto second = regMask.GetMinRegister();
            asmString += (fp ? "d" : "x") + std::to_string(second);
            regMask ^= 1U << second;
        }
        asmString += ", [";
        asmString += baseReg;
        asmString += ", $0]";
        // SP-relative offsets get the patch-comment so the final stack adjustment
        // can be fixed up later
        if (representable) {
            asmString += LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT;
        }
        CreateInt32ImmAsm(&builder_, asmString, startRegOffset);
        startRegOffset += dslotSize;
    }
}
1503
CreateLoadClassById(Inst * inst,uint32_t typeId,bool init)1504 llvm::Value *LLVMIrConstructor::CreateLoadClassById(Inst *inst, uint32_t typeId, bool init)
1505 {
1506 auto builtin = init ? LoadInitClass(func_->getParent()) : LoadClass(func_->getParent());
1507 auto slotIdVal = builder_.getInt32(arkInterface_->GetClassIndexInAotGot(GetGraph()->GetAotData(), typeId, init));
1508
1509 // remember two functions, later we will use it in panda_runtime_lowering pass
1510 arkInterface_->GetOrCreateRuntimeFunctionType(
1511 func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1512 static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::CLASS_RESOLVER));
1513 arkInterface_->GetOrCreateRuntimeFunctionType(
1514 func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1515 static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::CLASS_INIT_RESOLVER));
1516
1517 auto callInst = builder_.CreateCall(builtin, {builder_.getInt32(typeId), slotIdVal}, CreateSaveStateBundle(inst));
1518 WrapArkCall(inst, callInst);
1519 return callInst;
1520 }
1521
CreateBinaryOp(Inst * inst,llvm::Instruction::BinaryOps opcode)1522 llvm::Value *LLVMIrConstructor::CreateBinaryOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
1523 {
1524 llvm::Value *x = GetInputValue(inst, 0);
1525 llvm::Value *y = GetInputValue(inst, 1);
1526
1527 if (x->getType()->isPointerTy()) {
1528 if (y->getType()->isPointerTy()) {
1529 ASSERT(opcode == llvm::Instruction::Sub);
1530 x = builder_.CreatePtrToInt(x, builder_.getInt64Ty());
1531 y = builder_.CreatePtrToInt(y, builder_.getInt64Ty());
1532 return builder_.CreateBinOp(opcode, x, y);
1533 }
1534 if (y->getType()->isIntegerTy()) {
1535 ASSERT(opcode == llvm::Instruction::Add);
1536 ASSERT(x->getType()->isPointerTy());
1537 return builder_.CreateInBoundsGEP(builder_.getInt8Ty(), x, y);
1538 }
1539 UNREACHABLE();
1540 }
1541 if (IsTypeNumeric(inst->GetType())) {
1542 // Peephole can remove casts and instead put a constant with the wrong type
1543 // so we need to create them here.
1544 x = CoerceValue(x, inst->GetInputType(0), inst->GetType());
1545 y = CoerceValue(y, inst->GetInputType(1), inst->GetType());
1546 }
1547 return builder_.CreateBinOp(opcode, x, y);
1548 }
1549
CreateBinaryImmOp(Inst * inst,llvm::Instruction::BinaryOps opcode,uint64_t c)1550 llvm::Value *LLVMIrConstructor::CreateBinaryImmOp(Inst *inst, llvm::Instruction::BinaryOps opcode, uint64_t c)
1551 {
1552 ASSERT(IsTypeNumeric(inst->GetType()));
1553 llvm::Value *x = GetInputValue(inst, 0);
1554 if (x->getType()->isPointerTy()) {
1555 ASSERT(x->getType()->isPointerTy());
1556 ASSERT(opcode == llvm::Instruction::Add || opcode == llvm::Instruction::Sub);
1557 if (opcode == llvm::Instruction::Sub) {
1558 c = -c;
1559 }
1560 return builder_.CreateConstInBoundsGEP1_64(builder_.getInt8Ty(), x, c);
1561 }
1562 llvm::Value *y = CoerceValue(builder_.getInt64(c), DataType::INT64, inst->GetType());
1563 return builder_.CreateBinOp(opcode, x, y);
1564 }
1565
CreateShiftOp(Inst * inst,llvm::Instruction::BinaryOps opcode)1566 llvm::Value *LLVMIrConstructor::CreateShiftOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
1567 {
1568 llvm::Value *x = GetInputValue(inst, 0);
1569 llvm::Value *y = GetInputValue(inst, 1);
1570 auto targetType = inst->GetType();
1571 bool target64 = (targetType == DataType::UINT64) || (targetType == DataType::INT64);
1572 auto constexpr SHIFT32_RANGE = 0x1f;
1573 auto constexpr SHIFT64_RANGE = 0x3f;
1574
1575 y = builder_.CreateBinOp(llvm::Instruction::And, y,
1576 llvm::ConstantInt::get(y->getType(), target64 ? SHIFT64_RANGE : SHIFT32_RANGE));
1577
1578 return builder_.CreateBinOp(opcode, x, y);
1579 }
1580
// Lowers signed division/remainder with special handling for divisor == -1:
// x / -1 == -x and x % -1 == 0, which also sidesteps the INT_MIN / -1 corner case.
llvm::Value *LLVMIrConstructor::CreateSignDivMod(Inst *inst, llvm::Instruction::BinaryOps opcode)
{
    ASSERT(opcode == llvm::Instruction::SDiv || opcode == llvm::Instruction::SRem);
    llvm::Value *x = GetInputValue(inst, 0);
    llvm::Value *y = GetInputValue(inst, 1);
    auto &ctx = func_->getContext();
    // Result to use when y == -1: negation for SDiv, zero for SRem.
    auto eqM1 = builder_.CreateICmpEQ(y, llvm::ConstantInt::get(y->getType(), -1));
    auto m1Result = opcode == llvm::Instruction::SDiv ? builder_.CreateNeg(x) : llvm::ConstantInt::get(y->getType(), 0);

    // Select for AArch64, as 'sdiv' correctly handles the INT_MIN / -1 case
    if (GetGraph()->GetArch() == Arch::AARCH64) {
        auto result = builder_.CreateBinOp(opcode, x, y);
        auto selectVal = builder_.CreateSelect(eqM1, m1Result, result);
        // Tag the select with AARCH64_SDIV_INST metadata so later passes can
        // recognize it as part of this sdiv lowering pattern.
        if (auto selectInst = llvm::dyn_cast<llvm::SelectInst>(selectVal)) {
            auto *metadata = llvm::MDNode::get(ctx, {});
            auto sdiv = ark::llvmbackend::LLVMArkInterface::AARCH64_SDIV_INST;
            selectInst->setMetadata(sdiv, metadata);
        }
        return selectVal;
    }

    // X86_64 solution with control flow: branch around the division entirely when
    // y == -1 (x86 'idiv' raises an exception on INT_MIN / -1 — NOTE(review):
    // behavior assumed from the AArch64 comment above; confirm).
    auto currBb = GetCurrentBasicBlock();
    auto notM1Bb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_normal"), func_);
    auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_cont"), func_);
    builder_.CreateCondBr(eqM1, contBb, notM1Bb);

    // Normal path: perform the actual division.
    SetCurrentBasicBlock(notM1Bb);
    auto result = builder_.CreateBinOp(opcode, x, y);
    builder_.CreateBr(contBb);

    // Merge: pick the precomputed -1 result or the division result.
    SetCurrentBasicBlock(contBb);
    auto resultPhi = builder_.CreatePHI(y->getType(), 2U);
    resultPhi->addIncoming(m1Result, currBb);
    resultPhi->addIncoming(result, notM1Bb);
    return resultPhi;
}
1618
CreateFloatComparison(CmpInst * cmpInst,llvm::Value * x,llvm::Value * y)1619 llvm::Value *LLVMIrConstructor::CreateFloatComparison(CmpInst *cmpInst, llvm::Value *x, llvm::Value *y)
1620 {
1621 // if x is less than y then return -1
1622 // else return zero extend of (x > y)
1623 llvm::CmpInst::Predicate greaterThanPredicate;
1624 llvm::CmpInst::Predicate lessThanPredicate;
1625 if (cmpInst->IsFcmpg()) {
1626 // if x or y is nan then greaterThanPredicate yields true
1627 greaterThanPredicate = llvm::CmpInst::FCMP_UGT;
1628 lessThanPredicate = llvm::CmpInst::FCMP_OLT;
1629 } else if (cmpInst->IsFcmpl()) {
1630 greaterThanPredicate = llvm::CmpInst::FCMP_OGT;
1631 // if x or y is nan then lessThanPredicate yields true
1632 lessThanPredicate = llvm::CmpInst::FCMP_ULT;
1633 } else {
1634 ASSERT_PRINT(false, "cmpInst must be either Fcmpg, or Fcmpl");
1635 UNREACHABLE();
1636 }
1637 // x > y || (inst == Fcmpg && (x == NaN || y == NaN))
1638 auto greaterThan = builder_.CreateFCmp(greaterThanPredicate, x, y);
1639 // x < y || (inst == Fcmpl && (x == NaN || y == NaN))
1640 auto lessThan = builder_.CreateFCmp(lessThanPredicate, x, y);
1641 auto comparison = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
1642 auto negativeOne = builder_.getInt32(-1);
1643 return builder_.CreateSelect(lessThan, negativeOne, comparison);
1644 }
1645
CreateIntegerComparison(CmpInst * inst,llvm::Value * x,llvm::Value * y)1646 llvm::Value *LLVMIrConstructor::CreateIntegerComparison(CmpInst *inst, llvm::Value *x, llvm::Value *y)
1647 {
1648 ASSERT(x->getType() == y->getType());
1649 llvm::Value *greaterThan;
1650 llvm::Value *lessThan;
1651
1652 if (DataType::IsTypeSigned(inst->GetOperandsType())) {
1653 greaterThan = builder_.CreateICmpSGT(x, y);
1654 lessThan = builder_.CreateICmpSLT(x, y);
1655 } else {
1656 greaterThan = builder_.CreateICmpUGT(x, y);
1657 lessThan = builder_.CreateICmpULT(x, y);
1658 }
1659 auto castComparisonResult = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
1660 auto negativeOne = builder_.getInt32(-1);
1661 return builder_.CreateSelect(lessThan, negativeOne, castComparisonResult);
1662 }
1663
CreateNewArrayWithRuntime(Inst * inst)1664 llvm::Value *LLVMIrConstructor::CreateNewArrayWithRuntime(Inst *inst)
1665 {
1666 auto type = GetInputValue(inst, 0);
1667 auto size = ToSizeT(GetInputValue(inst, 1));
1668 auto eid = RuntimeInterface::EntrypointId::CREATE_ARRAY;
1669 auto result = CreateEntrypointCall(eid, inst, {type, size});
1670 MarkAsAllocation(result);
1671 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
1672 result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
1673 }
1674 return result;
1675 }
1676
CreateNewObjectWithRuntime(Inst * inst)1677 llvm::Value *LLVMIrConstructor::CreateNewObjectWithRuntime(Inst *inst)
1678 {
1679 auto initClass = GetInputValue(inst, 0);
1680 auto eid = RuntimeInterface::EntrypointId::CREATE_OBJECT_BY_CLASS;
1681 auto result = CreateEntrypointCall(eid, inst, {initClass});
1682 auto srcInst = inst->GetInput(0).GetInst();
1683 if (srcInst->GetOpcode() != Opcode::LoadAndInitClass ||
1684 GetGraph()->GetRuntime()->CanUseTlabForClass(srcInst->CastToLoadAndInitClass()->GetClass())) {
1685 MarkAsAllocation(result);
1686 }
1687 if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
1688 result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
1689 }
1690 return result;
1691 }
1692
CreateResolveVirtualCallBuiltin(Inst * inst,llvm::Value * thiz,uint32_t methodId)1693 llvm::Value *LLVMIrConstructor::CreateResolveVirtualCallBuiltin(Inst *inst, llvm::Value *thiz, uint32_t methodId)
1694 {
1695 ASSERT(thiz->getType()->isPointerTy());
1696
1697 auto builtin = ResolveVirtual(func_->getParent());
1698 arkInterface_->GetOrCreateRuntimeFunctionType(
1699 func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1700 static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_VIRTUAL_CALL_AOT));
1701 arkInterface_->GetOrCreateRuntimeFunctionType(
1702 func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1703 static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::INTF_INLINE_CACHE));
1704
1705 auto zero = builder_.getInt64(0);
1706 auto arrayType = llvm::ArrayType::get(builder_.getInt64Ty(), 0);
1707 auto offset = builder_.CreateIntToPtr(zero, arrayType->getPointerTo());
1708 auto callInst =
1709 builder_.CreateCall(builtin, {thiz, ToSizeT(builder_.getInt32(methodId)), offset}, CreateSaveStateBundle(inst));
1710 WrapArkCall(inst, callInst);
1711 return builder_.CreateIntToPtr(callInst, builder_.getPtrTy());
1712 }
1713
CreateLoadManagedClassFromClass(llvm::Value * klass)1714 llvm::Value *LLVMIrConstructor::CreateLoadManagedClassFromClass(llvm::Value *klass)
1715 {
1716 ASSERT(klass->getType()->isPointerTy());
1717 auto dataOff = GetGraph()->GetRuntime()->GetManagedClassOffset(GetGraph()->GetArch());
1718 auto ptrData = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klass, dataOff);
1719 return builder_.CreateLoad(builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE), ptrData);
1720 }
1721
CreateIsInf(llvm::Value * input)1722 llvm::Value *LLVMIrConstructor::CreateIsInf(llvm::Value *input)
1723 {
1724 llvm::Type *type = nullptr;
1725 uint64_t infMaskInt;
1726 if (input->getType()->isFloatTy()) {
1727 constexpr uint32_t INF_MASK_FLOAT = 0xff000000;
1728 infMaskInt = INF_MASK_FLOAT;
1729 type = builder_.getInt32Ty();
1730 } else {
1731 ASSERT_TYPE(input, builder_.getDoubleTy());
1732 constexpr uint64_t INF_MASK_DOUBLE = 0xffe0000000000000;
1733 infMaskInt = INF_MASK_DOUBLE;
1734 type = builder_.getInt64Ty();
1735 }
1736
1737 auto infMask = llvm::ConstantInt::get(type, infMaskInt);
1738 auto one = llvm::ConstantInt::get(type, 1);
1739 auto castedInput = builder_.CreateBitCast(input, type);
1740 auto shiftedInput = builder_.CreateShl(castedInput, one);
1741 auto result = builder_.CreateICmpEQ(shiftedInput, infMask);
1742 return result;
1743 }
1744
// Emits a check that the float/double `input` holds an integral value:
// infinities are excluded up front, everything else tests
// fabs(v - trunc(v)) <= epsilon. NaN inputs fail the ordered compare and
// therefore yield false as well.
llvm::Value *LLVMIrConstructor::CreateIsInteger(Inst *inst, llvm::Value *input)
{
    auto &ctx = func_->getContext();
    ASSERT(input->getType()->isDoubleTy() || input->getType()->isFloatTy());

    auto isInf = CreateIsInf(input);
    // Machine epsilon of the matching width serves as the comparison tolerance.
    auto epsilon = input->getType()->isDoubleTy()
                       ? llvm::ConstantFP::get(builder_.getDoubleTy(), std::numeric_limits<double>::epsilon())
                       : llvm::ConstantFP::get(builder_.getFloatTy(), std::numeric_limits<float>::epsilon());

    auto initialBb = GetCurrentBasicBlock();
    auto notInfBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_integer_not_inf"), func_);
    auto continueBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_integer_continue"), func_);

    // Infinite inputs jump straight to the merge block and produce 'false' there.
    builder_.CreateCondBr(isInf, continueBb, notInfBb);

    SetCurrentBasicBlock(notInfBb);
    // fabs(v - trunc(v)) <= epsilon
    auto truncated = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::trunc, input);
    auto diff = builder_.CreateFSub(input, truncated);
    auto diffAbs = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, diff);
    auto cmp = builder_.CreateFCmp(llvm::CmpInst::FCMP_OLE, diffAbs, epsilon);
    builder_.CreateBr(continueBb);

    // Merge block: false from the infinity path, the comparison otherwise.
    SetCurrentBasicBlock(continueBb);
    auto result = builder_.CreatePHI(builder_.getInt1Ty(), 2U);
    result->addIncoming(builder_.getInt1(false), initialBb);
    result->addIncoming(cmp, notInfBb);

    return result;
}
1776
CreateCastToInt(Inst * inst)1777 llvm::Value *LLVMIrConstructor::CreateCastToInt(Inst *inst)
1778 {
1779 llvm::Value *input = GetInputValue(inst, 0);
1780 auto sourceType = input->getType();
1781 auto targetType = inst->GetType();
1782
1783 ASSERT_DO(sourceType->isFloatTy() || sourceType->isDoubleTy(),
1784 std::cerr << "Unexpected source type: " << GetTypeName(sourceType) << ". Should be a float or double."
1785 << std::endl);
1786
1787 auto llvmId = DataType::IsTypeSigned(targetType) ? llvm::Intrinsic::fptosi_sat : llvm::Intrinsic::fptoui_sat;
1788 ArenaVector<llvm::Type *> intrinsicTypes(GetGraph()->GetLocalAllocator()->Adapter());
1789 intrinsicTypes.assign({GetExactType(targetType), sourceType});
1790 return builder_.CreateIntrinsic(llvmId, intrinsicTypes, {input}, nullptr);
1791 }
1792
CreateLoadWithOrdering(Inst * inst,llvm::Value * value,llvm::AtomicOrdering ordering,const llvm::Twine & name)1793 llvm::Value *LLVMIrConstructor::CreateLoadWithOrdering(Inst *inst, llvm::Value *value, llvm::AtomicOrdering ordering,
1794 const llvm::Twine &name)
1795 {
1796 auto pandaType = inst->GetType();
1797 llvm::Type *type = GetExactType(pandaType);
1798
1799 auto load = builder_.CreateLoad(type, value, false, name); // C-like volatile is not applied
1800 if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
1801 auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(type);
1802 load->setOrdering(ordering);
1803 load->setAlignment(llvm::Align(alignment));
1804 }
1805
1806 return load;
1807 }
1808
CreateStoreWithOrdering(llvm::Value * value,llvm::Value * ptr,llvm::AtomicOrdering ordering)1809 llvm::Value *LLVMIrConstructor::CreateStoreWithOrdering(llvm::Value *value, llvm::Value *ptr,
1810 llvm::AtomicOrdering ordering)
1811 {
1812 auto store = builder_.CreateStore(value, ptr, false); // C-like volatile is not applied
1813 if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
1814 auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(value->getType());
1815 store->setAlignment(llvm::Align(alignment));
1816 store->setOrdering(ordering);
1817 }
1818 return store;
1819 }
1820
CreateZerosCount(Inst * inst,llvm::Intrinsic::ID llvmId)1821 llvm::Value *LLVMIrConstructor::CreateZerosCount(Inst *inst, llvm::Intrinsic::ID llvmId)
1822 {
1823 ASSERT(IsSafeCast(inst, 0));
1824 auto zeroDefined = llvm::ConstantInt::getFalse(func_->getContext());
1825 return builder_.CreateBinaryIntrinsic(llvmId, GetInputValue(inst, 0), zeroDefined, nullptr);
1826 }
1827
// Lowers round-to-nearest for AArch64 using lround (fcvtas), then patches up the
// one case fcvtas gets wrong: negative halfway values, which must round toward
// positive infinity (add 1 iff input - round(input) == 0.5).
llvm::Value *LLVMIrConstructor::CreateRoundArm64(Inst *inst, bool is64)
{
    auto input = GetInputValue(inst, 0);

    auto sourceType = is64 ? builder_.getDoubleTy() : builder_.getFloatTy();
    auto targetType = is64 ? builder_.getInt64Ty() : builder_.getInt32Ty();

    double constexpr HALF = 0.5;
    auto half = llvm::ConstantFP::get(sourceType, HALF);
    auto zero = is64 ? builder_.getInt64(0) : builder_.getInt32(0);

    auto initialBb = GetCurrentBasicBlock();
    auto &ctx = func_->getContext();
    auto module = func_->getParent();

    // lround - fcvtas instruction (positive solved fine, NaN mapped to 0, but negatives ties wrong way)
    auto decl = llvm::Intrinsic::getDeclaration(module, llvm::Intrinsic::lround, {targetType, sourceType});
    llvm::Value *round = llvm::CallInst::Create(decl, input, "", initialBb);

    // Check if rounded value less than zero (if not negative rounding is done)
    auto negative = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "neg"), func_);
    auto done = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "cont"), func_);
    auto lessThan = builder_.CreateICmpSLT(round, zero);
    builder_.CreateCondBr(lessThan, negative, done);

    // Negative input case, add 1 iff "input - round(input) == 0.5"
    SetCurrentBasicBlock(negative);
    // frinta instruction
    auto floatRound = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::round, input, nullptr);
    auto sub = builder_.CreateBinOp(llvm::Instruction::FSub, input, floatRound);
    auto one = is64 ? builder_.getInt64(1) : builder_.getInt32(1);
    auto add = builder_.CreateBinOp(llvm::Instruction::Add, round, one);
    auto equal = builder_.CreateFCmp(llvm::CmpInst::FCMP_OEQ, sub, half);
    auto roundMayInc = builder_.CreateSelect(equal, add, round);
    builder_.CreateBr(done);

    // Continue block: merge the untouched result with the possibly-incremented one.
    SetCurrentBasicBlock(done);
    auto roundPhi = builder_.CreatePHI(targetType, 2U);
    roundPhi->addIncoming(round, initialBb);
    roundPhi->addIncoming(roundMayInc, negative);
    return roundPhi;
}
1871
// Allocates a string in the TLAB from `length` chars of `array` starting at
// `offset`. A constant-zero offset selects the cheaper zero-based entrypoint
// (one argument fewer, hence ArkFast3 instead of ArkFast4).
llvm::Value *LLVMIrConstructor::CreateNewStringFromCharsTlab(Inst *inst, llvm::Value *offset, llvm::Value *length,
                                                             llvm::Value *array)
{
    auto entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_CHAR_ARRAY_TLAB_COMPRESSED;
    ArenaVector<llvm::Value *> arguments(GetGraph()->GetLocalAllocator()->Adapter());
    auto callConv = llvm::CallingConv::ArkFast3;
    if (llvm::isa<llvm::Constant>(offset) && llvm::cast<llvm::Constant>(offset)->isNullValue()) {
        entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_ZERO_BASED_CHAR_ARRAY_TLAB_COMPRESSED;
    } else {
        arguments.push_back(offset);
        callConv = llvm::CallingConv::ArkFast4;
    }
    arguments.push_back(length);
    arguments.push_back(array);
    // The String class pointer is read from its thread-local-storage slot.
    auto klassOffset = GetGraph()->GetRuntime()->GetStringClassPointerTlsOffset(GetGraph()->GetArch());
    auto klass = llvmbackend::runtime_calls::LoadTLSValue(&builder_, arkInterface_, klassOffset, builder_.getPtrTy());
    arguments.push_back(klass);
    auto result = CreateEntrypointCall(entryId, inst, arguments);
    // Retag the default C calling convention with the matching Ark fast-path one.
    ASSERT(result->getCallingConv() == llvm::CallingConv::C);
    result->setCallingConv(callConv);
    MarkAsAllocation(result);
    return result;
}
1895
CreateNewStringFromStringTlab(Inst * inst,llvm::Value * stringVal)1896 llvm::Value *LLVMIrConstructor::CreateNewStringFromStringTlab(Inst *inst, llvm::Value *stringVal)
1897 {
1898 auto entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_STRING_TLAB_COMPRESSED;
1899 auto result = CreateEntrypointCall(entryId, inst, {stringVal});
1900 ASSERT(result->getCallingConv() == llvm::CallingConv::C);
1901 result->setCallingConv(llvm::CallingConv::ArkFast1);
1902 MarkAsAllocation(result);
1903 return result;
1904 }
1905
CreateLaunchArgsArray(CallInst * callInst,uint32_t argStart)1906 llvm::Value *LLVMIrConstructor::CreateLaunchArgsArray(CallInst *callInst, uint32_t argStart)
1907 {
1908 auto callArgsCount = callInst->GetInputsCount() - argStart - 1U; // last arg is a SaveState
1909 auto callArgs = CreateAllocaForArgs(builder_.getInt64Ty(), callArgsCount);
1910
1911 // Store actual call arguments
1912 for (size_t i = 0; i < callArgsCount; i++) {
1913 auto arg = GetInputValue(callInst, argStart + i);
1914
1915 auto type = callInst->GetInputType(argStart + i);
1916 auto typeSize = DataType::GetTypeSize(type, GetGraph()->GetArch());
1917 if (typeSize < DataType::GetTypeSize(DataType::INT32, GetGraph()->GetArch())) {
1918 arg = CoerceValue(arg, type, DataType::INT32);
1919 }
1920
1921 auto gep = builder_.CreateConstInBoundsGEP1_32(builder_.getInt64Ty(), callArgs, i);
1922 builder_.CreateStore(arg, gep);
1923 }
1924 return callArgs;
1925 }
1926
// Lowers a coroutine-launch call. Builds the entrypoint argument list according
// to the launch flavour (resolved static/virtual pass the resolved method
// explicitly; CallLaunchVirtual resolves it through the vtable), then calls
// CREATE_LAUNCH_STATIC_COROUTINE or CREATE_LAUNCH_VIRTUAL_COROUTINE.
void LLVMIrConstructor::CreateLaunchCall([[maybe_unused]] CallInst *callInst)
{
#ifdef PANDA_WITH_ETS
    ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());

    if (callInst->GetOpcode() == Opcode::CallResolvedLaunchStatic ||
        callInst->GetOpcode() == Opcode::CallResolvedLaunchVirtual) {
        // Resolved launches: inputs 0 and 1 are passed through as-is.
        args.push_back(GetInputValue(callInst, 0));
        args.push_back(GetInputValue(callInst, 1));

        // Actual call arguments start after the resolved-call prefix inputs.
        auto argStart = callInst->GetOpcode() == Opcode::CallResolvedLaunchVirtual ? 3U : 2U;
        auto callArgs = CreateLaunchArgsArray(callInst, argStart);

        args.push_back(callArgs);

        if (callInst->GetOpcode() == Opcode::CallResolvedLaunchVirtual) {
            args.push_back(GetInputValue(callInst, 2U));
        }
    } else {
        if (callInst->GetOpcode() != Opcode::CallLaunchVirtual) {
            ASSERT_DO(false, (std::cerr << "Unexpected Launch Call: \n", callInst->Dump(&std::cerr, true)));
            UNREACHABLE();
        }

        // Vtable-based resolution relies on class-hierarchy analysis being enabled.
        ASSERT(GetGraph()->GetAotData()->GetUseCha());

        auto method = ark::llvmbackend::utils::CreateLoadMethodUsingVTable(
            GetInputValue(callInst, 1), func_, callInst->GetCallMethodId(), &builder_, arkInterface_);
        args.push_back(method);
        args.push_back(GetInputValue(callInst, 0));

        auto callArgs = CreateLaunchArgsArray(callInst, 2U);
        args.push_back(callArgs);
        args.push_back(GetInputValue(callInst, 1));
    }

    auto eid = callInst->IsStaticLaunchCall() ? RuntimeInterface::EntrypointId::CREATE_LAUNCH_STATIC_COROUTINE
                                              : RuntimeInterface::EntrypointId::CREATE_LAUNCH_VIRTUAL_COROUTINE;
    auto entryCall = CreateEntrypointCall(eid, callInst, args);
    if (callInst->GetOpcode() == Opcode::CallResolvedLaunchVirtual) {
        // Record the original method id so later passes can identify this launch site.
        entryCall->addFnAttr(llvm::Attribute::get(entryCall->getContext(), "original-method-id",
                                                  std::to_string(callInst->GetCallMethodId())));
        entryCall->addFnAttr(llvm::Attribute::get(entryCall->getContext(), "is-launch-call"));
    }
    if (callInst->GetFlag(inst_flags::MEM_BARRIER)) {
        entryCall->addFnAttr(llvm::Attribute::get(entryCall->getContext(), "needs-mem-barrier"));
    }
#else
    UNREACHABLE();
#endif
}
1978
// Splits control flow on the `deoptimize` predicate: the unlikely path either
// deoptimizes into the interpreter (when the instruction allows it) or calls the
// `exception` entrypoint with `arguments`; the likely path continues normally.
void LLVMIrConstructor::CreateDeoptimizationBranch(Inst *inst, llvm::Value *deoptimize,
                                                   RuntimeInterface::EntrypointId exception,
                                                   llvm::ArrayRef<llvm::Value *> arguments)
{
    ASSERT_TYPE(deoptimize, builder_.getInt1Ty());
    ASSERT(exception != RuntimeInterface::EntrypointId::DEOPTIMIZE || inst->CanDeoptimize());
    auto &ctx = func_->getContext();

    /* Create basic blocks for continuation and throw */
    auto continuation = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "cont"), func_);
    auto throwPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "deopt"), func_);

    /* Creating branch */
    auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
        llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,  // if unlikely(deoptimize) then throw
        llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);   // else continue
    auto branch = builder_.CreateCondBr(deoptimize, throwPath, continuation, branchWeights);

    /* Creating throw block */
    SetCurrentBasicBlock(throwPath);

    if (inst->CanDeoptimize()) {
        // If inst CanDeoptimize then call Deoptimize to bail out into interpreter, do not throw exception
        exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
        // Pack the deoptimization reason and the instruction id into one word.
        auto type = helpers::ToUnderlying(GetDeoptimizationType(inst)) |
                    (inst->GetId() << MinimumBitsToStore(DeoptimizeType::COUNT));
        auto call = CreateEntrypointCall(exception, inst, {builder_.getInt64(type)});
        call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
    } else {
        if (exception == RuntimeInterface::EntrypointId::NULL_POINTER_EXCEPTION &&
            compiler::g_options.IsCompilerImplicitNullCheck()) {
            // Implicit null checks: mark the branch so the backend can fold it into
            // a faulting memory access instead of an explicit compare-and-branch.
            ASSERT(inst->IsNullCheck());
            auto *metadata = llvm::MDNode::get(ctx, {});
            branch->setMetadata(llvm::LLVMContext::MD_make_implicit, metadata);
        }
        CreateEntrypointCall(exception, inst, arguments);
    }
    // Neither deoptimization nor throwing returns to compiled code.
    builder_.CreateUnreachable();

    /* Continue */
    SetCurrentBasicBlock(continuation);
}
2021
// Builds the "deopt" operand bundle for a throwing/deoptimizing instruction.
// The bundle encodes the whole inlining chain, outermost frame first; each frame
// contributes: [function, methodId, bytecode pc, flags, vreg count, vreg entries...]
// (vreg entries are appended by EncodeSaveStateInputs). Returns an empty bundle
// when deoptimization support is disabled.
ArenaVector<llvm::OperandBundleDef> LLVMIrConstructor::CreateSaveStateBundle(Inst *inst, bool noReturn)
{
    ASSERT_PRINT(inst->CanThrow() || inst->CanDeoptimize(),
                 "Attempt to create a regmap for instruction that doesn't throw (or deoptimize)");
    ArenaVector<llvm::OperandBundleDef> bundle(GetGraph()->GetLocalAllocator()->Adapter());
    if (!arkInterface_->DeoptsEnabled()) {
        return bundle;
    }
    ArenaVector<llvm::Value *> vals(GetGraph()->GetLocalAllocator()->Adapter());
    ArenaVector<SaveStateInst *> saveStates(GetGraph()->GetLocalAllocator()->Adapter());

    // Collect the save-state chain (innermost first) by walking caller links.
    auto saveState = inst->GetSaveState();
    while (saveState != nullptr) {
        saveStates.push_back(saveState);
        auto caller = saveState->GetCallerInst();
        saveState = caller == nullptr ? nullptr : caller->GetSaveState();
    }

    // Emit frames outermost-first.
    std::reverse(saveStates.begin(), saveStates.end());
    for (auto ss : saveStates) {
        auto method = ss->GetMethod();
        auto caller = ss->GetCallerInst();
        if (caller != nullptr) {
            method = caller->GetCallMethod();
        }
        ASSERT(method != nullptr);
        // Put a function as a delimiter in inlining chain
        auto function = GetOrCreateFunctionForCall(caller, method);
        ASSERT(function != nullptr);
        vals.push_back(function);
        // Put methodId needed for inline info
        vals.push_back(builder_.getInt32(GetGraph()->GetRuntime()->GetMethodId(method)));
        // Put bytecode pc for inlining chain as well
        vals.push_back(builder_.getInt32(ss->GetPc()));
        // Flags: bit 0 = regmap required, bit 1 = no-return call site.
        uint32_t flags = (inst->RequireRegMap() ? 1U : 0U) | (noReturn ? 2U : 0U);
        vals.push_back(builder_.getInt32(flags));
        // Put a number of interpreter registers for the method
        auto vregCount = arkInterface_->GetVirtualRegistersCount(method);
        vals.push_back(builder_.getInt32(vregCount));

        EncodeSaveStateInputs(&vals, ss);
    }
    bundle.assign({llvm::OperandBundleDef {"deopt", vals}});
    return bundle;
}
2068
// Appends one (index, type, value) triple per live virtual register of `ss` to
// `vals`, skipping BRIDGE registers. Non-pointer values are widened to i64;
// INT32-typed registers are coerced before the widening.
void LLVMIrConstructor::EncodeSaveStateInputs(ArenaVector<llvm::Value *> *vals, SaveStateInst *ss)
{
    for (size_t i = 0; i < ss->GetInputsCount(); ++i) {
        if (ss->GetVirtualRegister(i).Value() == VirtualRegister::BRIDGE) {
            continue;
        }
        // Put a virtual register index
        vals->push_back(builder_.getInt32(ss->GetVirtualRegister(i).Value()));
        // Put a virtual register type
        auto metatype = IrTypeToMetainfoType(ss->GetInputType(i));
        uint32_t undertype = static_cast<std::underlying_type_t<VRegInfo::Type>>(metatype);
        vals->push_back(builder_.getInt32(undertype));
        // Put a virtual register value
        auto value = GetInputValue(ss, i);
        if (!value->getType()->isPointerTy()) {
            ASSERT(value->getType()->getScalarSizeInBits() <= 64U);
            // Reinterpret (not convert) the scalar bits as an integer of the same width.
            auto intVal = builder_.CreateBitCast(value, builder_.getIntNTy(value->getType()->getScalarSizeInBits()));
            if (metatype == VRegInfo::Type::INT32) {
                intVal = CoerceValue(intVal, ss->GetInputType(i), DataType::INT32);
            }
            vals->push_back(builder_.CreateZExt(intVal, builder_.getInt64Ty()));
        } else {
            // Pointers are recorded as-is.
            vals->push_back(value);
        }
    }
}
2095
EncodeInlineInfo(Inst * inst,llvm::Instruction * instruction)2096 void LLVMIrConstructor::EncodeInlineInfo(Inst *inst, llvm::Instruction *instruction)
2097 {
2098 SaveStateInst *saveState = inst->GetSaveState();
2099 llvm::SmallVector<SaveStateInst *> saveStates;
2100 bool first = true;
2101 while (saveState != nullptr) {
2102 if (!first) {
2103 saveStates.push_back(saveState);
2104 }
2105 first = false;
2106 saveState = saveState->GetCallerInst() == nullptr ? nullptr : saveState->GetCallerInst()->GetSaveState();
2107 }
2108 llvm::reverse(saveStates);
2109 for (auto ss : saveStates) {
2110 auto method = ss->GetMethod();
2111 auto methodName = arkInterface_->GetUniqMethodName(method);
2112 auto function = func_->getParent()->getFunction(methodName);
2113 auto caller = ss->GetCallerInst();
2114 if (caller != nullptr) {
2115 method = ss->GetCallerInst()->GetCallMethod();
2116 function = GetOrCreateFunctionForCall(caller, method);
2117 }
2118 ASSERT(function != nullptr);
2119 debugData_->AppendInlinedAt(instruction, function, ss->GetPc());
2120 }
2121 }
2122
// Emits the GC pre-write barrier (SATB flavour) for a reference store into `mem`.
// Uses the PreWRB builtin when builtin barriers are enabled outside Irtoc mode;
// otherwise emits the barrier inline.
void LLVMIrConstructor::CreatePreWRB(Inst *inst, llvm::Value *mem)
{
    auto barrierType = GetGraph()->GetRuntime()->GetPreType();
    auto isVolatile = IsVolatileMemInst(inst);
    if (barrierType == mem::BarrierType::PRE_WRB_NONE) {
        // The active GC needs no pre-barrier; only valid for managed code.
        ASSERT(GetGraph()->SupportManagedCode());
        return;
    }
    ASSERT(barrierType == mem::BarrierType::PRE_SATB_BARRIER);

    if (llvmbackend::g_options.IsLlvmBuiltinWrb() && !arkInterface_->IsIrtocMode()) {
        auto builtin = llvmbackend::builtins::PreWRB(func_->getParent(), mem->getType()->getPointerAddressSpace());
        builder_.CreateCall(builtin, {mem, builder_.getInt1(isVolatile)});
        return;
    }
    // Inline path: EmitPreWRB branches to `outBb` once the barrier work is done.
    auto &ctx = func_->getContext();
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "pre_wrb_out"), func_);
    llvmbackend::gc_barriers::EmitPreWRB(&builder_, mem, isVolatile, outBb, arkInterface_, GetThreadRegValue());
}
2142
// Emits the GC post-write barrier after storing a reference `value` at
// `mem` + `offset`. Skipped entirely when the stored value is statically null.
void LLVMIrConstructor::CreatePostWRB(Inst *inst, llvm::Value *mem, llvm::Value *offset, llvm::Value *value)
{
    auto barrierType = GetGraph()->GetRuntime()->GetPostType();
    if (barrierType == mem::BarrierType::POST_WRB_NONE) {
        // The active GC needs no post-barrier; only valid for managed code.
        ASSERT(GetGraph()->SupportManagedCode());
        return;
    }
    ASSERT(barrierType == mem::BarrierType::POST_INTERGENERATIONAL_BARRIER ||
           barrierType == mem::BarrierType::POST_INTERREGION_BARRIER);

    // This lowering handles single-value stores only.
    Inst *secondValue;
    Inst *val = InstStoredValue(inst, &secondValue);
    ASSERT(secondValue == nullptr);

    // Storing null creates no inter-region/inter-generation reference.
    if (val->GetOpcode() == Opcode::NullPtr) {
        return;
    }

    bool irtoc = arkInterface_->IsIrtocMode();
    if (!irtoc && llvmbackend::g_options.IsLlvmBuiltinWrb()) {
        auto builtin = llvmbackend::builtins::PostWRB(func_->getParent(), mem->getType()->getPointerAddressSpace());
        builder_.CreateCall(builtin, {mem, offset, value});
        return;
    }
    // Inline path; Irtoc-mode x86_64 additionally needs the real frame register.
    auto frame = (irtoc && GetGraph()->GetArch() == Arch::X86_64) ? GetRealFrameRegValue() : nullptr;
    llvmbackend::gc_barriers::EmitPostWRB(&builder_, mem, offset, value, arkInterface_, GetThreadRegValue(), frame);
}
2170
CreateMemoryFence(memory_order::Order order)2171 llvm::Value *LLVMIrConstructor::CreateMemoryFence(memory_order::Order order)
2172 {
2173 llvm::AtomicOrdering ordering;
2174 switch (order) {
2175 case memory_order::RELEASE:
2176 ordering = llvm::AtomicOrdering::Release;
2177 break;
2178 case memory_order::ACQUIRE:
2179 ordering = llvm::AtomicOrdering::Acquire;
2180 break;
2181 case memory_order::FULL:
2182 ordering = llvm::AtomicOrdering::SequentiallyConsistent;
2183 break;
2184 default:
2185 UNREACHABLE();
2186 }
2187 return builder_.CreateFence(ordering);
2188 }
2189
CreateCondition(ConditionCode cc,llvm::Value * x,llvm::Value * y)2190 llvm::Value *LLVMIrConstructor::CreateCondition(ConditionCode cc, llvm::Value *x, llvm::Value *y)
2191 {
2192 if (cc == CC_TST_EQ || cc == CC_TST_NE) {
2193 auto tst = builder_.CreateBinOp(llvm::Instruction::And, x, y);
2194 return (cc == CC_TST_EQ) ? builder_.CreateIsNull(tst) : builder_.CreateIsNotNull(tst);
2195 }
2196 return builder_.CreateICmp(ICmpCodeConvert(cc), x, y);
2197 }
2198
CreateIf(Inst * inst,llvm::Value * cond,bool likely,bool unlikely)2199 void LLVMIrConstructor::CreateIf(Inst *inst, llvm::Value *cond, bool likely, bool unlikely)
2200 {
2201 llvm::MDNode *weights = nullptr;
2202 auto constexpr LIKELY = llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT;
2203 auto constexpr UNLIKELY = llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT;
2204 if (likely) {
2205 weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(LIKELY, UNLIKELY);
2206 } else if (unlikely) {
2207 weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(UNLIKELY, LIKELY);
2208 }
2209 builder_.CreateCondBr(cond, GetHeadBlock(inst->GetBasicBlock()->GetTrueSuccessor()),
2210 GetHeadBlock(inst->GetBasicBlock()->GetFalseSuccessor()), weights);
2211 }
2212
/**
 * Emit a tail call from a FastPath into an external entrypoint identified by immediate id.
 *
 * The callee is looked up by the external-method name derived from the second immediate.
 * If it is not yet declared in the module, a declaration mirroring the caller's own
 * signature is created; otherwise the callee's (possibly shorter) signature is used and
 * the last two caller arguments are forwarded as-is (per GetEntryFunctionType these
 * trailing args are ThreadReg and RealFP for FastPaths).
 */
llvm::CallInst *LLVMIrConstructor::CreateTailCallFastPath(Inst *inst)
{
    ASSERT(inst->GetInputs().Size() == 0);
    ASSERT(inst->CastToIntrinsic()->HasImms() && inst->CastToIntrinsic()->GetImms().size() == 2U);
    ASSERT(ccValues_.size() == func_->arg_size());

    ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
    // Second immediate selects the external method being tail-called
    uint32_t externalId = inst->CastToIntrinsic()->GetImms()[1];
    auto externalName = GetGraph()->GetRuntime()->GetExternalMethodName(GetGraph()->GetMethod(), externalId);
    auto callee = func_->getParent()->getFunction(externalName);
    llvm::CallingConv::ID cc = 0;
    if (callee == nullptr) {
        // Callee not declared yet: forward every live cc value (falling back to the raw
        // function argument) and declare the callee with the resulting signature
        ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
        for (size_t i = 0; i < func_->arg_size(); i++) {
            args.push_back(i < ccValues_.size() && ccValues_.at(i) != nullptr ? ccValues_.at(i) : func_->getArg(i));
            argTypes.push_back(args.at(i)->getType());
        }
        auto ftype = llvm::FunctionType::get(GetType(inst->GetType()), argTypes, false);
        callee = llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, func_->getParent());
        cc = func_->getCallingConv();
    } else {
        // Known callee: fill its leading parameters from cc values, then pass the caller's
        // two trailing arguments (thread register and real frame pointer) unchanged
        size_t size = func_->arg_size();
        ASSERT(callee->arg_size() <= size);
        for (size_t i = 0; i < callee->arg_size() - 2U; i++) {
            args.push_back(i < ccValues_.size() && ccValues_.at(i) != nullptr ? ccValues_.at(i) : func_->getArg(i));
        }
        args.push_back(func_->getArg(size - 2U));
        args.push_back(func_->getArg(size - 1U));
        cc = callee->getCallingConv();
    }
    auto call = builder_.CreateCall(callee->getFunctionType(), callee, args);
    call->setCallingConv(cc);
    return call;
}
2247
/**
 * Emit a tail call from one interpreter handler into the next (dispatch by pointer).
 *
 * ccValues_ holds the interpreter's live calling-convention state by fixed slot:
 * 0 = pc, 1 = acc, 2 = acc tag, 3 = frame, 4 = dispatch table, then per-arch extras
 * (AArch64: moffset/methodPtr/thread; otherwise: thread and the real frame pointer).
 * Missing acc/acc-tag values are filled with null constants; the call reuses the
 * caller's calling convention so it can be a true tail call.
 */
llvm::CallInst *LLVMIrConstructor::CreateTailCallInterpreter(Inst *inst)
{
    auto ptr = GetInputValue(inst, 0);
    ASSERT_TYPE(ptr, builder_.getPtrTy());
    ASSERT(ccValues_.size() == (GetGraph()->GetArch() == Arch::AARCH64 ? 8U : 7U));
    ASSERT(ccValues_.at(0) != nullptr);  // pc
    static constexpr unsigned ACC = 1U;
    static constexpr unsigned ACC_TAG = 2U;
    ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
    // Parameter types come from live cc values when present, else from the caller's own signature
    for (size_t i = 0; i < cc_.size(); i++) {
        if (ccValues_.at(i) != nullptr) {
            argTypes.push_back(ccValues_.at(i)->getType());
        } else {
            argTypes.push_back(func_->getFunctionType()->getParamType(i));
        }
    }
    // Accumulator and its tag may be dead at this point — pass zeros then
    if (ccValues_.at(ACC) == nullptr) {
        ccValues_[ACC] = llvm::Constant::getNullValue(argTypes[ACC]);
    }
    if (ccValues_.at(ACC_TAG) == nullptr) {
        ccValues_[ACC_TAG] = llvm::Constant::getNullValue(argTypes[ACC_TAG]);
    }
    ASSERT(ccValues_.at(3U) != nullptr);  // frame
    ASSERT(ccValues_.at(4U) != nullptr);  // dispatch
    if (GetGraph()->GetArch() == Arch::AARCH64) {
        ASSERT(ccValues_.at(5U) != nullptr);  // moffset
        ASSERT(ccValues_.at(6U) != nullptr);  // methodPtr
        ASSERT(ccValues_.at(7U) != nullptr);  // thread
    } else {
        static constexpr unsigned REAL_FRAME_POINER = 6U;
        ASSERT(ccValues_.at(5U) != nullptr);  // thread
        ASSERT(ccValues_.at(REAL_FRAME_POINER) == nullptr);  // real frame pointer
        // Real frame pointer is never produced by the handler body — forward the incoming argument
        ccValues_[REAL_FRAME_POINER] = func_->getArg(REAL_FRAME_POINER);
    }

    auto functionType = llvm::FunctionType::get(func_->getReturnType(), argTypes, false);
    auto call = builder_.CreateCall(functionType, ptr, ccValues_);
    call->setCallingConv(func_->getCallingConv());
    return call;
}
2288
/**
 * Compress VECTOR_SIZE utf16 chars to utf8 using an AArch64 NEON deinterleaving load.
 *
 * aarch64_neon_ld2 splits the interleaved byte stream of the u16 source into two
 * vectors; storing only element {0} keeps one byte per char (on little-endian
 * AArch64 that is the low byte of each u16).
 * NOTE(review): assumes every source char fits in one byte — callers must guarantee this.
 */
template <uint32_t VECTOR_SIZE>
void LLVMIrConstructor::CreateCompressUtf16ToUtf8CharsUsingSimd(Inst *inst)
{
    ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
    ASSERT(inst->GetInputType(0) == DataType::POINTER);
    ASSERT(inst->GetInputType(1) == DataType::POINTER);
    static_assert(VECTOR_SIZE == VECTOR_SIZE_8 || VECTOR_SIZE == VECTOR_SIZE_16, "Unexpected vector size");
    auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_neon_ld2;
    auto vecTy = llvm::VectorType::get(builder_.getInt8Ty(), VECTOR_SIZE, false);

    auto u16Ptr = GetInputValue(inst, 0);  // ptr to src array of utf16 chars
    auto u8Ptr = GetInputValue(inst, 1);   // ptr to dst array of utf8 chars
    auto ld2 = llvm::Intrinsic::getDeclaration(func_->getParent(), intrinsicId, {vecTy, u16Ptr->getType()});
    auto vld2 = builder_.CreateCall(ld2, {u16Ptr});
    auto u8Vec = builder_.CreateExtractValue(vld2, {0});
    builder_.CreateStore(u8Vec, u8Ptr);
}
2306
2307 // Getters
2308
/**
 * Build the LLVM function type of the function being compiled.
 *
 * Layout: optional Method* (managed code), fake pointer parameters for every
 * calling-convention slot (interpreter mode), the method's real arguments
 * (exact-width types for FastPaths, widened types otherwise), and for FastPaths
 * two trailing pointers: ThreadReg and RealFP.
 */
llvm::FunctionType *LLVMIrConstructor::GetEntryFunctionType()
{
    ArenaVector<llvm::Type *> argTypes(graph_->GetLocalAllocator()->Adapter());

    // Method*
    if (graph_->SupportManagedCode()) {
        argTypes.push_back(builder_.getPtrTy());
    }

    // ArkInt have fake parameters
    if (graph_->GetMode().IsInterpreter()) {
        for (size_t i = 0; i < cc_.size(); ++i) {
            argTypes.push_back(builder_.getPtrTy());
        }
    }

    // Actual function arguments
    auto method = graph_->GetMethod();
    for (size_t i = 0; i < graph_->GetRuntime()->GetMethodTotalArgumentsCount(method); i++) {
        ASSERT(!graph_->GetMode().IsInterpreter());
        auto type = graph_->GetRuntime()->GetMethodTotalArgumentType(method, i);
        if (graph_->GetMode().IsFastPath()) {
            // FastPaths use exact-width types (see GetExactType's documentation)
            argTypes.push_back(GetExactType(type));
        } else {
            argTypes.push_back(GetType(type));
        }
    }

    // ThreadReg and RealFP for FastPaths
    if (graph_->GetMode().IsFastPath()) {
        argTypes.push_back(builder_.getPtrTy());
        argTypes.push_back(builder_.getPtrTy());
    }

    auto retType = graph_->GetRuntime()->GetMethodReturnType(method);
    ASSERT(graph_->GetMode().IsInterpreter() || retType != DataType::NO_TYPE);
    // Interpreter handlers may have no declared return type — lower that to void
    retType = retType == DataType::NO_TYPE ? DataType::VOID : retType;
    return llvm::FunctionType::get(GetType(retType), makeArrayRef(argTypes.data(), argTypes.size()), false);
}
2348
ToSizeT(llvm::Value * value)2349 llvm::Value *LLVMIrConstructor::ToSizeT(llvm::Value *value)
2350 {
2351 auto entrypointSizeType = GetEntrypointSizeType();
2352 if (value->getType() == entrypointSizeType) {
2353 return value;
2354 }
2355 ASSERT(value->getType()->getIntegerBitWidth() < entrypointSizeType->getBitWidth());
2356 return builder_.CreateZExt(value, entrypointSizeType);
2357 }
2358
ToSSizeT(llvm::Value * value)2359 llvm::Value *LLVMIrConstructor::ToSSizeT(llvm::Value *value)
2360 {
2361 auto entrypointSizeType = GetEntrypointSizeType();
2362 if (value->getType() == entrypointSizeType) {
2363 return value;
2364 }
2365 ASSERT(value->getType()->getIntegerBitWidth() < entrypointSizeType->getBitWidth());
2366 return builder_.CreateSExt(value, entrypointSizeType);
2367 }
2368
GetArgumentsForCall(llvm::Value * callee,CallInst * call,bool skipFirst)2369 ArenaVector<llvm::Value *> LLVMIrConstructor::GetArgumentsForCall(llvm::Value *callee, CallInst *call, bool skipFirst)
2370 {
2371 ASSERT(callee->getType()->isPointerTy());
2372 ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
2373 args.push_back(callee);
2374
2375 // SaveState skipping - last arg
2376 for (size_t i = skipFirst ? 1 : 0; i < call->GetInputsCount() - 1; i++) {
2377 auto arg = GetInputValue(call, i);
2378 auto type = call->GetInputType(i);
2379 if (DataType::IsLessInt32(type)) {
2380 arg = CoerceValue(arg, type, DataType::INT32);
2381 }
2382 args.push_back(arg);
2383 }
2384
2385 return args;
2386 }
2387
/**
 * Build the argument list for an intrinsic call in parameter order:
 * optional Method* first, then immediates materialized as constants of the
 * matching parameter type, then the instruction's inputs (SaveStates skipped).
 */
ArenaVector<llvm::Value *> LLVMIrConstructor::GetIntrinsicArguments(llvm::FunctionType *intrinsicFunctionType,
                                                                    IntrinsicInst *inst)
{
    ASSERT(intrinsicFunctionType != nullptr);
    ASSERT(inst != nullptr);

    ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());

    if (inst->IsMethodFirstInput()) {
        args.push_back(GetMethodArgument());
    }
    if (inst->HasImms()) {
        for (uint64_t imm : inst->GetImms()) {
            // Type each immediate after the parameter slot it lands in
            size_t index = args.size();
            auto type = intrinsicFunctionType->getParamType(index);
            args.push_back(llvm::ConstantInt::get(type, imm));
        }
    }
    for (size_t i = 0; i < inst->GetInputsCount(); i++) {
        // Skip SaveState
        if (inst->GetInput(i).GetInst()->IsSaveState()) {
            continue;
        }
        args.push_back(GetInputValue(inst, i));
    }
    ASSERT(intrinsicFunctionType->getNumParams() == args.size());
    return args;
}
2416
/**
 * Annotate sub-int32 integer parameters of an intrinsic call with ZExt/SExt
 * attributes, so the callee receives properly extended values.
 *
 * The running index i skips the leading Method*/immediate parameters and then
 * tracks each non-SaveState ark input. Note: VECTOR_SIZE_8/VECTOR_SIZE_16 are
 * reused here as the bit widths 8 and 16.
 */
void LLVMIrConstructor::SetIntrinsicParamAttrs(llvm::CallInst *call, IntrinsicInst *inst,
                                               [[maybe_unused]] llvm::ArrayRef<llvm::Value *> args)
{
    size_t i = inst->IsMethodFirstInput() ? 1U : 0;
    if (inst->HasImms()) {
        i += inst->GetImms().size();
    }
#ifndef NDEBUG
    // Leading (method/immediate) parameters must not need extension themselves
    for (size_t j = 0; j < i; j++) {
        ASSERT(!args[j]->getType()->isIntegerTy() || args[j]->getType()->getIntegerBitWidth() > VECTOR_SIZE_16);
    }
#endif
    for (size_t arkIndex = 0; arkIndex < inst->GetInputsCount(); arkIndex++) {
        // Skip SaveState
        if (inst->GetInput(arkIndex).GetInst()->IsSaveState()) {
            continue;
        }
        auto arkType = inst->GetInputType(arkIndex);
        switch (arkType) {
            case DataType::UINT8:
                ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_8);
                call->addParamAttr(i, llvm::Attribute::ZExt);
                break;
            case DataType::UINT16:
                ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_16);
                call->addParamAttr(i, llvm::Attribute::ZExt);
                break;
            case DataType::INT8:
                ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_8);
                call->addParamAttr(i, llvm::Attribute::SExt);
                break;
            case DataType::INT16:
                ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_16);
                call->addParamAttr(i, llvm::Attribute::SExt);
                break;
            case DataType::BOOL:
                // Booleans need no extension attribute
                break;
            default:
                ASSERT(!args[i]->getType()->isIntegerTy() || args[i]->getType()->getIntegerBitWidth() > VECTOR_SIZE_16);
                break;
        }
        i++;
    }
    ASSERT(i == args.size());
}
2462
/**
 * Build the LLVM function type of a callee reached from a Call/CallVirtual inst:
 * optional callee Method* (managed code), implicit receiver for instance methods,
 * declared argument types, and the method's return type.
 */
template <typename T>
llvm::FunctionType *LLVMIrConstructor::GetFunctionTypeForCall(T *inst)
{
    ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());

    if (GetGraph()->SupportManagedCode()) {
        // Callee
        argTypes.push_back(builder_.getPtrTy());
    }

    auto runtime = GetGraph()->GetRuntime();
    auto methodPtr = GetGraph()->GetMethod();
    auto methodId = inst->GetCallMethodId();
    // For instance methods pass implicit object argument
    if (!runtime->IsMethodStatic(methodPtr, methodId)) {
        argTypes.push_back(builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
    }

    for (size_t i = 0; i < runtime->GetMethodArgumentsCount(methodPtr, methodId); i++) {
        auto ptype = runtime->GetMethodArgumentType(methodPtr, methodId, i);
        argTypes.push_back(GetType(ptype));
    }

    auto retType = runtime->GetMethodReturnType(methodPtr, methodId);
    // Ugly fix CallVirtual opcode for SaveState-excluded run codegen statistics
    if (methodPtr == nullptr) {
        retType = inst->GetType();
    }

    if constexpr (std::is_same_v<T, CallInst>) {
        ASSERT(inst->IsInlined() || inst->GetType() == retType);
    }

    return llvm::FunctionType::get(GetType(retType), argTypes, false);
}
2498
GetThreadRegValue()2499 llvm::Value *LLVMIrConstructor::GetThreadRegValue()
2500 {
2501 if (GetGraph()->SupportManagedCode()) {
2502 return llvmbackend::runtime_calls::GetThreadRegValue(&builder_, arkInterface_);
2503 }
2504 auto regInput = std::find(cc_.begin(), cc_.end(), GetThreadReg(GetGraph()->GetArch()));
2505 ASSERT(regInput != cc_.end());
2506 auto threadRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
2507 return threadRegValue;
2508 }
2509
GetRealFrameRegValue()2510 llvm::Value *LLVMIrConstructor::GetRealFrameRegValue()
2511 {
2512 if (GetGraph()->SupportManagedCode()) {
2513 return llvmbackend::runtime_calls::GetRealFrameRegValue(&builder_, arkInterface_);
2514 }
2515 ASSERT(GetGraph()->GetMode().IsFastPath() || GetGraph()->GetArch() == Arch::X86_64);
2516 auto regInput = std::find(cc_.begin(), cc_.end(), GetRealFrameReg(GetGraph()->GetArch()));
2517 ASSERT(regInput != cc_.end());
2518 auto frameRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
2519 return frameRegValue;
2520 }
2521
GetOrCreateFunctionForCall(ark::compiler::CallInst * call,void * method)2522 llvm::Function *LLVMIrConstructor::GetOrCreateFunctionForCall(ark::compiler::CallInst *call, void *method)
2523 {
2524 ASSERT(method != nullptr);
2525 auto module = func_->getParent();
2526 auto methodName = arkInterface_->GetUniqMethodName(method);
2527 auto function = module->getFunction(methodName);
2528 if (function == nullptr) {
2529 auto functionProto = GetFunctionTypeForCall(call);
2530 function = CreateFunctionDeclaration(functionProto, methodName, module);
2531 function->addFnAttr("frame-pointer", "all");
2532 function->addFnAttr(
2533 ark::llvmbackend::LLVMArkInterface::SOURCE_LANG_ATTR,
2534 std::to_string(static_cast<uint8_t>(GetGraph()->GetRuntime()->GetMethodSourceLanguage(method))));
2535 }
2536 return function;
2537 }
2538
/**
 * Return the llvm::Type used to represent @p pandaType in generated code.
 *
 * Integers narrower than 32 bits are widened to i32 here; use GetExactType
 * (documented below) when the precise width is required.
 */
llvm::Type *LLVMIrConstructor::GetType(DataType::Type pandaType)
{
    switch (pandaType) {
        case DataType::VOID:
            return builder_.getVoidTy();
        case DataType::POINTER:
            return builder_.getPtrTy();
        case DataType::REFERENCE:
            // Managed references live in the GC address space
            return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
        case DataType::BOOL:
        case DataType::UINT8:
        case DataType::INT8:
        case DataType::UINT16:
        case DataType::INT16:
        case DataType::UINT32:
        case DataType::INT32:
            return builder_.getInt32Ty();
        case DataType::UINT64:
        case DataType::INT64:
            return builder_.getInt64Ty();
        case DataType::FLOAT32:
            return builder_.getFloatTy();
        case DataType::FLOAT64:
            return builder_.getDoubleTy();
        default:
            ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(pandaType)
                                        << "' to llvm type conversion." << std::endl));
            UNREACHABLE();
    }
}
2569
2570 /**
2571 * Return exact llvm::Type corresponding to the panda type.
2572 *
2573 * Use this method when exact panda type is indeed required.
2574 * It is the case for:
2575 * - array loads and stores. If 32-bit version were used, then the neighbour array elements would be overwritten or read
2576 * - object field loads and stores. The reason the same as in the case above.
2577 * - object static field loads and stores. The reason the same as in the cases above.
2578 * - comparisons. Sometimes boolean is compared with i32 or other integral type.
2579 * The exact type could be obtained from the compareInst->GetOperandsType(),
2580 * which should be used to coerce its operands
2581 * - Runtime calls. Some runtime call function declarations have narrower types than 32-bit version. To invoke them
2582 * the argument should be coerced to the exact type.
2583 */
llvm::Type *LLVMIrConstructor::GetExactType(DataType::Type targetType)
{
    switch (targetType) {
        case DataType::VOID:
            return builder_.getVoidTy();
        case DataType::POINTER:
            return builder_.getPtrTy();
        case DataType::REFERENCE:
            // Managed references live in the GC address space
            return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
        case DataType::BOOL:
        case DataType::UINT8:
        case DataType::INT8:
            return builder_.getInt8Ty();
        case DataType::UINT16:
        case DataType::INT16:
            return builder_.getInt16Ty();
        case DataType::UINT32:
        case DataType::INT32:
            return builder_.getInt32Ty();
        case DataType::UINT64:
        case DataType::INT64:
            return builder_.getInt64Ty();
        case DataType::FLOAT32:
            return builder_.getFloatTy();
        case DataType::FLOAT64:
            return builder_.getDoubleTy();
        default:
            ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(targetType)
                                        << "' to llvm type conversion." << std::endl));
            UNREACHABLE();
    }
}
2616
GetCastOp(DataType::Type from,DataType::Type to)2617 llvm::Instruction::CastOps LLVMIrConstructor::GetCastOp(DataType::Type from, DataType::Type to)
2618 {
2619 Arch arch = GetGraph()->GetArch();
2620 if (IsInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) > DataType::GetTypeSize(to, arch)) {
2621 // narrowing, e.g. U32TOU8, I64TOI32
2622 return llvm::Instruction::Trunc;
2623 }
2624 if (IsSignedInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2625 // signed int widening, e.g. I32TOI64, I32TOU64
2626 return llvm::Instruction::SExt;
2627 }
2628 if (IsUnsignedInteger(from) && IsInteger(to) &&
2629 DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2630 // unsigned int widening, e.g. U32TOI64, U8TOU64
2631 return llvm::Instruction::ZExt;
2632 }
2633 if (IsUnsignedInteger(from) && DataType::IsFloatType(to)) {
2634 // unsigned int to float, e.g. U32TOF64, U64TOF64
2635 return llvm::Instruction::UIToFP;
2636 }
2637 if (IsSignedInteger(from) && DataType::IsFloatType(to)) {
2638 // signed int to float e.g. I32TOF64, I64TOF64
2639 return llvm::Instruction::SIToFP;
2640 }
2641 if (DataType::IsFloatType(from) && DataType::IsFloatType(to)) {
2642 if (DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2643 return llvm::Instruction::FPExt;
2644 }
2645 return llvm::Instruction::FPTrunc;
2646 }
2647 if (DataType::IsReference(from) && to == DataType::POINTER) {
2648 return llvm::Instruction::AddrSpaceCast;
2649 }
2650 ASSERT_DO(false, (std::cerr << "Cast from " << DataType::ToString(from) << " to " << DataType::ToString(to))
2651 << " is not supported" << std::endl);
2652 UNREACHABLE();
2653 }
2654
2655 // Various other helpers
2656
2657 /**
2658 * Coerce given {@code value} with {@code sourceType} to the {@code targetType}.
2659 *
2660 * The method may perform truncation or widening cast, or leave the original
2661 * {@code value}, if no cast is necessary.
2662 *
2663 * For integer {@code value} when widening cast is performed the sign of the {@code sourceType} is taken
2664 * into account:
2665 * * {@code value} is zero extended if the {@code sourceType} is unsigned integer
2666 * * {@code value} is sign extended if the {@code sourceType} is signed integer
2667 *
2668 * Reference types are returned as is.
2669 *
2670 * Currently Ark Bytecode:
2671 * * does not differentiate between ints of sizes less than 32 bits, and treats them all as i32/u32
2672 * * leaves resolution of such conversions to the discretion of bytecodes accepting them
2673 * * assumes implicit casts between small integers
2674 *
2675 * Sometimes it causes inconsistencies in LLVM since Ark Compiler IR input has implicit casts too,
2676 * but LLVM does not permit such conversions. This function perform those casts if necessary.
2677 */
llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, DataType::Type sourceType, DataType::Type targetType)
{
    ASSERT(value != nullptr);
    // Other non-integer mistyping prohibited
    ASSERT_DO(!IsInteger(targetType) || value->getType()->isIntegerTy(),
              std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be an integer."
                        << std::endl);
    ASSERT_DO(!DataType::IsReference(targetType) || value->getType()->isPointerTy(),
              std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a pointer."
                        << std::endl);
    ASSERT_DO(targetType != DataType::FLOAT64 || value->getType()->isDoubleTy(),
              std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a double."
                        << std::endl);
    ASSERT_DO(targetType != DataType::FLOAT32 || value->getType()->isFloatTy(),
              std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a float."
                        << std::endl);

    // Only integer targets may need an actual cast; everything else passed the asserts above
    if (!IsInteger(targetType)) {
        return value;
    }
    ASSERT(value->getType()->isIntegerTy());

    auto targetLlvmType = llvm::cast<llvm::IntegerType>(GetExactType(targetType));
    auto originalLlvmType = llvm::cast<llvm::IntegerType>(value->getType());
    ASSERT(originalLlvmType->getBitWidth() == DataType::GetTypeSize(sourceType, GetGraph()->GetArch()));

    // Truncate when narrowing; extend per the SOURCE type's signedness when widening
    llvm::CastInst::CastOps castOp;
    if (originalLlvmType->getBitWidth() > targetLlvmType->getBitWidth()) {
        castOp = llvm::Instruction::Trunc;
    } else if (originalLlvmType->getBitWidth() < targetLlvmType->getBitWidth()) {
        if (IsSignedInteger(sourceType)) {
            castOp = llvm::Instruction::SExt;
        } else {
            castOp = llvm::Instruction::ZExt;
        }
    } else {
        // Same width — no cast needed
        return value;
    }
    return builder_.CreateCast(castOp, value, targetLlvmType);
}
2718
2719 /**
2720 * Used in irtoc C++ inlining.
2721 *
2722 * When we compile irtoc handlers, we do not have ark's types.
2723 * For example, ark::Frame is missing.
2724 * LLVM AOT uses i8* or i64 instead
2725 *
2726 * For example, the irtoc handler could look like:
2727 *
2728 * @code
2729 * void MyHandler(i8* myMbject) {
2730 * var clone = CloneObjectEntrypoint(myObject);
2731 * }
2732 * @endcode
2733 *
2734 * When we compile interpreter handlers with cpp inlining we have the definition of CloneObjectEntrypoint:
2735 *
2736 * @code
2737 * ObjectHeader *CloneObjectEntrypoint(ObjectHeader *obj) {
2738 * ...
2739 * }
2740 * @endcode
2741 *
2742 * and we must invoke the CloneObjectEntrypoint with ObjectHeader* argument, not i8*.
2743 * The CoerceValue method converts i8* to ObjectHeader*
2744 */
CoerceValue(llvm::Value * value,llvm::Type * targetType)2745 llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, llvm::Type *targetType)
2746 {
2747 auto valueType = value->getType();
2748 if (valueType == targetType) {
2749 return value;
2750 }
2751
2752 if (!valueType->isPointerTy() && targetType->isPointerTy()) {
2753 // DataType::POINTER to targetType.
2754 // Example: i64 -> %"class.ark::Frame"*
2755 return builder_.CreateIntToPtr(value, targetType);
2756 }
2757 if (valueType->isPointerTy() && !targetType->isPointerTy()) {
2758 // valueType to DataType::POINTER
2759 // Example: %"class.ark::coretypes::String"* -> i64
2760 return builder_.CreatePtrToInt(value, targetType);
2761 }
2762
2763 if (valueType->isIntegerTy() && targetType->isIntegerTy()) {
2764 auto valueWidth = llvm::cast<llvm::IntegerType>(valueType)->getBitWidth();
2765 auto targetWidth = llvm::cast<llvm::IntegerType>(targetType)->getBitWidth();
2766 if (valueWidth > targetWidth) {
2767 return builder_.CreateTrunc(value, targetType);
2768 }
2769 if (valueWidth < targetWidth) {
2770 return builder_.CreateZExt(value, targetType);
2771 }
2772 }
2773 if (valueType->isPointerTy() && targetType->isPointerTy()) {
2774 return builder_.CreateAddrSpaceCast(value, targetType);
2775 }
2776 UNREACHABLE();
2777 }
2778
/**
 * Register the LLVM value produced for @p inst in the input map, normalizing
 * integers to the instruction's exact width and pre-populating coerced variants
 * for users that expect a different type.
 */
void LLVMIrConstructor::ValueMapAdd(Inst *inst, llvm::Value *value, bool setName)
{
    // GC-reference results of non-movable, non-check instructions are marked so
    // later passes know they must not be relocated
    if (!inst->IsMovableObject() && !inst->IsCheck() && llvmbackend::gc_utils::IsGcRefType(value->getType())) {
        auto llvmInst = llvm::dyn_cast<llvm::Instruction>(value);
        if (llvmInst != nullptr) {
            llvmbackend::gc_utils::MarkAsNonMovable(llvmInst);
        }
    }

    auto type = inst->GetType();
    auto ltype = GetExactType(type);
    ASSERT(inputMap_.count(inst) == 0);
    auto it = inputMap_.emplace(inst, GetGraph()->GetLocalAllocator()->Adapter());
    ASSERT(it.second);
    ArenaUnorderedMap<DataType::Type, llvm::Value *> &typeMap = it.first->second;

    if (value == nullptr) {
        typeMap.insert({type, nullptr});
        return;
    }
    if (setName) {
        value->setName(CreateNameForInst(inst));
    }
    // LiveOut and non-integer values are stored as-is (no width normalization)
    if (inst->GetOpcode() == Opcode::LiveOut || !ltype->isIntegerTy()) {
        typeMap.insert({type, value});
        if (type == DataType::POINTER) {
            FillValueMapForUsers(&typeMap, inst, value);
        }
        return;
    }
    ASSERT(value->getType()->isIntegerTy());
    // Normalize integer width to the instruction's exact type before recording
    if (value->getType()->getIntegerBitWidth() > ltype->getIntegerBitWidth()) {
        value = builder_.CreateTrunc(value, ltype);
    } else if (value->getType()->getIntegerBitWidth() < ltype->getIntegerBitWidth()) {
        value = builder_.CreateZExt(value, ltype);
    }
    typeMap.insert({type, value});
    FillValueMapForUsers(&typeMap, inst, value);
}
2818
/**
 * Pre-populate the per-instruction type map with coerced variants of @p value
 * for every user that consumes @p inst under a different DataType.
 */
void LLVMIrConstructor::FillValueMapForUsers(ArenaUnorderedMap<DataType::Type, llvm::Value *> *map, Inst *inst,
                                             llvm::Value *value)
{
    auto type = inst->GetType();
    ASSERT(type != DataType::REFERENCE);
    for (auto &userItem : inst->GetUsers()) {
        auto user = userItem.GetInst();
        for (unsigned i = 0; i < user->GetInputsCount(); i++) {
            auto itype = user->GetInputType(i);
            auto input = user->GetInput(i).GetInst();
            // Only handle inputs that are this inst, need a different type, and were not filled yet
            if (input != inst || itype == type || map->count(itype) != 0) {
                continue;
            }
            /*
             * When Ark Compiler implicitly converts something -> LLVM side:
             * 1. POINTER to REFERENCE (user LiveOut or Store) -> AddrSpaceCast
             * 2. POINTER to UINT64 (user is LiveOut) -> no conversion necessary
             * 3. LiveIn to REFERENCE -> no conversion necessary
             * 4. INT64/UINT64 to REFERENCE (user is LiveOut) -> IntToPtr
             * 5. Integers -> use coercing
             */
            llvm::Value *cvalue;
            if (type == DataType::POINTER && itype == DataType::REFERENCE) {
                ASSERT(user->GetOpcode() == Opcode::LiveOut || user->GetOpcode() == Opcode::Store);
                cvalue = builder_.CreateAddrSpaceCast(value, builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
            } else if (type == DataType::POINTER && itype == DataType::UINT64) {
                ASSERT(user->GetOpcode() == Opcode::LiveOut);
                cvalue = value;
            } else if (type == DataType::POINTER) {
                // Any other POINTER reinterpretation is not pre-filled here
                continue;
            } else if (inst->GetOpcode() == Opcode::LiveIn && itype == DataType::REFERENCE) {
                cvalue = value;
            } else if ((type == DataType::INT64 || type == DataType::UINT64) && itype == DataType::REFERENCE) {
                ASSERT(user->GetOpcode() == Opcode::LiveOut);
                cvalue = builder_.CreateIntToPtr(value, builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
            } else {
                cvalue = CoerceValue(value, type, itype);
            }
            map->insert({itype, cvalue});
        }
    }
}
2861
/**
 * Attach debug location and inline-chain info from the ark instruction @p orig
 * to the generated call @p call.
 */
void LLVMIrConstructor::WrapArkCall(Inst *orig, llvm::CallInst *call)
{
    ASSERT_PRINT(!call->getDebugLoc(), "Debug info must be unset");
    // Ark calls may call GC inside, so add statepoint
    debugData_->SetLocation(call, orig->GetPc());
    EncodeInlineInfo(orig, call);
}
2869
/**
 * Apply function-level attributes and entry-block setup before lowering the body.
 *
 * @param noInline mark the managed function noinline with weak linkage, which also
 *                 blocks interprocedural return-value propagation (see comment below)
 */
void LLVMIrConstructor::InitializeEntryBlock(bool noInline)
{
    if (noInline) {
        ASSERT(!arkInterface_->IsIrtocMode() && GetGraph()->SupportManagedCode());
        func_->addFnAttr(llvm::Attribute::NoInline);
        // This type of linkage prevents return value propagation.
        // llvm::GlobalValue::isDefinitionExact becomes false and as a result
        // llvm::canTrackReturnsInterprocedurally() also false.
        func_->setLinkage(llvm::Function::WeakAnyLinkage);
    }

    if (GetGraph()->SupportManagedCode()) {
        // Method* is never null; neither is "this" of an instance method
        func_->addParamAttr(GetMethodArgument()->getArgNo(), llvm::Attribute::NonNull);
        if (!GetGraph()->GetRuntime()->IsMethodStatic(GetGraph()->GetMethod())) {
            func_->addParamAttr(GetArgument(0)->getArgNo(), llvm::Attribute::NonNull);
        }
    }

    // For inline-module instance methods, emit a KeepThis call in the entry block
    // (presumably to keep "this" alive — confirm against the builtin's definition)
    if (func_->hasMetadata(LLVMArkInterface::FUNCTION_MD_INLINE_MODULE) &&
        !GetGraph()->GetRuntime()->IsMethodStatic(GetGraph()->GetMethod())) {
        SetCurrentBasicBlock(&func_->getEntryBlock());
        builder_.CreateCall(KeepThis(func_->getParent()), GetArgument(0));
    }
}
2894
MarkAsAllocation(llvm::CallInst * call)2895 void LLVMIrConstructor::MarkAsAllocation(llvm::CallInst *call)
2896 {
2897 llvm::AttrBuilder builder {call->getContext()};
2898 /**
2899 * When we add allockind(alloc) attribute, then llvm can assume that the function is allocation function.
2900 * With this assumption llvm can remove dead allocations
2901 */
2902 builder.addAllocKindAttr(llvm::AllocFnKind::Alloc);
2903 call->addFnAttr(builder.getAttribute(llvm::Attribute::AllocKind));
2904 call->addRetAttr(llvm::Attribute::NonNull);
2905 call->addRetAttr(llvm::Attribute::NoAlias);
2906 }
2907
2908 // Instruction Visitors
2909
2910 // Constant and NullPtr are processed directly in GetInputValue
// No code is emitted here: constants are materialized lazily in GetInputValue.
void LLVMIrConstructor::VisitConstant([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
{
    ASSERT(inst->GetBasicBlock()->IsStartBlock());
}
2915
// No code is emitted here: null pointers are materialized lazily in GetInputValue.
void LLVMIrConstructor::VisitNullPtr([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
{
    ASSERT(inst->GetBasicBlock()->IsStartBlock());
}
2920
VisitLiveIn(GraphVisitor * v,Inst * inst)2921 void LLVMIrConstructor::VisitLiveIn(GraphVisitor *v, Inst *inst)
2922 {
2923 auto ctor = static_cast<LLVMIrConstructor *>(v);
2924 ASSERT(inst->GetBasicBlock()->IsStartBlock());
2925 ASSERT(!ctor->GetGraph()->SupportManagedCode());
2926
2927 auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->CastToLiveIn()->GetDstReg());
2928 ASSERT(regInput != ctor->cc_.end());
2929 auto idx = std::distance(ctor->cc_.begin(), regInput);
2930 auto n = ctor->func_->arg_begin() + idx;
2931 ctor->ValueMapAdd(inst, ctor->CoerceValue(n, ctor->GetExactType(inst->GetType())));
2932 }
2933
VisitParameter(GraphVisitor * v,Inst * inst)2934 void LLVMIrConstructor::VisitParameter(GraphVisitor *v, Inst *inst)
2935 {
2936 ASSERT(inst->GetBasicBlock()->IsStartBlock());
2937 auto ctor = static_cast<LLVMIrConstructor *>(v);
2938 ASSERT(ctor->GetGraph()->SupportManagedCode() || ctor->GetGraph()->GetMode().IsFastPath());
2939 auto n = ctor->GetArgument(inst->CastToParameter()->GetArgNumber());
2940 ctor->ValueMapAdd(inst, n, false);
2941 }
2942
// Emits 'ret void'. When the instruction is flagged MEM_BARRIER, a call to
// the BarrierReturnVoid builtin is emitted first and tagged so later passes
// materialize the required memory barrier.
void LLVMIrConstructor::VisitReturnVoid(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        auto builtin = BarrierReturnVoid(ctor->func_->getParent());
        auto builtinCall = ctor->builder_.CreateCall(builtin);
        builtinCall->addFnAttr(llvm::Attribute::get(builtinCall->getContext(), "needs-mem-barrier"));
    }
    ctor->builder_.CreateRetVoid();
}
2953
VisitReturn(GraphVisitor * v,Inst * inst)2954 void LLVMIrConstructor::VisitReturn(GraphVisitor *v, Inst *inst)
2955 {
2956 auto ctor = static_cast<LLVMIrConstructor *>(v);
2957 auto ret = ctor->GetInputValue(inst, 0);
2958
2959 auto type = inst->GetType();
2960 if (DataType::IsLessInt32(type)) {
2961 ret = ctor->CoerceValue(ret, type, DataType::INT32);
2962 }
2963
2964 ctor->builder_.CreateRet(ret);
2965 }
2966
// Return from an inlined method: no actual 'ret' is emitted (control simply
// continues in the caller), but the memory-barrier builtin is still emitted
// when the instruction is flagged MEM_BARRIER.
void LLVMIrConstructor::VisitReturnInlined(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);

    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        auto builtin = BarrierReturnVoid(ctor->func_->getParent());
        auto builtinCall = ctor->builder_.CreateCall(builtin);
        builtinCall->addFnAttr(llvm::Attribute::get(builtinCall->getContext(), "needs-mem-barrier"));
    }
}
2977
VisitReturnI(GraphVisitor * v,Inst * inst)2978 void LLVMIrConstructor::VisitReturnI(GraphVisitor *v, Inst *inst)
2979 {
2980 auto ctor = static_cast<LLVMIrConstructor *>(v);
2981 llvm::Value *ret = ctor->builder_.getInt64(inst->CastToReturnI()->GetImm());
2982
2983 auto type = inst->GetType();
2984 if (DataType::IsInt32Bit(type)) {
2985 ret = ctor->CoerceValue(ret, DataType::INT64, DataType::INT32);
2986 }
2987
2988 ctor->builder_.CreateRet(ret);
2989 }
2990
2991 // No-op "pseudo" instructions
// These instructions carry Ark-IR bookkeeping only and emit no LLVM IR of
// their own; deopt state reaches the runtime through SaveState bundles that
// are attached to calls (see CreateSaveStateBundle usage in this file).
void LLVMIrConstructor::VisitTry([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
void LLVMIrConstructor::VisitSaveState([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
void LLVMIrConstructor::VisitSaveStateDeoptimize([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
void LLVMIrConstructor::VisitSafePoint([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
// NOP and Deoptimize* required after adding CheckElim* passes
void LLVMIrConstructor::VisitNOP([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
2998
// LiveOut pins a value to a specific physical register of the calling
// convention on exit paths (non-managed code only), recording it into the
// matching ccValues_ slot for later use.
void LLVMIrConstructor::VisitLiveOut(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    ASSERT(!ctor->GetGraph()->SupportManagedCode());
    auto input = ctor->GetInputValue(inst, 0);

    // Locate the calling-convention slot for the destination register
    auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->GetDstReg());
    ASSERT(regInput != ctor->cc_.end());
    size_t idx = std::distance(ctor->cc_.begin(), regInput);
    // Each slot may be written at most once
    ASSERT(ctor->ccValues_[idx] == nullptr);

    // LiveOut not allowed for real frame register
    ASSERT(ctor->GetGraph()->GetArch() == Arch::AARCH64 || idx + 1 != ctor->cc_.size());
    auto value = ctor->CoerceValue(input, ctor->GetExactType(inst->GetType()));
    ctor->ccValues_[idx] = value;
    ctor->ValueMapAdd(inst, value, false);
}
3016
// Signed subtraction with overflow check: computes the difference via
// llvm.ssub.with.overflow and routes the overflow bit to a deoptimization
// branch. The non-overflowing difference becomes the instruction's value.
void LLVMIrConstructor::VisitSubOverflowCheck(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto dtype = inst->GetType();
    auto ltype = ctor->GetExactType(dtype);
    auto src0 = ctor->GetInputValue(inst, 0);
    auto src1 = ctor->GetInputValue(inst, 1);
    ASSERT(inst->GetInputType(0) == inst->GetInputType(1));

    // Bring both operands to the destination width: sign-extend when the
    // sources are narrower, truncate when they are wider.
    auto arch = ctor->GetGraph()->GetArch();
    auto dtypeSize = DataType::GetTypeSize(dtype, arch);
    auto srcTypeSize = DataType::GetTypeSize(inst->GetInputType(0), arch);
    ASSERT(DataType::Is32Bits(dtype, arch) || DataType::Is64Bits(dtype, arch));
    if (srcTypeSize < dtypeSize) {
        src0 = ctor->builder_.CreateSExt(src0, ltype);
        src1 = ctor->builder_.CreateSExt(src1, ltype);
    }
    if (dtypeSize < srcTypeSize) {
        src0 = ctor->builder_.CreateTrunc(src0, ltype);
        src1 = ctor->builder_.CreateTrunc(src1, ltype);
    }

    // The intrinsic returns {difference, overflow-flag}
    auto ssubOverflow = ctor->builder_.CreateBinaryIntrinsic(llvm::Intrinsic::ssub_with_overflow, src0, src1);
    auto result = ctor->builder_.CreateExtractValue(ssubOverflow, {0}, "ssub");
    auto deoptimize = ctor->builder_.CreateExtractValue(ssubOverflow, {1}, "obit");

    auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
    ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);

    ctor->ValueMapAdd(inst, result, false);
}
3048
// Unconditional deoptimization: calls the DEOPTIMIZE entrypoint with a payload
// packing the deopt type in the low bits and the instruction id above them,
// then terminates the basic block with 'unreachable'.
void LLVMIrConstructor::VisitDeoptimize(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto type = inst->CastToDeoptimize()->GetDeoptimizeType();
    auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
    uint64_t value = static_cast<uint64_t>(type) | (inst->GetId() << MinimumBitsToStore(DeoptimizeType::COUNT));
    auto call = ctor->CreateEntrypointCall(exception, inst, {ctor->builder_.getInt64(value)});
    call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
    ctor->builder_.CreateUnreachable();
}
3059
VisitDeoptimizeIf(GraphVisitor * v,Inst * inst)3060 void LLVMIrConstructor::VisitDeoptimizeIf(GraphVisitor *v, Inst *inst)
3061 {
3062 auto ctor = static_cast<LLVMIrConstructor *>(v);
3063 auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
3064 auto deoptimize = ctor->builder_.CreateIsNotNull(ctor->GetInputValue(inst, 0));
3065 ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);
3066 }
3067
VisitNegativeCheck(GraphVisitor * v,Inst * inst)3068 void LLVMIrConstructor::VisitNegativeCheck(GraphVisitor *v, Inst *inst)
3069 {
3070 auto ctor = static_cast<LLVMIrConstructor *>(v);
3071 auto val = ctor->GetInputValue(inst, 0);
3072
3073 auto deoptimize = ctor->builder_.CreateICmpSLT(val, llvm::Constant::getNullValue(val->getType()));
3074 auto exception = RuntimeInterface::EntrypointId::NEGATIVE_ARRAY_SIZE_EXCEPTION;
3075 ctor->CreateDeoptimizationBranch(inst, deoptimize, exception, {ctor->ToSSizeT(val)});
3076
3077 ctor->ValueMapAdd(inst, val, false);
3078 }
3079
// Deoptimizes into ArithmeticException when the checked value is zero
// (division-by-zero guard); otherwise the value passes through unchanged.
void LLVMIrConstructor::VisitZeroCheck(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto val = ctor->GetInputValue(inst, 0);

    auto deoptimize = ctor->builder_.CreateIsNull(val);
    auto exception = RuntimeInterface::EntrypointId::ARITHMETIC_EXCEPTION;
    ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);

    ctor->ValueMapAdd(inst, val, false);
}
3091
// Deoptimizes into NullPointerException when the object is null. When
// implicit null checks are enabled, the comparison is done on an
// addrspace-cast 64-bit pointer so LLVM's ImplicitNullChecks pass can
// fold the check into a memory access.
void LLVMIrConstructor::VisitNullCheck(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto obj = ctor->GetInputValue(inst, 0);
    auto obj64 = obj;

    if (compiler::g_options.IsCompilerImplicitNullCheck()) {
        // LLVM's ImplicitNullChecks pass can't operate with 32-bit pointers, but it is enough
        // to create address space cast to an usual 64-bit pointer before comparing with null.
        obj64 = ctor->builder_.CreateAddrSpaceCast(obj, ctor->builder_.getPtrTy());
    }

    auto deoptimize = ctor->builder_.CreateIsNull(obj64);
    auto exception = RuntimeInterface::EntrypointId::NULL_POINTER_EXCEPTION;
    ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);

    // Downstream users consume the original (uncast) object pointer
    ctor->ValueMapAdd(inst, obj, false);
}
3110
// Deoptimizes with an index-out-of-bounds exception (array or string variant)
// when index >= length. The unsigned comparison also rejects negative
// indices, since they wrap to large unsigned values.
void LLVMIrConstructor::VisitBoundsCheck(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto length = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(length, ctor->builder_.getInt32Ty());
    auto index = ctor->GetInputValue(inst, 1);
    ASSERT(index->getType()->isIntegerTy());

    auto deoptimize = ctor->builder_.CreateICmpUGE(index, length);
    auto exception = inst->CastToBoundsCheck()->IsArray()
        ? RuntimeInterface::EntrypointId::ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION
        : RuntimeInterface::EntrypointId::STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION;
    ctor->CreateDeoptimizationBranch(inst, deoptimize, exception, {ctor->ToSSizeT(index), ctor->ToSizeT(length)});

    ctor->ValueMapAdd(inst, index, false);
}
3127
// Checks that 'ref' may be stored into 'array' (array store check).
// Fast paths handled inline, each falling through to 'out' on success:
//   1. storing null is always allowed;
//   2. ref's class equals the array's element class;
//   3. the element class's base class is null (element type is Object).
// Only when all fast paths fail is the runtime entrypoint called, which
// performs the full check itself (hence no CreateDeoptimizationBranch here).
void LLVMIrConstructor::VisitRefTypeCheck(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);

    auto array = ctor->GetInputValue(inst, 0);
    auto ref = ctor->GetInputValue(inst, 1);

    auto &ctx = ctor->func_->getContext();
    auto compareBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "comparison"), ctor->func_);
    auto compBaseBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "comp_base"), ctor->func_);
    auto slowPathBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "slow_path"), ctor->func_);
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "out"), ctor->func_);

    auto runtime = ctor->GetGraph()->GetRuntime();
    auto arch = ctor->GetGraph()->GetArch();

    // Storing null never needs a type check
    auto cmp = ctor->builder_.CreateIsNotNull(ref);
    ctor->builder_.CreateCondBr(cmp, compareBb, outBb);

    // Get element class from array
    ctor->SetCurrentBasicBlock(compareBb);
    auto arrayClass = CreateLoadClassFromObject(array, &ctor->builder_, ctor->arkInterface_);
    auto elementTypeOffset = runtime->GetClassComponentTypeOffset(arch);
    auto int8Ty = ctor->builder_.getInt8Ty();
    auto elementClassPtr = ctor->builder_.CreateConstInBoundsGEP1_32(int8Ty, arrayClass, elementTypeOffset);
    auto elementClass = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), elementClassPtr);
    // And class from stored object
    auto refClass = CreateLoadClassFromObject(ref, &ctor->builder_, ctor->arkInterface_);

    // Unlike other checks, there's another check in the runtime function, so don't use CreateDeoptimizationBranch
    cmp = ctor->builder_.CreateICmpNE(elementClass, refClass);
    auto branchWeights =
        llvm::MDBuilder(ctx).createBranchWeights(llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,
                                                 llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT);
    ctor->builder_.CreateCondBr(cmp, compBaseBb, outBb, branchWeights);

    // If the array's element class is Object (Base class is null) - no further check needed
    ctor->SetCurrentBasicBlock(compBaseBb);
    auto baseTypeOffset = runtime->GetClassBaseOffset(arch);
    auto baseClassPtr = ctor->builder_.CreateConstInBoundsGEP1_32(int8Ty, elementClass, baseTypeOffset);
    auto baseClass = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), baseClassPtr);
    auto notObjectArray = ctor->builder_.CreateIsNotNull(baseClass);
    ctor->builder_.CreateCondBr(notObjectArray, slowPathBb, outBb);

    // Slow path: let the runtime finish the check (deoptimizing variant when allowed)
    ctor->SetCurrentBasicBlock(slowPathBb);
    if (inst->CanDeoptimize()) {
        auto entrypoint = RuntimeInterface::EntrypointId::CHECK_STORE_ARRAY_REFERENCE_DEOPTIMIZE;
        auto call = ctor->CreateEntrypointCall(entrypoint, inst, {array, ref});
        call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
    } else {
        ctor->CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_STORE_ARRAY_REFERENCE, inst, {array, ref});
    }
    ctor->builder_.CreateBr(outBb);

    ctor->SetCurrentBasicBlock(outBb);
    ctor->ValueMapAdd(inst, ref, false);
}
3185
// Materializes a string constant. With AOT PLT string resolution enabled
// (and outside static constructors) the LoadString builtin resolves the
// string through a PLT slot; otherwise a direct RESOLVE_STRING entrypoint
// call is emitted.
void LLVMIrConstructor::VisitLoadString(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);

    llvm::Value *result;
    if (g_options.IsCompilerAotLoadStringPlt() &&
        !ctor->GetGraph()->GetRuntime()->IsMethodStaticConstructor(ctor->GetGraph()->GetMethod())) {
        auto aotData = ctor->GetGraph()->GetAotData();
        ASSERT(aotData != nullptr);

        auto typeId = inst->CastToLoadString()->GetTypeId();
        auto typeVal = ctor->builder_.getInt32(typeId);
        auto slotVal = ctor->builder_.getInt32(ctor->arkInterface_->GetStringSlotId(aotData, typeId));
        // Return value intentionally unused — presumably registers the
        // RESOLVE_STRING_AOT entrypoint's function type with the interface;
        // confirm against GetOrCreateRuntimeFunctionType's contract.
        ctor->arkInterface_->GetOrCreateRuntimeFunctionType(
            ctor->func_->getContext(), ctor->func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
            static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_STRING_AOT));

        auto builtin = LoadString(ctor->func_->getParent());
        auto call = ctor->builder_.CreateCall(builtin, {typeVal, slotVal}, ctor->CreateSaveStateBundle(inst));
        ctor->WrapArkCall(inst, call);
        result = call;
    } else {
        auto stringType = ctor->builder_.getInt32(inst->CastToLoadString()->GetTypeId());
        auto entrypointId = RuntimeInterface::EntrypointId::RESOLVE_STRING;
        result = ctor->CreateEntrypointCall(entrypointId, inst, {ctor->GetMethodArgument(), stringType});
    }
    ctor->ValueMapAdd(inst, result);
}
3214
VisitLenArray(GraphVisitor * v,Inst * inst)3215 void LLVMIrConstructor::VisitLenArray(GraphVisitor *v, Inst *inst)
3216 {
3217 auto ctor = static_cast<LLVMIrConstructor *>(v);
3218 auto array = ctor->GetInputValue(inst, 0);
3219 auto runtime = ctor->GetGraph()->GetRuntime();
3220 bool isString = !inst->CastToLenArray()->IsArray();
3221 auto &builder = ctor->builder_;
3222
3223 auto arrayInput = inst->GetDataFlowInput(0);
3224 // Try to extract array length from constructor
3225 if (arrayInput->GetOpcode() == Opcode::NewArray) {
3226 auto size = ctor->GetInputValue(arrayInput, NewArrayInst::INDEX_SIZE);
3227 ctor->ValueMapAdd(inst, size);
3228 return;
3229 }
3230 auto builtin = LenArray(ctor->func_->getParent());
3231 auto arch = ctor->GetGraph()->GetArch();
3232 auto offset = isString ? runtime->GetStringLengthOffset(arch) : runtime->GetArrayLengthOffset(arch);
3233 auto len = ctor->builder_.CreateCall(builtin, {array, builder.getInt32(offset)});
3234
3235 ctor->ValueMapAdd(inst, len);
3236 }
3237
// Loads an element from an array (or a character from a string):
// element address = object + data-offset + index * sizeof(element),
// where the data offset differs between arrays and strings.
void LLVMIrConstructor::VisitLoadArray(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto loadArray = inst->CastToLoadArray();

    auto array = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(array, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));

    auto dtype = inst->GetType();
    auto ltype = ctor->GetExactType(dtype);
    auto arch = ctor->GetGraph()->GetArch();
    uint32_t dataOffset = ctor->GetGraph()->GetRuntime()->GetArrayDataOffset(arch);
    if (!loadArray->IsArray()) {
        dataOffset = ctor->GetGraph()->GetRuntime()->GetStringDataOffset(arch);
    }
    auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), array, dataOffset);

    // Index input 1 scales by the element type via the typed GEP
    llvm::Value *ptrElem = ctor->builder_.CreateInBoundsGEP(ltype, ptrData, ctor->GetInputValue(inst, 1));

    llvm::Value *n = ctor->builder_.CreateLoad(ltype, ptrElem);
    ctor->ValueMapAdd(inst, n);
}
3260
// Loads a UTF-16 character from a possibly compressed string. The length word
// (input 2) carries the compression bit: when set to "compressed", characters
// are stored as single bytes; otherwise as u16 values. The two layouts are
// handled in separate basic blocks joined by a phi.
void LLVMIrConstructor::VisitLoadCompressedStringChar(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto loadString = inst->CastToLoadCompressedStringChar();

    ASSERT(inst->GetType() == DataType::UINT16);

    auto array = ctor->GetInputValue(loadString, 0);
    auto index = ctor->GetInputValue(loadString, 1);
    auto length = ctor->GetInputValue(loadString, 2);

    ASSERT(ctor->GetGraph()->GetRuntime()->GetStringCompressionMask() == 1U);
    auto compressionMask = ctor->builder_.getInt32(ctor->GetGraph()->GetRuntime()->GetStringCompressionMask());
    auto dataOff = ctor->GetGraph()->GetRuntime()->GetStringDataOffset(ctor->GetGraph()->GetArch());
    auto chars = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), array, dataOff);
    auto isCompressed = ctor->builder_.CreateIsNull(ctor->builder_.CreateAnd(length, compressionMask));

    /**
     * int32_t CompressedCharAt(uint8_t *string, int32_t index) {
     *     int32_t length = LenArray(string, LENGTH_OFFSET, SHIFT);
     *     bool isCompressed = (length & COMPRESSION_MASK) == 0;
     *     uint8_t *chars = string + DATA_OFFSET;
     *
     *     uint16_t c;
     *     if (isCompressed) {
     *         // compressedBb
     *         c = static_cast<uint16_t>(chars[index]);
     *     } else {
     *         // uncompressedBb
     *         c = reinterpret_cast<uint16_t *>(chars)[index];
     *     }
     *     // Coercing
     *     return static_cast<int32_t>(c);
     * }
     */
    auto compressedBb =
        llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "compressed_bb"), ctor->func_);
    auto uncompressedBb =
        llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "uncompressed_bb"), ctor->func_);
    auto continuation =
        llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "char_at_cont"), ctor->func_);
    ctor->builder_.CreateCondBr(isCompressed, compressedBb, uncompressedBb);
    llvm::Value *compressedChar;
    {
        ctor->SetCurrentBasicBlock(compressedBb);
        ASSERT_TYPE(chars, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
        auto charAt = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), chars, index);
        auto character = ctor->builder_.CreateLoad(ctor->builder_.getInt8Ty(), charAt);
        // NOTE(review): the byte is sign-extended here; this matches
        // zero-extension only if compressed chars never have the top bit set
        // (ASCII-compressed strings) — confirm the runtime's compression
        // invariant.
        compressedChar = ctor->builder_.CreateSExt(character, ctor->builder_.getInt16Ty());
        ctor->builder_.CreateBr(continuation);
    }

    llvm::Value *uncompressedChar;
    {
        ctor->SetCurrentBasicBlock(uncompressedBb);
        auto u16CharAt = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt16Ty(), chars, index);
        uncompressedChar = ctor->builder_.CreateLoad(ctor->builder_.getInt16Ty(), u16CharAt);
        ctor->builder_.CreateBr(continuation);
    }
    ctor->SetCurrentBasicBlock(continuation);

    // Merge the two paths into a single u16 value
    auto charAt = ctor->builder_.CreatePHI(ctor->builder_.getInt16Ty(), 2U);
    charAt->addIncoming(compressedChar, compressedBb);
    charAt->addIncoming(uncompressedChar, uncompressedBb);
    ctor->ValueMapAdd(inst, charAt);
}
3327
// Stores value (input 2) into array (input 0) at index (input 1). When the
// instruction needs GC barriers, the store is bracketed by a pre-write
// barrier (on the element address) and a post-write barrier (on the array
// with the element's byte offset).
void LLVMIrConstructor::VisitStoreArray(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto array = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(array, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
    auto value = ctor->GetInputValue(inst, 2U);

    auto dtype = inst->GetType();
    auto arch = ctor->GetGraph()->GetArch();
    auto ltype = ctor->GetExactType(dtype);
    auto dataOff = ctor->GetGraph()->GetRuntime()->GetArrayDataOffset(arch);
    auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), array, dataOff);
    auto index = ctor->GetInputValue(inst, 1);
    auto ptrElem = ctor->builder_.CreateInBoundsGEP(ltype, ptrData, index);

    // Pre
    if (inst->CastToStoreArray()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, ptrElem);
    }
    // Write
    ctor->builder_.CreateStore(value, ptrElem);
    // Post
    if (inst->CastToStoreArray()->GetNeedBarrier()) {
        // Byte offset of the element: dataOff + (index << log2(sizeof(element)))
        auto indexOffset = ctor->builder_.CreateBinOp(llvm::Instruction::Shl, index,
                                                      ctor->builder_.getInt32(DataType::ShiftByType(dtype, arch)));
        auto offset = ctor->builder_.CreateBinOp(llvm::Instruction::Add, indexOffset, ctor->builder_.getInt32(dataOff));
        ctor->CreatePostWRB(inst, array, offset, value);
    }
}
3357
VisitLoad(GraphVisitor * v,Inst * inst)3358 void LLVMIrConstructor::VisitLoad(GraphVisitor *v, Inst *inst)
3359 {
3360 auto ctor = static_cast<LLVMIrConstructor *>(v);
3361 auto srcPtr = ctor->GetInputValue(inst, 0);
3362 ASSERT(srcPtr->getType()->isPointerTy());
3363
3364 llvm::Value *offset;
3365 auto offsetInput = inst->GetInput(1).GetInst();
3366 auto offsetItype = offsetInput->GetType();
3367 if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
3368 ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
3369 offset = ctor->GetInputValue(inst, 1, true);
3370 } else {
3371 offset = ctor->GetInputValue(inst, 1);
3372 }
3373
3374 ASSERT(srcPtr->getType()->isPointerTy());
3375 auto ptr = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);
3376
3377 auto n = ctor->CreateLoadWithOrdering(inst, ptr, ToAtomicOrdering(inst->CastToLoad()->GetVolatile()));
3378 ctor->ValueMapAdd(inst, n);
3379 }
3380
// Stores value (input 2) at srcPtr (input 0) + byte offset (input 1),
// honoring the instruction's 'volatile' flag and emitting GC write barriers
// when required. 64-bit offsets go through the special GetInputValue path
// and must not themselves originate from Load/LoadI (asserted).
void LLVMIrConstructor::VisitStore(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto srcPtr = ctor->GetInputValue(inst, 0);
    auto value = ctor->GetInputValue(inst, 2U);

    llvm::Value *offset;
    auto offsetInput = inst->GetInput(1).GetInst();
    auto offsetItype = offsetInput->GetType();
    if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
        ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
        offset = ctor->GetInputValue(inst, 1, true);
    } else {
        offset = ctor->GetInputValue(inst, 1);
    }

    auto ptrPlus = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);

    // Pre
    if (inst->CastToStore()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, ptrPlus);
    }
    // Write
    ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStore()->GetVolatile()));
    // Post
    if (inst->CastToStore()->GetNeedBarrier()) {
        ctor->CreatePostWRB(inst, srcPtr, offset, value);
    }
}
3410
VisitLoadI(GraphVisitor * v,Inst * inst)3411 void LLVMIrConstructor::VisitLoadI(GraphVisitor *v, Inst *inst)
3412 {
3413 auto ctor = static_cast<LLVMIrConstructor *>(v);
3414 auto srcPtr = ctor->GetInputValue(inst, 0);
3415 auto index = inst->CastToLoadI()->GetImm();
3416
3417 ASSERT(srcPtr->getType()->isPointerTy());
3418 auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);
3419
3420 auto n = ctor->CreateLoadWithOrdering(inst, ptrPlus, ToAtomicOrdering(inst->CastToLoadI()->GetVolatile()));
3421 ctor->ValueMapAdd(inst, n);
3422 }
3423
// Stores value (input 1) at srcPtr (input 0) + immediate byte offset,
// honoring the instruction's 'volatile' flag and emitting GC write barriers
// when required.
void LLVMIrConstructor::VisitStoreI(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto srcPtr = ctor->GetInputValue(inst, 0);
    auto value = ctor->GetInputValue(inst, 1);

    auto index = inst->CastToStoreI()->GetImm();
    ASSERT(srcPtr->getType()->isPointerTy());
    auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);

    // Pre
    if (inst->CastToStoreI()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, ptrPlus);
    }
    // Write
    ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStoreI()->GetVolatile()));
    // Post
    if (inst->CastToStoreI()->GetNeedBarrier()) {
        ctor->CreatePostWRB(inst, srcPtr, ctor->builder_.getInt32(index), value);
    }
}
3445
VisitLoadObject(GraphVisitor * v,Inst * inst)3446 void LLVMIrConstructor::VisitLoadObject(GraphVisitor *v, Inst *inst)
3447 {
3448 auto ctor = static_cast<LLVMIrConstructor *>(v);
3449 auto obj = ctor->GetInputValue(inst, 0);
3450 ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3451
3452 auto field = inst->CastToLoadObject()->GetObjField();
3453 auto dataOff = ctor->GetGraph()->GetRuntime()->GetFieldOffset(field);
3454 auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), obj, dataOff);
3455
3456 auto n = ctor->CreateLoadWithOrdering(inst, ptrData, ToAtomicOrdering(inst->CastToLoadObject()->GetVolatile()));
3457 ctor->ValueMapAdd(inst, n);
3458 }
3459
// Stores value (input 1) into an object's field (offset from the runtime),
// honoring the instruction's 'volatile' flag and emitting GC write barriers
// when required.
void LLVMIrConstructor::VisitStoreObject(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto obj = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
    auto value = ctor->GetInputValue(inst, 1);

    auto field = inst->CastToStoreObject()->GetObjField();
    auto dataOff = ctor->GetGraph()->GetRuntime()->GetFieldOffset(field);

    auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), obj, dataOff);

    // Pre
    if (inst->CastToStoreObject()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, ptrData);
    }
    // Write
    ctor->CreateStoreWithOrdering(value, ptrData, ToAtomicOrdering(inst->CastToStoreObject()->GetVolatile()));
    // Post
    if (inst->CastToStoreObject()->GetNeedBarrier()) {
        ctor->CreatePostWRB(inst, obj, ctor->builder_.getInt32(dataOff), value);
    }
}
3483
VisitResolveObjectField(GraphVisitor * v,Inst * inst)3484 void LLVMIrConstructor::VisitResolveObjectField(GraphVisitor *v, Inst *inst)
3485 {
3486 auto ctor = static_cast<LLVMIrConstructor *>(v);
3487
3488 auto typeId = ctor->builder_.getInt32(inst->CastToResolveObjectField()->GetTypeId());
3489
3490 auto entrypointId = RuntimeInterface::EntrypointId::GET_FIELD_OFFSET;
3491 auto offset = ctor->CreateEntrypointCall(entrypointId, inst, {ctor->GetMethodArgument(), typeId});
3492
3493 ctor->ValueMapAdd(inst, offset);
3494 }
3495
// Loads a field whose byte offset was resolved at runtime (input 1).
// Volatile ordering is always used for these dynamically resolved accesses.
void LLVMIrConstructor::VisitLoadResolvedObjectField(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto obj = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));

    auto offset = ctor->GetInputValue(inst, 1);
    auto ptrData = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), obj, offset);

    auto n = ctor->CreateLoadWithOrdering(inst, ptrData, LLVMArkInterface::VOLATILE_ORDER);
    ctor->ValueMapAdd(inst, n);
}
3508
// Stores value (input 1) into a field whose byte offset was resolved at
// runtime (input 2). Volatile ordering is always used; GC write barriers are
// emitted when the instruction requires them.
void LLVMIrConstructor::VisitStoreResolvedObjectField(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto obj = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
    auto value = ctor->GetInputValue(inst, 1);

    auto offset = ctor->GetInputValue(inst, 2);
    auto ptrData = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), obj, offset);

    // Pre
    if (inst->CastToStoreResolvedObjectField()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, ptrData);
    }
    // Write
    ctor->CreateStoreWithOrdering(value, ptrData, LLVMArkInterface::VOLATILE_ORDER);
    // Post
    if (inst->CastToStoreResolvedObjectField()->GetNeedBarrier()) {
        ctor->CreatePostWRB(inst, obj, offset, value);
    }
}
3530
// Resolves the memory address of an unknown static field via the
// GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS entrypoint. The slot pointer passed
// to the entrypoint is null (no caching slot here); the returned integer
// address is converted to a pointer.
void LLVMIrConstructor::VisitResolveObjectFieldStatic(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto resolverInst = inst->CastToResolveObjectFieldStatic();

    auto entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;

    auto typeId = ctor->builder_.getInt32(resolverInst->GetTypeId());
    auto slotPtr = llvm::Constant::getNullValue(ctor->builder_.getPtrTy());

    auto ptrInt = ctor->CreateEntrypointCall(entrypoint, inst, {ctor->GetMethodArgument(), typeId, slotPtr});
    auto n = ctor->builder_.CreateIntToPtr(ptrInt, ctor->builder_.getPtrTy());
    ctor->ValueMapAdd(inst, n);
}
3545
VisitLoadResolvedObjectFieldStatic(GraphVisitor * v,Inst * inst)3546 void LLVMIrConstructor::VisitLoadResolvedObjectFieldStatic(GraphVisitor *v, Inst *inst)
3547 {
3548 auto ctor = static_cast<LLVMIrConstructor *>(v);
3549 auto offset = ctor->GetInputValue(inst, 0);
3550
3551 auto casted = ctor->builder_.CreateIntToPtr(offset, ctor->builder_.getPtrTy());
3552 auto n = ctor->CreateLoadWithOrdering(inst, casted, LLVMArkInterface::VOLATILE_ORDER);
3553 ctor->ValueMapAdd(inst, n);
3554 }
3555
// Stores a primitive value (input 1) at a resolved static field address
// (input 0) with volatile ordering. References (and hence GC barriers) are
// not expected here — asserted below.
void LLVMIrConstructor::VisitStoreResolvedObjectFieldStatic(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    [[maybe_unused]] auto storeInst = inst->CastToStoreResolvedObjectFieldStatic();

    ASSERT(!DataType::IsReference(inst->GetType()));
    ASSERT(!storeInst->GetNeedBarrier());

    auto value = ctor->GetInputValue(inst, 1);
    auto destPtr = ctor->GetInputValue(inst, 0);

    // Sanity: stored width matches the instruction's Ark type width
    [[maybe_unused]] auto dtype = inst->GetType();
    ASSERT(value->getType()->getScalarSizeInBits() == DataType::GetTypeSize(dtype, ctor->GetGraph()->GetArch()));
    ctor->CreateStoreWithOrdering(value, destPtr, LLVMArkInterface::VOLATILE_ORDER);
}
3571
VisitBitcast(GraphVisitor * v,Inst * inst)3572 void LLVMIrConstructor::VisitBitcast(GraphVisitor *v, Inst *inst)
3573 {
3574 auto ctor = static_cast<LLVMIrConstructor *>(v);
3575 auto type = inst->GetType();
3576 auto llvmTargetType = ctor->GetExactType(type);
3577 auto input = ctor->GetInputValue(inst, 0);
3578 auto itype = inst->GetInputType(0);
3579
3580 llvm::Value *n;
3581 if (itype == DataType::POINTER) {
3582 ASSERT(!llvmTargetType->isPointerTy());
3583 n = ctor->builder_.CreatePtrToInt(input, llvmTargetType);
3584 } else {
3585 if (type == DataType::REFERENCE) {
3586 n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3587 } else if (type == DataType::POINTER) {
3588 n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy());
3589 } else {
3590 n = ctor->builder_.CreateBitCast(input, llvmTargetType);
3591 }
3592 }
3593 ctor->ValueMapAdd(inst, n);
3594 }
3595
// Lowers Cast between Ark numeric types. Identity casts pass the value through;
// float-to-int uses a dedicated helper; casts to BOOL normalize to 0/1.
void LLVMIrConstructor::VisitCast(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto x = ctor->GetInputValue(inst, 0);

    auto type = inst->GetInputType(0);
    auto targetType = inst->GetType();
    auto llvmTargetType = ctor->GetExactType(targetType);
    // Do not cast if either Ark or LLVM types are the same
    if (type == targetType || x->getType() == llvmTargetType) {
        ctor->ValueMapAdd(inst, x, false);
        return;
    }

    if (DataType::IsFloatType(type) && IsInteger(targetType)) {
        // float to int, e.g. F64TOI32, F32TOI64, F64TOU32, F32TOU64
        auto n = ctor->CreateCastToInt(inst);
        ctor->ValueMapAdd(inst, n);
        return;
    }
    auto op = ctor->GetCastOp(type, targetType);
    if (targetType == DataType::BOOL) {
        ASSERT(op == llvm::Instruction::Trunc);
        // Instead of truncating (which would keep only the low bit), compare the
        // source against zero and widen the i1 to the i8 that represents BOOL.
        auto u1 = ctor->builder_.CreateIsNotNull(x, CreateNameForInst(inst));
        auto n = ctor->builder_.CreateZExt(u1, ctor->builder_.getInt8Ty());
        ctor->ValueMapAdd(inst, n, false);
        return;
    }
    auto n = ctor->builder_.CreateCast(op, x, llvmTargetType);
    ctor->ValueMapAdd(inst, n);
}
3627
VisitAnd(GraphVisitor * v,Inst * inst)3628 void LLVMIrConstructor::VisitAnd(GraphVisitor *v, Inst *inst)
3629 {
3630 auto ctor = static_cast<LLVMIrConstructor *>(v);
3631 auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::And);
3632 ctor->ValueMapAdd(inst, n);
3633 }
3634
VisitAndI(GraphVisitor * v,Inst * inst)3635 void LLVMIrConstructor::VisitAndI(GraphVisitor *v, Inst *inst)
3636 {
3637 auto ctor = static_cast<LLVMIrConstructor *>(v);
3638 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::And, inst->CastToAndI()->GetImm());
3639 ctor->ValueMapAdd(inst, n);
3640 }
3641
VisitOr(GraphVisitor * v,Inst * inst)3642 void LLVMIrConstructor::VisitOr(GraphVisitor *v, Inst *inst)
3643 {
3644 auto ctor = static_cast<LLVMIrConstructor *>(v);
3645 auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Or);
3646 ctor->ValueMapAdd(inst, n);
3647 }
3648
VisitOrI(GraphVisitor * v,Inst * inst)3649 void LLVMIrConstructor::VisitOrI(GraphVisitor *v, Inst *inst)
3650 {
3651 auto ctor = static_cast<LLVMIrConstructor *>(v);
3652 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Or, inst->CastToOrI()->GetImm());
3653 ctor->ValueMapAdd(inst, n);
3654 }
3655
VisitXor(GraphVisitor * v,Inst * inst)3656 void LLVMIrConstructor::VisitXor(GraphVisitor *v, Inst *inst)
3657 {
3658 auto ctor = static_cast<LLVMIrConstructor *>(v);
3659 auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Xor);
3660 ctor->ValueMapAdd(inst, n);
3661 }
3662
VisitXorI(GraphVisitor * v,Inst * inst)3663 void LLVMIrConstructor::VisitXorI(GraphVisitor *v, Inst *inst)
3664 {
3665 auto ctor = static_cast<LLVMIrConstructor *>(v);
3666 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Xor, inst->CastToXorI()->GetImm());
3667 ctor->ValueMapAdd(inst, n);
3668 }
3669
VisitShl(GraphVisitor * v,Inst * inst)3670 void LLVMIrConstructor::VisitShl(GraphVisitor *v, Inst *inst)
3671 {
3672 auto ctor = static_cast<LLVMIrConstructor *>(v);
3673 auto n = ctor->CreateShiftOp(inst, llvm::Instruction::Shl);
3674 ctor->ValueMapAdd(inst, n);
3675 }
3676
VisitShlI(GraphVisitor * v,Inst * inst)3677 void LLVMIrConstructor::VisitShlI(GraphVisitor *v, Inst *inst)
3678 {
3679 auto ctor = static_cast<LLVMIrConstructor *>(v);
3680 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Shl, inst->CastToShlI()->GetImm());
3681 ctor->ValueMapAdd(inst, n);
3682 }
3683
VisitShr(GraphVisitor * v,Inst * inst)3684 void LLVMIrConstructor::VisitShr(GraphVisitor *v, Inst *inst)
3685 {
3686 auto ctor = static_cast<LLVMIrConstructor *>(v);
3687 auto n = ctor->CreateShiftOp(inst, llvm::Instruction::LShr);
3688 ctor->ValueMapAdd(inst, n);
3689 }
3690
VisitShrI(GraphVisitor * v,Inst * inst)3691 void LLVMIrConstructor::VisitShrI(GraphVisitor *v, Inst *inst)
3692 {
3693 auto ctor = static_cast<LLVMIrConstructor *>(v);
3694 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::LShr, inst->CastToShrI()->GetImm());
3695 ctor->ValueMapAdd(inst, n);
3696 }
3697
VisitAShr(GraphVisitor * v,Inst * inst)3698 void LLVMIrConstructor::VisitAShr(GraphVisitor *v, Inst *inst)
3699 {
3700 auto ctor = static_cast<LLVMIrConstructor *>(v);
3701 auto n = ctor->CreateShiftOp(inst, llvm::Instruction::AShr);
3702 ctor->ValueMapAdd(inst, n);
3703 }
3704
VisitAShrI(GraphVisitor * v,Inst * inst)3705 void LLVMIrConstructor::VisitAShrI(GraphVisitor *v, Inst *inst)
3706 {
3707 auto ctor = static_cast<LLVMIrConstructor *>(v);
3708 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::AShr, inst->CastToAShrI()->GetImm());
3709 ctor->ValueMapAdd(inst, n);
3710 }
3711
VisitAdd(GraphVisitor * v,Inst * inst)3712 void LLVMIrConstructor::VisitAdd(GraphVisitor *v, Inst *inst)
3713 {
3714 auto ctor = static_cast<LLVMIrConstructor *>(v);
3715 llvm::Value *n;
3716 if (IsFloatType(inst->GetType())) {
3717 n = ctor->CreateBinaryOp(inst, llvm::Instruction::FAdd);
3718 } else if (IsTypeNumeric(inst->GetType())) {
3719 n = ctor->CreateBinaryOp(inst, llvm::Instruction::Add);
3720 } else {
3721 UNREACHABLE();
3722 }
3723 ctor->ValueMapAdd(inst, n);
3724 }
3725
VisitAddI(GraphVisitor * v,Inst * inst)3726 void LLVMIrConstructor::VisitAddI(GraphVisitor *v, Inst *inst)
3727 {
3728 auto ctor = static_cast<LLVMIrConstructor *>(v);
3729 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Add, inst->CastToAddI()->GetImm());
3730 ctor->ValueMapAdd(inst, n);
3731 }
3732
VisitSub(GraphVisitor * v,Inst * inst)3733 void LLVMIrConstructor::VisitSub(GraphVisitor *v, Inst *inst)
3734 {
3735 auto ctor = static_cast<LLVMIrConstructor *>(v);
3736 llvm::Value *n;
3737 if (IsFloatType(inst->GetType())) {
3738 n = ctor->CreateBinaryOp(inst, llvm::Instruction::FSub);
3739 } else if (IsTypeNumeric(inst->GetType())) {
3740 n = ctor->CreateBinaryOp(inst, llvm::Instruction::Sub);
3741 } else {
3742 UNREACHABLE();
3743 }
3744 ctor->ValueMapAdd(inst, n);
3745 }
3746
VisitSubI(GraphVisitor * v,Inst * inst)3747 void LLVMIrConstructor::VisitSubI(GraphVisitor *v, Inst *inst)
3748 {
3749 auto ctor = static_cast<LLVMIrConstructor *>(v);
3750 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Sub, inst->CastToSubI()->GetImm());
3751 ctor->ValueMapAdd(inst, n);
3752 }
3753
VisitMul(GraphVisitor * v,Inst * inst)3754 void LLVMIrConstructor::VisitMul(GraphVisitor *v, Inst *inst)
3755 {
3756 auto ctor = static_cast<LLVMIrConstructor *>(v);
3757 llvm::Value *n;
3758 if (IsFloatType(inst->GetType())) {
3759 n = ctor->CreateBinaryOp(inst, llvm::Instruction::FMul);
3760 } else if (IsTypeNumeric(inst->GetType())) {
3761 n = ctor->CreateBinaryOp(inst, llvm::Instruction::Mul);
3762 } else {
3763 UNREACHABLE();
3764 }
3765 ctor->ValueMapAdd(inst, n);
3766 }
3767
VisitMulI(GraphVisitor * v,Inst * inst)3768 void LLVMIrConstructor::VisitMulI(GraphVisitor *v, Inst *inst)
3769 {
3770 auto ctor = static_cast<LLVMIrConstructor *>(v);
3771 auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Mul, inst->CastToMulI()->GetImm());
3772 ctor->ValueMapAdd(inst, n);
3773 }
3774
VisitDiv(GraphVisitor * v,Inst * inst)3775 void LLVMIrConstructor::VisitDiv(GraphVisitor *v, Inst *inst)
3776 {
3777 auto ctor = static_cast<LLVMIrConstructor *>(v);
3778 auto type = inst->GetType();
3779 llvm::Value *n;
3780 if (IsFloatType(type)) {
3781 n = ctor->CreateBinaryOp(inst, llvm::Instruction::FDiv);
3782 } else if (IsInteger(type)) {
3783 if (IsSignedInteger(type)) {
3784 n = ctor->CreateSignDivMod(inst, llvm::Instruction::SDiv);
3785 } else {
3786 n = ctor->CreateBinaryOp(inst, llvm::Instruction::UDiv);
3787 }
3788 } else {
3789 UNREACHABLE();
3790 }
3791 ctor->ValueMapAdd(inst, n);
3792 }
3793
VisitMod(GraphVisitor * v,Inst * inst)3794 void LLVMIrConstructor::VisitMod(GraphVisitor *v, Inst *inst)
3795 {
3796 auto ctor = static_cast<LLVMIrConstructor *>(v);
3797 auto type = inst->GetType();
3798 llvm::Value *n;
3799 if (IsFloatType(type)) {
3800 n = ctor->CreateBinaryOp(inst, llvm::Instruction::FRem);
3801 } else if (IsInteger(type)) {
3802 if (IsSignedInteger(type)) {
3803 n = ctor->CreateSignDivMod(inst, llvm::Instruction::SRem);
3804 } else {
3805 n = ctor->CreateBinaryOp(inst, llvm::Instruction::URem);
3806 }
3807 } else {
3808 UNREACHABLE();
3809 }
3810 ctor->ValueMapAdd(inst, n);
3811 }
3812
VisitMin(GraphVisitor * v,Inst * inst)3813 void LLVMIrConstructor::VisitMin(GraphVisitor *v, Inst *inst)
3814 {
3815 ASSERT(g_options.IsCompilerEncodeIntrinsics());
3816 auto ctor = static_cast<LLVMIrConstructor *>(v);
3817 auto operType = inst->CastToMin()->GetType();
3818 llvm::Value *x = ctor->GetInputValue(inst, 0);
3819 llvm::Value *y = ctor->GetInputValue(inst, 1);
3820 llvm::Intrinsic::ID llvmId = 0;
3821
3822 if (DataType::IsFloatType(operType)) {
3823 llvmId = llvm::Intrinsic::minimum;
3824 } else if (IsInteger(operType)) {
3825 llvmId = DataType::IsTypeSigned(operType) ? llvm::Intrinsic::smin : llvm::Intrinsic::umin;
3826 } else {
3827 ASSERT_DO(false, (std::cerr << "Min is not supported for type " << DataType::ToString(operType) << std::endl));
3828 UNREACHABLE();
3829 }
3830 auto min = ctor->builder_.CreateBinaryIntrinsic(llvmId, x, y);
3831 ctor->ValueMapAdd(inst, min);
3832 }
3833
VisitMax(GraphVisitor * v,Inst * inst)3834 void LLVMIrConstructor::VisitMax(GraphVisitor *v, Inst *inst)
3835 {
3836 ASSERT(g_options.IsCompilerEncodeIntrinsics());
3837 auto ctor = static_cast<LLVMIrConstructor *>(v);
3838 auto operType = inst->CastToMax()->GetType();
3839 llvm::Value *x = ctor->GetInputValue(inst, 0);
3840 llvm::Value *y = ctor->GetInputValue(inst, 1);
3841 llvm::Intrinsic::ID llvmId = 0;
3842
3843 if (DataType::IsFloatType(operType)) {
3844 llvmId = llvm::Intrinsic::maximum;
3845 } else if (IsInteger(operType)) {
3846 llvmId = DataType::IsTypeSigned(operType) ? llvm::Intrinsic::smax : llvm::Intrinsic::umax;
3847 } else {
3848 ASSERT_DO(false, (std::cerr << "Max is not supported for type " << DataType::ToString(operType) << std::endl));
3849 UNREACHABLE();
3850 }
3851 auto max = ctor->builder_.CreateBinaryIntrinsic(llvmId, x, y);
3852 ctor->ValueMapAdd(inst, max);
3853 }
3854
VisitCompare(GraphVisitor * v,Inst * inst)3855 void LLVMIrConstructor::VisitCompare(GraphVisitor *v, Inst *inst)
3856 {
3857 auto ctor = static_cast<LLVMIrConstructor *>(v);
3858 auto compareInst = inst->CastToCompare();
3859 auto operandsType = compareInst->GetOperandsType();
3860
3861 llvm::Value *x = ctor->GetInputValue(inst, 0);
3862 llvm::Value *y = ctor->GetInputValue(inst, 1);
3863
3864 llvm::Value *n = nullptr;
3865 if (IsInteger(operandsType) || DataType::IsReference(operandsType)) {
3866 n = ctor->CreateCondition(compareInst->GetCc(), x, y);
3867 } else {
3868 n = ctor->builder_.CreateFCmp(FCmpCodeConvert(compareInst->GetCc()), x, y);
3869 }
3870 ctor->ValueMapAdd(inst, n);
3871 }
3872
VisitCmp(GraphVisitor * v,Inst * inst)3873 void LLVMIrConstructor::VisitCmp(GraphVisitor *v, Inst *inst)
3874 {
3875 auto ctor = static_cast<LLVMIrConstructor *>(v);
3876 CmpInst *cmpInst = inst->CastToCmp();
3877 DataType::Type operandsType = cmpInst->GetOperandsType();
3878
3879 auto x = ctor->GetInputValue(inst, 0);
3880 auto y = ctor->GetInputValue(inst, 1);
3881 llvm::Value *n;
3882 if (DataType::IsFloatType(operandsType)) {
3883 n = ctor->CreateFloatComparison(cmpInst, x, y);
3884 } else if (IsInteger(operandsType)) {
3885 n = ctor->CreateIntegerComparison(cmpInst, x, y);
3886 } else {
3887 ASSERT_DO(false, (std::cerr << "Unsupported comparison for operands of type = "
3888 << DataType::ToString(operandsType) << std::endl));
3889 UNREACHABLE();
3890 }
3891 ctor->ValueMapAdd(inst, n);
3892 }
3893
VisitNeg(GraphVisitor * v,Inst * inst)3894 void LLVMIrConstructor::VisitNeg(GraphVisitor *v, Inst *inst)
3895 {
3896 auto ctor = static_cast<LLVMIrConstructor *>(v);
3897 auto inputType = inst->GetInputType(0);
3898 auto toNegate = ctor->GetInputValue(inst, 0);
3899 llvm::Value *n;
3900 if (inputType == DataType::Type::FLOAT64 || inputType == DataType::Type::FLOAT32) {
3901 n = ctor->builder_.CreateFNeg(toNegate);
3902 } else if (IsInteger(inputType)) {
3903 n = ctor->builder_.CreateNeg(toNegate);
3904 } else {
3905 ASSERT_DO(false, (std::cerr << "Negation is not supported for" << DataType::ToString(inputType) << std::endl));
3906 UNREACHABLE();
3907 }
3908 ctor->ValueMapAdd(inst, n);
3909 }
3910
VisitNot(GraphVisitor * v,Inst * inst)3911 void LLVMIrConstructor::VisitNot(GraphVisitor *v, Inst *inst)
3912 {
3913 ASSERT(IsInteger(inst->GetInputType(0)));
3914
3915 auto ctor = static_cast<LLVMIrConstructor *>(v);
3916 auto input = ctor->GetInputValue(inst, 0);
3917
3918 auto notOperator = ctor->builder_.CreateNot(input);
3919 ctor->ValueMapAdd(inst, notOperator);
3920 }
3921
// Lowers IfImm: conditional branch comparing input 0 against an immediate.
void LLVMIrConstructor::VisitIfImm(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto x = ctor->GetInputValue(inst, 0);
    auto ifimm = inst->CastToIfImm();

    llvm::Value *cond = nullptr;
    // Common case "bool != 0": the i1 condition is just a truncation of the value.
    if (ifimm->GetCc() == ConditionCode::CC_NE && ifimm->GetImm() == 0 && x->getType()->isIntegerTy()) {
        ASSERT(ifimm->GetOperandsType() == DataType::BOOL);
        cond = ctor->builder_.CreateTrunc(x, ctor->builder_.getInt1Ty());
    } else {
        ASSERT(x->getType()->isIntOrPtrTy());
        // Materialize the immediate as a constant of x's type. Pointers need either a
        // null-pointer constant (imm == 0) or an integer constant cast to pointer.
        llvm::Constant *immCst;
        if (x->getType()->isPointerTy()) {
            if (ifimm->GetImm() == 0) {
                immCst = llvm::ConstantPointerNull::get(llvm::cast<llvm::PointerType>(x->getType()));
            } else {
                immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
                immCst = llvm::ConstantExpr::getPointerCast(immCst, x->getType());
            }
        } else {
            immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
        }
        cond = ctor->CreateCondition(ifimm->GetCc(), x, immCst);
    }
    // Branch-probability hints from the Ark instruction are forwarded to CreateIf.
    ctor->CreateIf(inst, cond, ifimm->IsLikely(), ifimm->IsUnlikely());
}
3949
VisitIf(GraphVisitor * v,Inst * inst)3950 void LLVMIrConstructor::VisitIf(GraphVisitor *v, Inst *inst)
3951 {
3952 auto ctor = static_cast<LLVMIrConstructor *>(v);
3953 auto x = ctor->GetInputValue(inst, 0);
3954 auto y = ctor->GetInputValue(inst, 1);
3955 ASSERT(x->getType()->isIntOrPtrTy());
3956 ASSERT(y->getType()->isIntOrPtrTy());
3957 auto ifi = inst->CastToIf();
3958 auto cond = ctor->CreateCondition(ifi->GetCc(), x, y);
3959 ctor->CreateIf(inst, cond, ifi->IsLikely(), ifi->IsUnlikely());
3960 }
3961
VisitCallIndirect(GraphVisitor * v,Inst * inst)3962 void LLVMIrConstructor::VisitCallIndirect(GraphVisitor *v, Inst *inst)
3963 {
3964 auto ctor = static_cast<LLVMIrConstructor *>(v);
3965 auto ptr = ctor->GetInputValue(inst, 0);
3966 ASSERT_TYPE(ptr, ctor->builder_.getPtrTy());
3967 // Build FunctionType
3968 ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
3969 ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
3970 for (size_t i = 1; i < inst->GetInputs().Size(); ++i) {
3971 argTypes.push_back(ctor->GetType(inst->GetInput(i).GetInst()->GetType()));
3972 args.push_back(ctor->GetInputValue(inst, i));
3973 }
3974 auto retType = ctor->GetType(inst->GetType());
3975 auto funcType = llvm::FunctionType::get(retType, argTypes, false);
3976 auto call = ctor->builder_.CreateCall(funcType, ptr, args);
3977 if (!retType->isVoidTy()) {
3978 ctor->ValueMapAdd(inst, call);
3979 }
3980 }
3981
// Lowers Call in non-managed (Irtoc) code: resolves the external name, declares
// the function on first use, coerces arguments to the declared parameter types,
// and maps the (coerced) result unless the call is void.
void LLVMIrConstructor::VisitCall(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    ASSERT(!ctor->GetGraph()->SupportManagedCode());

    // Prepare external call if needed
    auto externalId = inst->CastToCall()->GetCallMethodId();
    auto runtime = ctor->GetGraph()->GetRuntime();
    auto externalName = runtime->GetExternalMethodName(ctor->GetGraph()->GetMethod(), externalId);
    auto function = ctor->func_->getParent()->getFunction(externalName);
    if (function == nullptr) {
        // First call of this external: synthesize a declaration whose prototype is
        // derived from the call instruction's input and result Ark types.
        ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
        for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
            argTypes.push_back(ctor->GetType(inst->GetInputType(i)));
        }
        auto ftype = llvm::FunctionType::get(ctor->GetType(inst->GetType()), argTypes, false);
        function =
            llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, ctor->func_->getParent());
    }
    // Arguments
    ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
    for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
        args.push_back(ctor->CoerceValue(ctor->GetInputValue(inst, i), function->getArg(i)->getType()));
    }
    // Call
    auto call = ctor->builder_.CreateCall(function->getFunctionType(), function, args);

    // Functions known to return fresh pointers get a noalias return attribute;
    // otherwise assert pointer returns only come from the known pointer-returning set.
    if (IsNoAliasIrtocFunction(externalName)) {
        ASSERT(call->getType()->isPointerTy());
        call->addRetAttr(llvm::Attribute::NoAlias);
    } else {
        ASSERT(call->getType()->isPointerTy() ^ !IsPtrIgnIrtocFunction(externalName));
    }

    // Check if function has debug info
    if (function->getSubprogram() != nullptr) {
        ctor->debugData_->SetLocation(call, inst->GetPc());
    }

    if (inst->GetType() != DataType::VOID) {
        ctor->ValueMapAdd(inst, ctor->CoerceValue(call, ctor->GetType(inst->GetType())));
    }
}
4025
// Lowers Phi: creates an LLVM phi node at the head of the current block.
// No incoming values are added here - this visitor only creates the node.
void LLVMIrConstructor::VisitPhi(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto ltype = ctor->GetExactType(inst->GetType());
    auto block = ctor->GetCurrentBasicBlock();

    // PHI need adjusted insert point if ValueMapAdd already created coerced values for other PHIs
    auto nonPhi = block->getFirstNonPHI();
    if (nonPhi != nullptr) {
        ctor->builder_.SetInsertPoint(nonPhi);
    }

    auto phi = ctor->builder_.CreatePHI(ltype, inst->GetInputsCount());
    // Restore the builder's insert position to the end of the block.
    ctor->SetCurrentBasicBlock(block);
    ctor->ValueMapAdd(inst, phi);
}
4042
// Lowers MultiArray: spills the per-dimension sizes (widened to INT64) into a
// stack buffer and calls the CREATE_MULTI_ARRAY entrypoint with
// (input0, dimension count, sizes buffer).
void LLVMIrConstructor::VisitMultiArray(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);

    ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
    args.push_back(ctor->GetInputValue(inst, 0));

    // Inputs [1..sizesCount] are the dimension sizes; the "- 2U" excludes input 0
    // and one trailing input (presumably the SaveState - TODO confirm).
    auto sizesCount = inst->GetInputsCount() - 2U;
    args.push_back(ctor->builder_.getInt32(sizesCount));
    auto sizes = ctor->CreateAllocaForArgs(ctor->builder_.getInt64Ty(), sizesCount);

    // Store multi-array sizes
    for (size_t i = 1; i <= sizesCount; i++) {
        auto size = ctor->GetInputValue(inst, i);

        auto type = inst->GetInputType(i);
        if (type != DataType::INT64) {
            size = ctor->CoerceValue(size, type, DataType::INT64);
        }

        auto gep = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt64Ty(), sizes, i - 1);
        ctor->builder_.CreateStore(size, gep);
    }
    args.push_back(sizes);

    auto entrypointId = RuntimeInterface::EntrypointId::CREATE_MULTI_ARRAY;
    auto result = ctor->CreateEntrypointCall(entrypointId, inst, args);
    ctor->MarkAsAllocation(result);
    // Propagate the instruction's memory-barrier requirement to the call.
    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
    }
    ctor->ValueMapAdd(inst, result);
}
4076
VisitInitEmptyString(GraphVisitor * v,Inst * inst)4077 void LLVMIrConstructor::VisitInitEmptyString(GraphVisitor *v, Inst *inst)
4078 {
4079 auto ctor = static_cast<LLVMIrConstructor *>(v);
4080 auto eid = RuntimeInterface::EntrypointId::CREATE_EMPTY_STRING;
4081 auto result = ctor->CreateEntrypointCall(eid, inst);
4082 ctor->MarkAsAllocation(result);
4083 ctor->ValueMapAdd(inst, result);
4084 }
4085
// Lowers InitString. The from-string form copies the input string via a TLAB
// helper; otherwise the input is treated as a char array: its length is loaded
// from the array header and a new string is built from chars [0, length).
void LLVMIrConstructor::VisitInitString(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto strInit = inst->CastToInitString();
    auto arg = ctor->GetInputValue(inst, 0);
    if (strInit->IsFromString()) {
        auto result = ctor->CreateNewStringFromStringTlab(inst, arg);
        ctor->ValueMapAdd(inst, result);
    } else {
        // Read the element count from the array's length field.
        auto lengthOffset = ctor->GetGraph()->GetRuntime()->GetArrayLengthOffset(ctor->GetGraph()->GetArch());
        auto lengthPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), arg, lengthOffset);
        auto length = ctor->builder_.CreateLoad(ctor->builder_.getInt32Ty(), lengthPtr);
        // Offset 0: the new string starts from the first char of the array.
        auto result = ctor->CreateNewStringFromCharsTlab(
            inst, llvm::Constant::getNullValue(ctor->builder_.getInt32Ty()), length, arg);
        ctor->ValueMapAdd(inst, result);
    }
}
4103
// Lowers NewArray. Uses the TLAB fast-path allocation entrypoint unless TLAB is
// disabled (max size 0) or a constant array size is known to exceed the TLAB
// limit, in which case the generic runtime allocation is used.
void LLVMIrConstructor::VisitNewArray(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto method = inst->CastToNewArray()->GetMethod();

    auto type = ctor->GetInputValue(inst, 0);
    auto size = ctor->ToSizeT(ctor->GetInputValue(inst, 1));
    auto arrayType = inst->CastToNewArray()->GetTypeId();
    auto runtime = ctor->GetGraph()->GetRuntime();
    auto maxTlabSize = runtime->GetTLABMaxSize();
    // TLAB disabled entirely: always allocate through the runtime.
    if (maxTlabSize == 0) {
        auto result = ctor->CreateNewArrayWithRuntime(inst);
        ctor->ValueMapAdd(inst, result);
        return;
    }

    // NOTE(review): input 0 above is the type/class value, the size is input 1,
    // yet the constant-length check uses GetDataFlowInput(0) - verify the index.
    auto lenInst = inst->GetDataFlowInput(0);
    auto classArraySize = runtime->GetClassArraySize(ctor->GetGraph()->GetArch());
    uint64_t arraySize = 0;
    uint64_t elementSize = runtime->GetArrayElementSize(method, arrayType);
    uint64_t alignment = runtime->GetTLABAlignment();
    ASSERT(alignment != 0);

    if (lenInst->GetOpcode() == Opcode::Constant) {
        ASSERT(lenInst->GetType() == DataType::INT64);
        // Total size = header + elements, rounded up to the TLAB alignment
        // (assumes alignment is a power of two - TODO confirm).
        arraySize = lenInst->CastToConstant()->GetIntValue() * elementSize + classArraySize;
        arraySize = (arraySize & ~(alignment - 1U)) + ((arraySize % alignment) != 0U ? alignment : 0U);
        if (arraySize > maxTlabSize) {
            auto result = ctor->CreateNewArrayWithRuntime(inst);
            ctor->ValueMapAdd(inst, result);
            return;
        }
    }
    auto eid = GetAllocateArrayTlabEntrypoint(elementSize);
    auto result = ctor->CreateFastPathCall(inst, eid, {type, size});
    ctor->MarkAsAllocation(result);
    // Propagate the instruction's memory-barrier requirement to the call.
    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
    }
    ctor->ValueMapAdd(inst, result);
}
4145
// Lowers NewObject. Takes the TLAB fast path only when the class comes from a
// LoadAndInitClass with a known class, the class may use TLAB, and its aligned
// size fits the TLAB limit; otherwise falls back to the runtime allocation.
void LLVMIrConstructor::VisitNewObject(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);

    auto newObjInst = inst->CastToNewObject();
    auto srcInst = newObjInst->GetInput(0).GetInst();

    auto runtime = ctor->GetGraph()->GetRuntime();
    auto maxTlabSize = runtime->GetTLABMaxSize();
    // TLAB disabled, or the class input is not a LoadAndInitClass: slow path.
    if (maxTlabSize == 0 || srcInst->GetOpcode() != Opcode::LoadAndInitClass) {
        auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
        ctor->ValueMapAdd(inst, runtimeCall);
        return;
    }

    auto klass = srcInst->CastToLoadAndInitClass()->GetClass();
    if (klass == nullptr || !runtime->CanUseTlabForClass(klass)) {
        auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
        ctor->ValueMapAdd(inst, runtimeCall);
        return;
    }
    auto classSize = runtime->GetClassSize(klass);
    auto alignment = runtime->GetTLABAlignment();
    ASSERT(alignment != 0);

    // Round the object size up to the TLAB alignment (assumes alignment is a
    // power of two - TODO confirm).
    classSize = (classSize & ~(alignment - 1U)) + ((classSize % alignment) != 0U ? alignment : 0U);
    if (classSize > maxTlabSize) {
        auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
        ctor->ValueMapAdd(inst, runtimeCall);
        return;
    }

    auto initClass = ctor->GetInputValue(inst, 0);
    auto klassSize = ctor->ToSizeT(ctor->builder_.getInt32(classSize));
    auto eid = RuntimeInterface::EntrypointId::ALLOCATE_OBJECT_TLAB;
    auto result = ctor->CreateFastPathCall(inst, eid, {initClass, klassSize});
    ctor->MarkAsAllocation(result);
    // Propagate the instruction's memory-barrier requirement to the call.
    if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
        result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
    }
    ctor->ValueMapAdd(inst, result);
}
4188
// Lowers CallStatic: declares (or reuses) a function for the callee, emits the
// call with a save-state bundle, and applies inlining-control attributes.
// Inlined calls were already expanded and produce no code here.
void LLVMIrConstructor::VisitCallStatic(GraphVisitor *v, Inst *inst)
{
    auto call = inst->CastToCallStatic();
    if (call->IsInlined()) {
        return;
    }

    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto methodPtr = ctor->GetGraph()->GetMethod();
    auto methodId = call->GetCallMethodId();
    auto callee = ctor->GetGraph()->GetRuntime()->GetMethodById(methodPtr, methodId);
    ASSERT(callee != nullptr);
    // Create a declare statement if we haven't met this function yet
    auto function = ctor->GetOrCreateFunctionForCall(call, callee);
    ctor->arkInterface_->RememberFunctionCall(ctor->func_, function, methodId);

    // Replaced to real callee in the PandaRuntimeLowering
    auto args = ctor->GetArgumentsForCall(ctor->GetMethodArgument(), call);
    auto result = ctor->builder_.CreateCall(function, args, ctor->CreateSaveStateBundle(inst));
    ctor->WrapArkCall(inst, result);

    if (inst->GetType() != DataType::VOID) {
        ctor->ValueMapAdd(inst, result);
    }

    // Calls into external methods must not be inlined by LLVM.
    if (ctor->GetGraph()->GetRuntime()->IsMethodExternal(methodPtr, callee)) {
        result->addAttributeAtIndex(llvm::AttributeList::FunctionIndex, llvm::Attribute::NoInline);
    }
    // Calls sitting in always-throwing blocks are kept out-of-line as well.
    if (IsAlwaysThrowBasicBlock(inst)) {
        result->addAttributeAtIndex(llvm::AttributeList::FunctionIndex, llvm::Attribute::NoInline);
        result->addFnAttr(llvm::Attribute::get(ctor->func_->getContext(), "keep-noinline"));
    }
}
4222
VisitResolveStatic(GraphVisitor * v,Inst * inst)4223 void LLVMIrConstructor::VisitResolveStatic(GraphVisitor *v, Inst *inst)
4224 {
4225 auto ctor = static_cast<LLVMIrConstructor *>(v);
4226 auto call = inst->CastToResolveStatic();
4227
4228 auto slotPtr = llvm::Constant::getNullValue(ctor->builder_.getPtrTy());
4229 auto methodPtr = ctor->CreateEntrypointCall(
4230 RuntimeInterface::EntrypointId::GET_UNKNOWN_CALLEE_METHOD, inst,
4231 {ctor->GetMethodArgument(), ctor->ToSizeT(ctor->builder_.getInt32(call->GetCallMethodId())), slotPtr});
4232 auto method = ctor->builder_.CreateIntToPtr(methodPtr, ctor->builder_.getPtrTy());
4233
4234 ctor->ValueMapAdd(inst, method);
4235 }
4236
// Lowers CallResolvedStatic: input 0 is the already-resolved Method pointer.
// Loads the compiled entry point out of the Method and calls it indirectly.
void LLVMIrConstructor::VisitCallResolvedStatic(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto call = inst->CastToCallResolvedStatic();
    if (call->IsInlined()) {
        return;
    }

    auto method = ctor->GetInputValue(inst, 0);

    llvm::FunctionType *fType = ctor->GetFunctionTypeForCall(call);
    auto args = ctor->GetArgumentsForCall(method, call, true);  // skip first input

    // The callable code pointer lives at a fixed offset inside the Method.
    auto offset = ctor->GetGraph()->GetRuntime()->GetCompiledEntryPointOffset(ctor->GetGraph()->GetArch());
    auto entrypointPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), method, offset);
    auto entrypoint = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), entrypointPtr);

    auto result = ctor->builder_.CreateCall(fType, entrypoint, args, ctor->CreateSaveStateBundle(inst));
    if (inst->GetType() != DataType::VOID) {
        ctor->ValueMapAdd(inst, result);
    }
    ctor->WrapArkCall(inst, result);
}
4260
// Creates a per-call-site function declaration used to lower a virtual call.
// Pre-registers the runtime entrypoint types the later lowering needs, builds a
// declaration with a name made unique by the instruction address, and records it
// in the ark interface's virtual-function map.
template <typename T>
llvm::Function *CreateDeclForVirtualCall(T *inst, LLVMIrConstructor *ctor, LLVMArkInterface *arkInterface)
{
    // Make sure the entrypoints the virtual-call lowering relies on have their
    // function types registered in this module.
    arkInterface->GetOrCreateRuntimeFunctionType(
        ctor->GetFunc()->getContext(), ctor->GetFunc()->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
        static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_VIRTUAL_CALL_AOT));
    arkInterface->GetOrCreateRuntimeFunctionType(
        ctor->GetFunc()->getContext(), ctor->GetFunc()->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
        static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::INTF_INLINE_CACHE));

    auto methodPtr = ctor->GetGraph()->GetMethod();
    auto methodId = inst->GetCallMethodId();
    auto callee = ctor->GetGraph()->GetRuntime()->GetMethodById(methodPtr, methodId);
    ASSERT(callee != nullptr);

    // Suffix the declaration name with the instruction address so every call
    // site gets its own declaration.
    std::stringstream ssUniqName;
    ssUniqName << "f_" << std::hex << inst;
    auto uniqName = ssUniqName.str();
    auto methodName = arkInterface->GetUniqMethodName(callee) + "_" + uniqName;
    auto functionProto = ctor->GetFunctionTypeForCall(inst);
    auto func = CreateFunctionDeclaration(functionProto, methodName, ctor->GetFunc()->getParent());

    func->addFnAttr("frame-pointer", "all");
    arkInterface->PutVirtualFunction(inst->GetCallMethod(), func);
    return func;
}
4287
// Lowers CallVirtual (non-interface, CHA-based): calls through a per-call-site
// declaration tagged with the original method id for later resolution.
void LLVMIrConstructor::VisitCallVirtual(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto call = inst->CastToCallVirtual();
    if (call->IsInlined()) {
        return;
    }
    // This lowering is only valid when class-hierarchy analysis data is in use.
    ASSERT_PRINT(ctor->GetGraph()->GetAotData()->GetUseCha(),
                 std::string("GetUseCha must be 'true' but was 'false' for method = '") +
                     ctor->GetGraph()->GetRuntime()->GetMethodFullName(ctor->GetGraph()->GetMethod()) + "'");

    // Interface methods take the ResolveVirtual/CallResolvedVirtual path instead.
    ASSERT(!ctor->GetGraph()->GetRuntime()->IsInterfaceMethod(call->GetCallMethod()));
    auto methodId = call->GetCallMethodId();

    auto func = CreateDeclForVirtualCall(call, ctor, ctor->arkInterface_);
    auto args = ctor->GetArgumentsForCall(ctor->GetMethodArgument(), call);
    auto result = ctor->builder_.CreateCall(func, args, ctor->CreateSaveStateBundle(inst));
    result->addFnAttr(llvm::Attribute::get(result->getContext(), "original-method-id", std::to_string(methodId)));
    if (inst->GetType() != DataType::VOID) {
        ctor->ValueMapAdd(inst, result);
    }
    ctor->WrapArkCall(inst, result);
}
4311
// Lowers ResolveVirtual. Unknown callee: resolve through the builtin using the
// receiver (input 0). Known callee (interface method): emit a per-call-site
// declaration that stands in for the method.
void LLVMIrConstructor::VisitResolveVirtual(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto resolver = inst->CastToResolveVirtual();

    llvm::Value *method = nullptr;
    if (resolver->GetCallMethod() == nullptr) {
        llvm::Value *thiz = ctor->GetInputValue(inst, 0);
        method = ctor->CreateResolveVirtualCallBuiltin(inst, thiz, resolver->GetCallMethodId());
        ASSERT(method->getType()->isPointerTy());
    } else {
        ASSERT(ctor->GetGraph()->GetRuntime()->IsInterfaceMethod(resolver->GetCallMethod()));
        method = CreateDeclForVirtualCall(resolver, ctor, ctor->arkInterface_);
    }
    ctor->ValueMapAdd(inst, method, false);
}
4328
// Lowers CallResolvedVirtual: input 0 is the value produced by ResolveVirtual.
// Unknown callee: it is a Method pointer, so load and call its compiled entry
// point. Known (interface) callee: it is the declaration created by
// ResolveVirtual, called directly and tagged with the original method id.
void LLVMIrConstructor::VisitCallResolvedVirtual(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto call = inst->CastToCallResolvedVirtual();
    if (call->IsInlined()) {
        return;
    }
    auto runtime = ctor->GetGraph()->GetRuntime();
    auto method = ctor->GetInputValue(inst, 0);
    auto args = ctor->GetArgumentsForCall(method, call, true);

    llvm::CallInst *result = nullptr;
    if (call->GetCallMethod() == nullptr) {
        llvm::FunctionType *fType = ctor->GetFunctionTypeForCall(call);

        // Load the callable code pointer from its fixed offset inside the Method.
        auto offset = runtime->GetCompiledEntryPointOffset(ctor->GetGraph()->GetArch());
        auto entrypointPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), method, offset);
        auto entrypoint = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), entrypointPtr);
        result = ctor->builder_.CreateCall(fType, entrypoint, args, ctor->CreateSaveStateBundle(inst));
    } else {
        ASSERT(runtime->IsInterfaceMethod(call->GetCallMethod()));
        auto *func = llvm::cast<llvm::Function>(method);
        result = ctor->builder_.CreateCall(func, args, ctor->CreateSaveStateBundle(inst));
        auto methodId = call->GetCallMethodId();
        result->addFnAttr(llvm::Attribute::get(result->getContext(), "original-method-id", std::to_string(methodId)));
    }
    if (inst->GetType() != DataType::VOID) {
        ctor->ValueMapAdd(inst, result);
    }
    ctor->WrapArkCall(inst, result);
}
4360
VisitAbs(GraphVisitor * v,Inst * inst)4361 void LLVMIrConstructor::VisitAbs(GraphVisitor *v, Inst *inst)
4362 {
4363 ASSERT(g_options.IsCompilerEncodeIntrinsics());
4364 auto ctor = static_cast<LLVMIrConstructor *>(v);
4365
4366 DataType::Type pandaType = inst->GetInputType(0);
4367 auto argument = ctor->GetInputValue(inst, 0);
4368 llvm::Value *result = nullptr;
4369 if (DataType::IsFloatType(pandaType)) {
4370 result = ctor->builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, argument, nullptr);
4371 } else if (IsInteger(pandaType)) {
4372 result = ctor->builder_.CreateBinaryIntrinsic(llvm::Intrinsic::abs, argument, ctor->builder_.getFalse());
4373 } else {
4374 ASSERT_DO(false, (std::cerr << "Abs is not supported for type " << DataType::ToString(pandaType) << std::endl));
4375 UNREACHABLE();
4376 }
4377 ASSERT(result != nullptr);
4378 ctor->ValueMapAdd(inst, result);
4379 }
4380
VisitIntrinsic(GraphVisitor * v,Inst * inst)4381 void LLVMIrConstructor::VisitIntrinsic(GraphVisitor *v, Inst *inst)
4382 {
4383 auto ctor = static_cast<LLVMIrConstructor *>(v);
4384 auto entryId = inst->CastToIntrinsic()->GetIntrinsicId();
4385
4386 // Some Ark intrinsics are lowered into code or LLVM intrinsics, the IntrinsicsLowering pass
4387 // makes final desicion for them to be lowered into code or calling Ark entrypoint.
4388 if (g_options.IsCompilerEncodeIntrinsics()) {
4389 bool lowered = ctor->TryEmitIntrinsic(inst, entryId);
4390 if (lowered) {
4391 return;
4392 }
4393 }
4394 // Create call otherwise
4395 auto result = ctor->CreateIntrinsicCall(inst);
4396 if (inst->GetType() != DataType::VOID) {
4397 ctor->ValueMapAdd(inst, result);
4398 }
4399 }
4400
VisitMonitor(GraphVisitor * v,Inst * inst)4401 void LLVMIrConstructor::VisitMonitor(GraphVisitor *v, Inst *inst)
4402 {
4403 auto ctor = static_cast<LLVMIrConstructor *>(v);
4404 MonitorInst *monitor = inst->CastToMonitor();
4405 auto object = ctor->GetInputValue(inst, 0);
4406 auto eid = monitor->IsEntry() ? RuntimeInterface::EntrypointId::MONITOR_ENTER_FAST_PATH
4407 : RuntimeInterface::EntrypointId::MONITOR_EXIT_FAST_PATH;
4408 auto call = ctor->CreateEntrypointCall(eid, inst, {object});
4409 ASSERT(call->getCallingConv() == llvm::CallingConv::C);
4410 call->setCallingConv(llvm::CallingConv::ArkFast1);
4411 }
4412
VisitSqrt(GraphVisitor * v,Inst * inst)4413 void LLVMIrConstructor::VisitSqrt(GraphVisitor *v, Inst *inst)
4414 {
4415 ASSERT(g_options.IsCompilerEncodeIntrinsics());
4416 auto ctor = static_cast<LLVMIrConstructor *>(v);
4417 auto argument = ctor->GetInputValue(inst, 0);
4418 auto result = ctor->builder_.CreateUnaryIntrinsic(llvm::Intrinsic::sqrt, argument, nullptr);
4419 ctor->ValueMapAdd(inst, result);
4420 }
4421
VisitInitClass(GraphVisitor * v,Inst * inst)4422 void LLVMIrConstructor::VisitInitClass(GraphVisitor *v, Inst *inst)
4423 {
4424 auto ctor = static_cast<LLVMIrConstructor *>(v);
4425 auto classId = inst->CastToInitClass()->GetTypeId();
4426
4427 auto constexpr INITIALIZED = true;
4428 ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4429 }
4430
VisitLoadClass(GraphVisitor * v,Inst * inst)4431 void LLVMIrConstructor::VisitLoadClass(GraphVisitor *v, Inst *inst)
4432 {
4433 auto ctor = static_cast<LLVMIrConstructor *>(v);
4434 auto classId = inst->CastToLoadClass()->GetTypeId();
4435
4436 auto constexpr INITIALIZED = true;
4437 auto clsPtr = ctor->CreateLoadClassById(inst, classId, !INITIALIZED);
4438 ctor->ValueMapAdd(inst, clsPtr);
4439 }
4440
VisitLoadAndInitClass(GraphVisitor * v,Inst * inst)4441 void LLVMIrConstructor::VisitLoadAndInitClass(GraphVisitor *v, Inst *inst)
4442 {
4443 auto ctor = static_cast<LLVMIrConstructor *>(v);
4444 auto classId = inst->CastToLoadAndInitClass()->GetTypeId();
4445
4446 auto constexpr INITIALIZED = true;
4447 auto clsPtr = ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4448 ctor->ValueMapAdd(inst, clsPtr);
4449 }
4450
VisitUnresolvedLoadAndInitClass(GraphVisitor * v,Inst * inst)4451 void LLVMIrConstructor::VisitUnresolvedLoadAndInitClass(GraphVisitor *v, Inst *inst)
4452 {
4453 auto ctor = static_cast<LLVMIrConstructor *>(v);
4454 auto classId = inst->CastToUnresolvedLoadAndInitClass()->GetTypeId();
4455
4456 auto constexpr INITIALIZED = true;
4457 auto clsPtr = ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4458 ctor->ValueMapAdd(inst, clsPtr);
4459 }
4460
VisitLoadStatic(GraphVisitor * v,Inst * inst)4461 void LLVMIrConstructor::VisitLoadStatic(GraphVisitor *v, Inst *inst)
4462 {
4463 auto ctor = static_cast<LLVMIrConstructor *>(v);
4464 auto klass = ctor->GetInputValue(inst, 0);
4465 ASSERT_TYPE(klass, ctor->builder_.getPtrTy());
4466
4467 auto offset = ctor->GetGraph()->GetRuntime()->GetFieldOffset(inst->CastToLoadStatic()->GetObjField());
4468 auto fieldPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), klass, offset);
4469
4470 auto n = ctor->CreateLoadWithOrdering(inst, fieldPtr, ToAtomicOrdering(inst->CastToLoadStatic()->GetVolatile()));
4471 ctor->ValueMapAdd(inst, n);
4472 }
4473
/// Lower StoreStatic: write a static field inside the class object (input 0),
/// wrapping the store with GC write barriers when required.
void LLVMIrConstructor::VisitStoreStatic(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto klass = ctor->GetInputValue(inst, 0);
    ASSERT_TYPE(klass, ctor->builder_.getPtrTy());
    auto value = ctor->GetInputValue(inst, 1);

    auto runtime = ctor->GetGraph()->GetRuntime();
    auto offset = runtime->GetFieldOffset(inst->CastToStoreStatic()->GetObjField());
    auto fieldPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), klass, offset);

    // Pre-store barrier (SATB-style), only when the GC needs it.
    if (inst->CastToStoreStatic()->GetNeedBarrier()) {
        ctor->CreatePreWRB(inst, fieldPtr);
    }
    // The store itself, honoring volatile ordering.
    ctor->CreateStoreWithOrdering(value, fieldPtr, ToAtomicOrdering(inst->CastToStoreStatic()->GetVolatile()));
    // Post-store barrier. For inter-region barriers the class pointer plus
    // field offset is reported; otherwise the managed-class mirror object is
    // reported with offset 0 — presumably because the card table is keyed by
    // managed objects, not native class structs; confirm against GC docs.
    if (inst->CastToStoreStatic()->GetNeedBarrier()) {
        auto barrierType = runtime->GetPostType();
        if (barrierType == mem::BarrierType::POST_INTERREGION_BARRIER) {
            ctor->CreatePostWRB(inst, klass, ctor->builder_.getInt32(offset), value);
        } else {
            auto managed = ctor->CreateLoadManagedClassFromClass(klass);
            ctor->CreatePostWRB(inst, managed, ctor->builder_.getInt32(0), value);
        }
    }
}
4502
VisitUnresolvedStoreStatic(GraphVisitor * v,Inst * inst)4503 void LLVMIrConstructor::VisitUnresolvedStoreStatic(GraphVisitor *v, Inst *inst)
4504 {
4505 auto ctor = static_cast<LLVMIrConstructor *>(v);
4506 auto unresolvedStore = inst->CastToUnresolvedStoreStatic();
4507
4508 ASSERT(unresolvedStore->GetNeedBarrier());
4509 ASSERT(DataType::IsReference(inst->GetType()));
4510
4511 auto typeId = ctor->builder_.getInt32(unresolvedStore->GetTypeId());
4512 auto value = ctor->GetInputValue(inst, 0);
4513
4514 auto entrypoint = RuntimeInterface::EntrypointId::UNRESOLVED_STORE_STATIC_BARRIERED;
4515 ctor->CreateEntrypointCall(entrypoint, inst, {ctor->GetMethodArgument(), typeId, value});
4516 }
4517
VisitLoadConstArray(GraphVisitor * v,Inst * inst)4518 void LLVMIrConstructor::VisitLoadConstArray(GraphVisitor *v, Inst *inst)
4519 {
4520 auto ctor = static_cast<LLVMIrConstructor *>(v);
4521 auto arrayType = inst->CastToLoadConstArray()->GetTypeId();
4522
4523 llvm::Value *result = ctor->CreateEntrypointCall(RuntimeInterface::EntrypointId::RESOLVE_LITERAL_ARRAY, inst,
4524 {ctor->GetMethodArgument(), ctor->builder_.getInt32(arrayType)});
4525 ctor->ValueMapAdd(inst, result);
4526 }
4527
/// Lower FillConstArray: copy the constant literal data from the panda file
/// into the freshly-created array (input 0) with an inline memcpy.
void LLVMIrConstructor::VisitFillConstArray(GraphVisitor *v, Inst *inst)
{
    // Only primitive arrays — reference arrays would need barriers.
    ASSERT(!DataType::IsReference(inst->GetType()));
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto &builder = ctor->builder_;

    auto runtime = ctor->GetGraph()->GetRuntime();
    auto arrayType = inst->CastToFillConstArray()->GetTypeId();
    auto arch = ctor->GetGraph()->GetArch();
    auto src = ctor->GetInputValue(inst, 0);
    auto offset = runtime->GetArrayDataOffset(arch);
    // Element count scaled to bytes by the element size of the array type.
    auto arraySize = inst->CastToFillConstArray()->GetImm() << DataType::ShiftByType(inst->GetType(), arch);
    auto arrayPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), src, offset);

    ASSERT(arraySize != 0);

    auto arrOffset = runtime->GetOffsetToConstArrayData(inst->CastToFillConstArray()->GetMethod(), arrayType);
    auto pfOffset = runtime->GetPandaFileOffset(arch);
    auto fileOffset = runtime->GetBinaryFileBaseOffset(arch);

    // Chase Method* -> panda file wrapper -> binary file base, then offset to
    // the constant array payload inside the file.
    auto pfPtrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), ctor->GetMethodArgument(), pfOffset);
    auto pfPtr = builder.CreateLoad(builder.getPtrTy(), pfPtrPtr);
    auto filePtrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), pfPtr, fileOffset);
    auto filePtr = builder.CreateLoad(builder.getPtrTy(), filePtrPtr);
    auto constArrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), filePtr, arrOffset);

    auto align = llvm::MaybeAlign(0);
    /**
     * LLVM AOT may replace `@llvm.memcpy.inline` with call to ark's `LIB_CALL_MEM_COPY`, see `MustLowerMemCpy` in
     * libllvmbackend/llvm_ark_interface.cpp.
     */
    builder.CreateMemCpyInline(arrayPtr, align, constArrPtr, align, builder.getInt64(arraySize));
}
4561
/// Lower IsInstance. Unresolved classes go through the runtime entrypoint.
/// Otherwise a small CFG is built inline:
///   - optional null check (null object => result 0),
///   - fast path: object's class equals the requested class (=> result 1,
///     or a direct ZExt for final classes where equality is the full answer),
///   - slow path: the "inner" subtype check.
/// The results of all paths are merged with a PHI in the continuation block.
void LLVMIrConstructor::VisitIsInstance(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto isInstance = inst->CastToIsInstance();
    auto klassType = isInstance->GetClassType();
    auto object = ctor->GetInputValue(inst, 0);
    llvm::Value *result;
    if (klassType == ClassType::UNRESOLVED_CLASS) {
        result = ctor->CreateIsInstanceEntrypointCall(inst);
    } else {
        auto &ctx = ctor->func_->getContext();
        auto preBb = ctor->GetCurrentBasicBlock();
        auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_cont"), ctor->func_);

        // Null check may be omitted when the compiler proved non-null.
        if (!inst->CastToIsInstance()->GetOmitNullCheck()) {
            auto notnullBb =
                llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_notnull"), ctor->func_);
            auto isNullObj = ctor->builder_.CreateIsNull(object);
            ctor->builder_.CreateCondBr(isNullObj, contBb, notnullBb);
            ctor->SetCurrentBasicBlock(notnullBb);
        }

        llvm::Value *innerResult = nullptr;
        auto klassId = ctor->GetInputValue(inst, 1);
        auto klassObj = CreateLoadClassFromObject(object, &ctor->builder_, ctor->arkInterface_);
        auto notnullPostBb = ctor->GetCurrentBasicBlock();
        auto cmp = ctor->builder_.CreateICmpEQ(klassId, klassObj);
        if (klassType == ClassType::FINAL_CLASS) {
            // Final class: exact equality is the complete answer.
            innerResult = ctor->builder_.CreateZExt(cmp, ctor->builder_.getInt8Ty());
        } else {
            // Non-final: equality short-circuits to "true", otherwise run the
            // inner subtype check in its own block.
            auto innerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_inner"), ctor->func_);
            ctor->builder_.CreateCondBr(cmp, contBb, innerBb);
            ctor->SetCurrentBasicBlock(innerBb);
            innerResult = ctor->CreateIsInstanceInnerBlock(inst, klassObj, klassId);
        }
        auto incomingBlock = ctor->GetCurrentBasicBlock();
        ctor->builder_.CreateBr(contBb);

        ctor->SetCurrentBasicBlock(contBb);
        // PHI incoming count: inner path always, plus the null path and the
        // class-equality fast path when those blocks are distinct.
        unsigned amount = 1 + (preBb == notnullPostBb ? 0 : 1) + (notnullPostBb == incomingBlock ? 0 : 1);
        auto resultPhi = ctor->builder_.CreatePHI(ctor->builder_.getInt8Ty(), amount);
        if (preBb != notnullPostBb) {
            // Came from the null check with a null object => false.
            resultPhi->addIncoming(ctor->builder_.getInt8(0), preBb);
        }
        if (notnullPostBb != incomingBlock) {
            // Came from the equality fast path => true.
            resultPhi->addIncoming(ctor->builder_.getInt8(1), notnullPostBb);
        }
        resultPhi->addIncoming(innerResult, incomingBlock);
        result = resultPhi;
    }

    ctor->ValueMapAdd(inst, result);
}
4615
/// Lower CheckCast. Builds a CFG with a shared exit block:
///   - optional null check (null always passes the cast),
///   - unresolved classes / deoptimizing interface casts call the runtime,
///   - non-deoptimizing interface casts use a fast-path entrypoint,
///   - final classes compare class pointers and deoptimize on mismatch,
///   - remaining classes fall through to the inner subtype check.
void LLVMIrConstructor::VisitCheckCast(GraphVisitor *v, Inst *inst)
{
    auto ctor = static_cast<LLVMIrConstructor *>(v);
    auto checkCast = inst->CastToCheckCast();
    auto klassType = checkCast->GetClassType();
    auto src = ctor->GetInputValue(inst, 0);

    auto &ctx = ctor->func_->getContext();
    auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_out"), ctor->func_);

    // Nullptr check can be omitted sometimes
    if (!inst->CastToCheckCast()->GetOmitNullCheck()) {
        auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_cont"), ctor->func_);
        auto isNullptr = ctor->builder_.CreateIsNull(src);
        ctor->builder_.CreateCondBr(isNullptr, outBb, contBb);
        ctor->SetCurrentBasicBlock(contBb);
    }

    if (klassType == ClassType::UNRESOLVED_CLASS ||
        (klassType == ClassType::INTERFACE_CLASS && inst->CanDeoptimize())) {
        ctor->CreateCheckCastEntrypointCall(inst);
    } else if (klassType == ClassType::INTERFACE_CLASS) {
        ASSERT(!inst->CanDeoptimize());
        auto entrypoint = RuntimeInterface::EntrypointId::CHECK_CAST_INTERFACE;
        ctor->CreateFastPathCall(inst, entrypoint, {src, ctor->GetInputValue(inst, 1)});
    } else {
        auto klassId = ctor->GetInputValue(inst, 1);
        auto klassObj = CreateLoadClassFromObject(src, &ctor->builder_, ctor->arkInterface_);
        if (klassType == ClassType::FINAL_CLASS) {
            // Final class: pointer inequality means the cast must fail.
            auto cmpNe = ctor->builder_.CreateICmpNE(klassId, klassObj);
            auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
            ctor->CreateDeoptimizationBranch(inst, cmpNe, exception, {klassId, src});
        } else {
            // Exact match passes immediately; otherwise run the subtype walk.
            auto cmpEq = ctor->builder_.CreateICmpEQ(klassId, klassObj);
            auto innerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_inner"), ctor->func_);
            ctor->builder_.CreateCondBr(cmpEq, outBb, innerBb);
            ctor->SetCurrentBasicBlock(innerBb);
            ctor->CreateCheckCastInner(inst, klassObj, klassId);
        }
    }
    ctor->builder_.CreateBr(outBb);
    ctor->SetCurrentBasicBlock(outBb);
}
4659
VisitLoadType(GraphVisitor * v,Inst * inst)4660 void LLVMIrConstructor::VisitLoadType(GraphVisitor *v, Inst *inst)
4661 {
4662 auto ctor = static_cast<LLVMIrConstructor *>(v);
4663
4664 auto constexpr INITIALIZED = true;
4665 auto klass = ctor->CreateLoadClassById(inst, inst->CastToLoadType()->GetTypeId(), !INITIALIZED);
4666 auto result = ctor->CreateLoadManagedClassFromClass(klass);
4667 ctor->ValueMapAdd(inst, result);
4668 }
4669
VisitUnresolvedLoadType(GraphVisitor * v,Inst * inst)4670 void LLVMIrConstructor::VisitUnresolvedLoadType(GraphVisitor *v, Inst *inst)
4671 {
4672 auto ctor = static_cast<LLVMIrConstructor *>(v);
4673
4674 auto constexpr INITIALIZED = true;
4675 auto klass = ctor->CreateLoadClassById(inst, inst->CastToUnresolvedLoadType()->GetTypeId(), !INITIALIZED);
4676 auto result = ctor->CreateLoadManagedClassFromClass(klass);
4677 ctor->ValueMapAdd(inst, result);
4678 }
4679
VisitGetInstanceClass(GraphVisitor * v,Inst * inst)4680 void LLVMIrConstructor::VisitGetInstanceClass(GraphVisitor *v, Inst *inst)
4681 {
4682 auto ctor = static_cast<LLVMIrConstructor *>(v);
4683
4684 auto object = ctor->GetInputValue(inst, 0);
4685 auto klass = CreateLoadClassFromObject(object, &ctor->builder_, ctor->arkInterface_);
4686 ctor->ValueMapAdd(inst, klass);
4687 }
4688
VisitThrow(GraphVisitor * v,Inst * inst)4689 void LLVMIrConstructor::VisitThrow(GraphVisitor *v, Inst *inst)
4690 {
4691 auto ctor = static_cast<LLVMIrConstructor *>(v);
4692 auto obj = ctor->GetInputValue(inst, 0);
4693
4694 auto exception = RuntimeInterface::EntrypointId::THROW_EXCEPTION;
4695 ctor->CreateEntrypointCall(exception, inst, {obj});
4696 ctor->builder_.CreateUnreachable();
4697 }
4698
/// CatchPhi must be eliminated before LLVM lowering; reaching this visitor
/// indicates a pipeline bug.
void LLVMIrConstructor::VisitCatchPhi([[maybe_unused]] GraphVisitor *v, Inst *inst)
{
    UnexpectedLowering(inst);
}
4703
VisitLoadRuntimeClass(GraphVisitor * v,Inst * inst)4704 void LLVMIrConstructor::VisitLoadRuntimeClass(GraphVisitor *v, Inst *inst)
4705 {
4706 auto ctor = static_cast<LLVMIrConstructor *>(v);
4707
4708 auto offset = ctor->GetGraph()->GetRuntime()->GetTlsPromiseClassPointerOffset(ctor->GetGraph()->GetArch());
4709 auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_, offset,
4710 ctor->builder_.getPtrTy());
4711 ctor->ValueMapAdd(inst, result);
4712 }
4713
VisitLoadUndefined(GraphVisitor * v,Inst * inst)4714 void LLVMIrConstructor::VisitLoadUndefined(GraphVisitor *v, Inst *inst)
4715 {
4716 auto ctor = static_cast<LLVMIrConstructor *>(v);
4717
4718 auto offset = ctor->GetGraph()->GetRuntime()->GetTlsUndefinedObjectOffset(ctor->GetGraph()->GetArch());
4719 auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_, offset,
4720 ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
4721 ctor->ValueMapAdd(inst, result);
4722 }
4723
VisitCallLaunchVirtual(GraphVisitor * v,Inst * inst)4724 void LLVMIrConstructor::VisitCallLaunchVirtual(GraphVisitor *v, Inst *inst)
4725 {
4726 auto ctor = static_cast<LLVMIrConstructor *>(v);
4727 ctor->CreateLaunchCall(inst->CastToCallLaunchVirtual());
4728 }
4729
VisitCallResolvedLaunchStatic(GraphVisitor * v,Inst * inst)4730 void LLVMIrConstructor::VisitCallResolvedLaunchStatic(GraphVisitor *v, Inst *inst)
4731 {
4732 auto ctor = static_cast<LLVMIrConstructor *>(v);
4733 ctor->CreateLaunchCall(inst->CastToCallResolvedLaunchStatic());
4734 }
4735
VisitCallResolvedLaunchVirtual(GraphVisitor * v,Inst * inst)4736 void LLVMIrConstructor::VisitCallResolvedLaunchVirtual(GraphVisitor *v, Inst *inst)
4737 {
4738 auto ctor = static_cast<LLVMIrConstructor *>(v);
4739 ctor->CreateLaunchCall(inst->CastToCallResolvedLaunchVirtual());
4740 }
4741
VisitLoadImmediate(GraphVisitor * v,Inst * inst)4742 void LLVMIrConstructor::VisitLoadImmediate(GraphVisitor *v, Inst *inst)
4743 {
4744 auto ctor = static_cast<LLVMIrConstructor *>(v);
4745 auto loadImm = inst->CastToLoadImmediate();
4746 ASSERT_DO(loadImm->IsTlsOffset(), (std::cerr << "Unsupported llvm lowering for \n", inst->Dump(&std::cerr, true)));
4747 ASSERT(inst->GetType() == DataType::POINTER);
4748 auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_,
4749 loadImm->GetTlsOffset(), ctor->builder_.getPtrTy());
4750 ctor->ValueMapAdd(inst, result);
4751 }
4752
/// Fallback visitor: any opcode without a dedicated Visit* handler is a
/// lowering bug — dump the instruction and abort.
void LLVMIrConstructor::VisitDefault([[maybe_unused]] Inst *inst)
{
    ASSERT_DO(false, (std::cerr << "Unsupported llvm lowering for \n", inst->Dump(&std::cerr, true)));
    UNREACHABLE();
}
4758
/// Construct the IR builder state for one Ark method: choose the calling
/// convention and register map based on graph mode, create the llvm::Function
/// declaration, and attach Ark-specific metadata.
LLVMIrConstructor::LLVMIrConstructor(Graph *graph, llvm::Module *module, llvm::LLVMContext *context,
                                     LLVMArkInterface *arkInterface, const std::unique_ptr<DebugDataBuilder> &debugData)
    : graph_(graph),
      builder_(llvm::IRBuilder<>(*context)),
      inputMap_(graph->GetLocalAllocator()->Adapter()),
      blockTailMap_(graph->GetLocalAllocator()->Adapter()),
      blockHeadMap_(graph->GetLocalAllocator()->Adapter()),
      arkInterface_(arkInterface),
      debugData_(debugData),
      cc_(graph->GetLocalAllocator()->Adapter()),
      ccValues_(graph->GetLocalAllocator()->Adapter())
{
    llvm::CallingConv::ID callingConv = llvm::CallingConv::C;
    // Assign regmaps: interpreter handlers pin dispatch state to fixed
    // registers; fastpaths pass arguments in consecutive registers.
    if (graph->GetMode().IsInterpreter()) {
        if (graph->GetArch() == Arch::AARCH64) {
            cc_.assign({AARCH64_PC, AARCH64_ACC, AARCH64_ACC_TAG, AARCH64_FP, AARCH64_DISPATCH, AARCH64_MOFFSET,
                        AARCH64_METHOD_PTR, GetThreadReg(Arch::AARCH64)});
        } else if (graph->GetArch() == Arch::X86_64) {
            cc_.assign({X86_64_PC, X86_64_ACC, X86_64_ACC_TAG, X86_64_FP, X86_64_DISPATCH, GetThreadReg(Arch::X86_64),
                        X86_64_REAL_FP});
        } else {
            LLVM_LOG(FATAL, IR) << "Unsupported architecture for arkintcc";
        }
        callingConv = llvm::CallingConv::ArkInt;
    } else if (graph->GetMode().IsFastPath()) {
        ASSERT(graph->GetArch() == Arch::AARCH64);
        for (size_t i = 0; i < graph->GetRuntime()->GetMethodTotalArgumentsCount(graph->GetMethod()); i++) {
            cc_.push_back(i);
        }
        // Get calling convention excluding thread and frame registers
        callingConv = GetFastPathCallingConv(cc_.size());
        cc_.push_back(GetThreadReg(Arch::AARCH64));
        cc_.push_back(AARCH64_REAL_FP);
    }
    ccValues_.assign(cc_.size(), nullptr);

    // Create function
    auto funcProto = GetEntryFunctionType();
    auto methodName = arkInterface_->GetUniqMethodName(graph_->GetMethod());
    func_ = CreateFunctionDeclaration(funcProto, methodName, module);
    ASSERT(func_->getCallingConv() == llvm::CallingConv::C);
    func_->setCallingConv(callingConv);

    // Scenario of code generation for FastPath having zero arguments and return value is not tested
    ASSERT(callingConv != llvm::CallingConv::ArkFast0 || func_->getReturnType()->isVoidTy());

    if (graph->SupportManagedCode()) {
        // Managed code needs statepoint/GC support from LLVM.
        func_->setGC(std::string {llvmbackend::LLVMArkInterface::GC_STRATEGY});
    }

    // Record the declaring class id as function metadata.
    auto klassId = graph_->GetRuntime()->GetClassIdForMethod(graph_->GetMethod());
    auto klassIdMd = llvm::ConstantAsMetadata::get(builder_.getInt32(klassId));
    func_->addMetadata(llvmbackend::LLVMArkInterface::FUNCTION_MD_CLASS_ID, *llvm::MDNode::get(*context, {klassIdMd}));

    if (!arkInterface_->IsIrtocMode()) {
        func_->addMetadata("use-ark-frame", *llvm::MDNode::get(*context, {}));
    }
}
4818
/// Drive the full Ark-IR -> LLVM-IR translation for this method.
/// Returns true when the produced function passes LLVM verification.
bool LLVMIrConstructor::BuildIr(bool preventInlining)
{
    LLVM_LOG(DEBUG, IR) << "Building IR for LLVM";

    // Set Argument Names
    // Special arguments
    auto it = func_->arg_begin();
    if (graph_->SupportManagedCode()) {
        (it++)->setName("method");
    }
    // Actual arguments: a0, a1, ...
    auto idx = 0;
    while (it != func_->arg_end()) {
        std::stringstream name;
        name << "a" << idx++;
        (it++)->setName(name.str());
    }

    auto method = graph_->GetMethod();
    auto runtime = graph_->GetRuntime();
    arkInterface_->RememberFunctionOrigin(func_, method);
    func_->addFnAttr(ark::llvmbackend::LLVMArkInterface::SOURCE_LANG_ATTR,
                     std::to_string(static_cast<uint8_t>(runtime->GetMethodSourceLanguage(method))));

    if (!graph_->GetMode().IsFastPath()) {
        debugData_->BeginSubprogram(func_, runtime->GetFullFileName(method), runtime->GetMethodId(method));
    } else {
        // FastPaths must stay out-of-line.
        func_->addFnAttr(llvm::Attribute::NoInline);
    }

    // Mark the blocks reachable through normal (non-exceptional) control flow.
    auto normalMarkerHolder = MarkerHolder(graph_);
    auto normal = normalMarkerHolder.GetMarker();

    graph_->GetStartBlock()->SetMarker(normal);
    MarkNormalBlocksRecursive(graph_->GetStartBlock(), normal);

    // First step - create blocks, leaving LLVM EntryBlock untouched
    BuildBasicBlocks(normal);
    InitializeEntryBlock(preventInlining);

    // Second step - visit all instructions, including StartBlock, but not filling PHI inputs
    BuildInstructions(normal);

    // Third step - fill the PHIs inputs
    for (auto block : graph_->GetBlocksRPO()) {
        FillPhiInputs(block, normal);
    }

    if (!graph_->GetMode().IsFastPath()) {
        debugData_->EndSubprogram(func_);
    }
    if (!arkInterface_->IsIrtocMode()) {
        func_->addFnAttr("frame-pointer", "all");
    }
#ifndef NDEBUG
    // Only for tests
    BreakIrIfNecessary();
#endif
    // verifyFunction returns false if there are no errors. But we return true if everything is ok.
    auto verified = !verifyFunction(*func_, &llvm::errs());
    if (!verified) {
        func_->print(llvm::errs());
    }
    return verified;
}
4884
/// Attach the "ark.frame.info" named metadata to the module, describing how
/// LLVM frames relate to Ark CFrames: FP delta, caller-saved argument-register
/// slot offsets, and the total frame size.
void LLVMIrConstructor::InsertArkFrameInfo(llvm::Module *module, Arch arch)
{
    constexpr std::string_view ARK_CALLER_SLOTS_MD = "ark.frame.info";
    ASSERT(module->getNamedMetadata(ARK_CALLER_SLOTS_MD) == nullptr);
    auto arkFrameInfoMd = module->getOrInsertNamedMetadata(ARK_CALLER_SLOTS_MD);
    auto builder = llvm::IRBuilder<>(module->getContext());

    // The first param is a difference between Ark's fp and the start of LLVM frame.
    auto md = llvm::ConstantAsMetadata::get(builder.getInt32(0U));
    arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), {md}));

    // The second param contains offsets of caller-saved registers inside the ark's frame
    std::vector<size_t> callParamsRegs;
    switch (arch) {
        case Arch::AARCH64: {
            auto src = ArchCallingConvention<Arch::AARCH64>::Target::CALL_PARAMS_REGS;
            callParamsRegs = std::vector<size_t>(src.begin(), src.end());
            break;
        }
        case Arch::X86_64: {
            auto src = ArchCallingConvention<Arch::X86_64>::Target::CALL_PARAMS_REGS;
            callParamsRegs = std::vector<size_t>(src.begin(), src.end());
            break;
        }
        default:
            UNREACHABLE();
    }

    CFrameLayout frameLayout(arch, 0);
    const auto callerRegsSlotStart = frameLayout.GetCallerFirstSlot(false);
    const auto callerRegsCount = frameLayout.GetCallerRegistersCount(false);
    std::vector<llvm::Metadata *> argOffsets;
    for (auto paramRegId : callParamsRegs) {
        // Caller-saved slots are laid out in reverse register order.
        int slot = callerRegsSlotStart + (callerRegsCount - 1 - paramRegId);
        slot += frameLayout.GetStackStartSlot();
        constexpr auto FP_ORIGIN = CFrameLayout::OffsetOrigin::FP;
        constexpr auto OFFSET_IN_BYTES = CFrameLayout::OffsetUnit::BYTES;
        auto offset = -frameLayout.GetOffset<FP_ORIGIN, OFFSET_IN_BYTES>(slot);
        ASSERT(std::numeric_limits<int32_t>::min() <= offset);
        ASSERT(offset <= std::numeric_limits<int32_t>::max());
        if (arch == Arch::AARCH64) {
            // On aarch64 LLVM itself stores LR & FP (two slots) — adjust.
            offset -= frameLayout.GetSlotSize() * 2U;
        }
        argOffsets.push_back(llvm::ConstantAsMetadata::get(builder.getInt32(offset)));
    }
    arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), argOffsets));

    // The third param is actual frame size
    auto val = frameLayout.GetFrameSize<CFrameLayout::OffsetUnit::BYTES>();
    // LLVM will store LR & FP
    if (arch == Arch::AARCH64) {
        val -= frameLayout.GetSlotSize() * 2U;
    }
    auto vmd = llvm::ConstantAsMetadata::get(builder.getInt32(val));
    arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), {vmd}));
}
4941
/// Emit the module's gc.safepoint_poll function: load the safepoint flag from
/// TLS and, on the unlikely path, call the SAFEPOINT entrypoint.
void LLVMIrConstructor::ProvideSafepointPoll(llvm::Module *module, LLVMArkInterface *arkInterface, Arch arch)
{
    // Has been already provided
    ASSERT(module->getFunction(LLVMArkInterface::GC_SAFEPOINT_POLL_NAME) == nullptr);
    auto &ctx = module->getContext();
    auto builder = llvm::IRBuilder<>(ctx);

    // Create a gc.safepoint_poll itself
    auto pollFtype = llvm::FunctionType::get(builder.getVoidTy(), false);
    auto poll = llvm::Function::Create(pollFtype, llvm::Function::ExternalLinkage,
                                       LLVMArkInterface::GC_SAFEPOINT_POLL_NAME, module);
    poll->setDoesNotThrow();

    // Creating a body
    auto entry = llvm::BasicBlock::Create(ctx, "bb", poll);
    builder.SetInsertPoint(entry);

    // Poll condition: the 16-bit safepoint flag in TLS is non-zero.
    int64_t flagAddrOffset = arkInterface->GetRuntime()->GetFlagAddrOffset(arch);
    auto trigger =
        llvmbackend::runtime_calls::LoadTLSValue(&builder, arkInterface, flagAddrOffset, builder.getInt16Ty());
    auto needSafepoint = builder.CreateICmpNE(trigger, builder.getInt16(0), "need_safepoint");
    // Create a ret instruction immediately to split bb right before it
    auto ret = builder.CreateRetVoid();

    // Split into IF-THEN before RET and insert a safepoint call into THEN block
    auto weights =
        llvm::MDBuilder(ctx).createBranchWeights(llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,
                                                 llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);

    builder.SetInsertPoint(llvm::SplitBlockAndInsertIfThen(needSafepoint, ret, false, weights));
    builder.GetInsertBlock()->setName("safepoint");
    auto eid = RuntimeInterface::EntrypointId::SAFEPOINT;
    // Make sure the entrypoint's function type exists before emitting the call.
    arkInterface->GetOrCreateRuntimeFunctionType(ctx, module, LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
                                                 static_cast<LLVMArkInterface::EntrypointId>(eid));
    auto threadReg = llvmbackend::runtime_calls::GetThreadRegValue(&builder, arkInterface);
    auto spCall = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
        &builder, threadReg, arkInterface, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid));

    // Tag the call so later passes can recognize it as the safepoint.
    spCall->addFnAttr(llvm::Attribute::get(ctx, "safepoint"));
}
4982
CheckGraph(Graph * graph)4983 std::string LLVMIrConstructor::CheckGraph(Graph *graph)
4984 {
4985 ASSERT(!graph->IsDynamicMethod());
4986 for (auto basicBlock : graph->GetBlocksRPO()) {
4987 for (auto inst : basicBlock->AllInsts()) {
4988 bool canCompile = LLVMIrConstructor::CanCompile(inst);
4989 if (!canCompile) {
4990 // It means we have one of the following cases:
4991 // * meet some brand-new opcode in Ark Compiler IR
4992 // * dynamic intrinsic call (in non-dynamic method!)
4993 // * not yet patched SLOW_PATH_ENTRY call in Irtoc code
4994 std::stringstream sstream;
4995 sstream << GetOpcodeString(inst->GetOpcode()) << " unexpected in LLVM lowering. Method = "
4996 << graph->GetRuntime()->GetMethodFullName(graph->GetMethod());
4997 std::string error = sstream.str();
4998 LLVM_LOG(ERROR, IR) << error;
4999 return error;
5000 }
5001 }
5002 }
5003 return "";
5004 }
5005
/// Decide whether an instruction can be lowered by this constructor:
/// intrinsics go through the generated CanCompileIntrinsic table, every other
/// opcode is compilable iff a dedicated Visit* override exists (i.e. the
/// visitor is not the base-class default).
bool LLVMIrConstructor::CanCompile(Inst *inst)
{
    if (inst->IsIntrinsic()) {
        auto iid = inst->CastToIntrinsic()->GetIntrinsicId();
        // We support only slowpaths where the second immediate is an external function
        if (iid == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY) {
            return inst->CastToIntrinsic()->GetImms().size() > 1;
        }
        return CanCompileIntrinsic(iid);
    }
    // Check if we have method that can handle it: compare the member-function
    // pointer of our Visit##OPCODE against the GraphVisitor default.
    switch (inst->GetOpcode()) {
        default:
            UNREACHABLE_CONSTEXPR();
            // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, ...)                                                  \
    case Opcode::OPCODE: {                                                     \
        return &LLVMIrConstructor::Visit##OPCODE != &GraphVisitor::Visit##OPCODE; \
    }
            OPCODE_LIST(INST_DEF)
    }
#undef INST_DEF
}
5029
5030 #ifndef NDEBUG
BreakIrIfNecessary()5031 void LLVMIrConstructor::BreakIrIfNecessary()
5032 {
5033 if (llvmbackend::g_options.GetLlvmBreakIrRegex().empty()) {
5034 return;
5035 }
5036
5037 std::regex regex {llvmbackend::g_options.GetLlvmBreakIrRegex()};
5038
5039 if (!std::regex_match(func_->getName().str(), regex)) {
5040 return;
5041 }
5042
5043 LLVM_LOG(DEBUG, IR) << "Breaking IR for '" << func_->getName().str() << "' because it matches regex = '"
5044 << llvmbackend::g_options.GetLlvmBreakIrRegex() << "'";
5045
5046 for (auto &basicBlock : *func_) {
5047 basicBlock.getTerminator()->eraseFromParent();
5048 }
5049 }
5050 #endif
5051
5052 #include "llvm_ir_constructor_gen.inl"
5053
5054 } // namespace ark::compiler
5055