1 /*
2  * Copyright (c) 2023-2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <llvm/IR/Intrinsics.h>
17 #include "optimizer/code_generator/codegen.h"
18 #include "runtime/include/coretypes/string.h"
19 
20 #include "llvm_ir_constructor.h"
21 
22 #include "gc_barriers.h"
23 #include "irtoc_function_utils.h"
24 #include "llvm_logger.h"
25 #include "llvm_options.h"
26 #include "metadata.h"
27 #include "utils.h"
28 #include "transforms/builtins.h"
29 #include "transforms/gc_utils.h"
30 #include "transforms/runtime_calls.h"
31 
32 namespace ark::compiler {
33 #define ONLY_NEEDSAFEPOINT
34 #include <intrinsics_ir_build.inl>
35 #undef ONLY_NEEDSAFEPOINT
36 }  // namespace ark::compiler
37 
38 #include <llvm/IR/InlineAsm.h>
39 #include <llvm/IR/IntrinsicsAArch64.h>
40 #include <llvm/IR/MDBuilder.h>
41 #include <llvm/IR/Verifier.h>
42 #include <llvm/Transforms/Utils/BasicBlockUtils.h>
43 
44 using ark::llvmbackend::DebugDataBuilder;
45 using ark::llvmbackend::LLVMArkInterface;
46 using ark::llvmbackend::builtins::BarrierReturnVoid;
47 using ark::llvmbackend::builtins::KeepThis;
48 using ark::llvmbackend::builtins::LenArray;
49 using ark::llvmbackend::builtins::LoadClass;
50 using ark::llvmbackend::builtins::LoadInitClass;
51 using ark::llvmbackend::builtins::LoadString;
52 using ark::llvmbackend::builtins::ResolveVirtual;
53 using ark::llvmbackend::irtoc_function_utils::IsNoAliasIrtocFunction;
54 #ifndef NDEBUG
55 using ark::llvmbackend::irtoc_function_utils::IsPtrIgnIrtocFunction;
56 #endif
57 using ark::llvmbackend::utils::CreateLoadClassFromObject;
58 
59 static constexpr unsigned VECTOR_SIZE_2 = 2;
60 static constexpr unsigned VECTOR_SIZE_8 = 8;
61 static constexpr unsigned VECTOR_SIZE_16 = 16;
62 
63 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
64 #define ASSERT_TYPE(input, expectedType)                                                                   \
65     ASSERT_DO((input)->getType() == (expectedType),                                                        \
66               std::cerr << "Unexpected data type: " << GetTypeName((input)->getType()) << ". Should be a " \
67                         << GetTypeName(expectedType) << "." << std::endl)
68 
69 // Max integer that can be represented in float/double without losing precision
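// (2^24 - 1 for float and 2^53 - 1 for double, since std::numeric_limits<float>::digits == 24
// and std::numeric_limits<double>::digits == 53)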
70 constexpr float MaxIntAsExactFloat()
71 {
72     return static_cast<float>((1U << static_cast<unsigned>(std::numeric_limits<float>::digits)) - 1);
73 }
74 
75 constexpr double MaxIntAsExactDouble()
76 {
77     return static_cast<double>((1ULL << static_cast<unsigned>(std::numeric_limits<double>::digits)) - 1);
78 }
79 
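// Fixed physical registers holding the Irtoc interpreter state (see the per-arch maps below);
// *_REAL_FP is the target's hardware frame pointer (x29 on arm64, rbp on x86_64).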
80 // arm64: { dispatch: 24, pc: 20, frame: 23, acc: 21, accTag: 22, moffset: 25, methodPtr: 26 },
81 static constexpr auto AARCH64_PC = 20;
82 static constexpr auto AARCH64_ACC = 21;
83 static constexpr auto AARCH64_ACC_TAG = 22;
84 static constexpr auto AARCH64_FP = 23;
85 static constexpr auto AARCH64_DISPATCH = 24;
86 static constexpr auto AARCH64_MOFFSET = 25;
87 static constexpr auto AARCH64_METHOD_PTR = 26;
88 static constexpr auto AARCH64_REAL_FP = 29;
89 
90 // x86_64: { dispatch: 8, pc: 4, frame: 5, acc: 11, accTag: 3 }
91 static constexpr auto X86_64_PC = 4;       // renamed r10
92 static constexpr auto X86_64_ACC = 11;     // renamed r3 (rbx)
93 static constexpr auto X86_64_ACC_TAG = 3;  // renamed r11
94 static constexpr auto X86_64_FP = 5;       // renamed r9
95 static constexpr auto X86_64_DISPATCH = 8;
96 static constexpr auto X86_64_REAL_FP = 9;  // renamed r5 (rbp)
97 
98 namespace {
99 inline llvm::Function *CreateFunctionDeclaration(llvm::FunctionType *functionType, const std::string &name,
100                                                  llvm::Module *module)
101 {
102     ASSERT(functionType != nullptr);
103     ASSERT(!name.empty());
104     ASSERT(module != nullptr);
105 
106     auto function = module->getFunction(name);
107     if (function != nullptr) {
108         ASSERT(function->getVisibility() == llvm::GlobalValue::ProtectedVisibility);
109         ASSERT(function->doesNotThrow());
110         return function;
111     }
112 
113     function = llvm::Function::Create(functionType, llvm::Function::ExternalLinkage, name, module);
114     function->setDoesNotThrow();
115     function->setVisibility(llvm::GlobalValue::ProtectedVisibility);
116     function->setSectionPrefix(name);
117 
118     return function;
119 }
120 
121 inline void CreateBlackBoxAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm)
122 {
123     auto iasmType = llvm::FunctionType::get(builder->getVoidTy(), {}, false);
124     builder->CreateCall(iasmType, llvm::InlineAsm::get(iasmType, inlineAsm, "", true), {});
125 }
126 
127 inline void CreateInt32ImmAsm(llvm::IRBuilder<> *builder, const std::string &inlineAsm, uint32_t imm)
128 {
129     auto oneInt = llvm::FunctionType::get(builder->getVoidTy(), {builder->getInt32Ty()}, false);
130     builder->CreateCall(oneInt, llvm::InlineAsm::get(oneInt, inlineAsm, "i", true), {builder->getInt32(imm)});
131 }
132 
133 inline llvm::AtomicOrdering ToAtomicOrdering(bool isVolatile)
134 {
135     return isVolatile ? LLVMArkInterface::VOLATILE_ORDER : LLVMArkInterface::NOT_ATOMIC_ORDER;
136 }
137 
138 #ifndef NDEBUG
139 inline std::string GetTypeName(llvm::Type *type)
140 {
141     std::string name;
142     auto stream = llvm::raw_string_ostream(name);
143     type->print(stream);
144     return stream.str();
145 }
146 #endif
147 }  // namespace
148 
149 namespace ark::compiler {
150 
151 #include <can_compile_intrinsics_gen.inl>
152 
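// Lowers the MemChar intrinsics to AArch64 NEON IR: load a 16- or 32-byte block, compare every
// lane against the requested char, do a quick "is it present at all" check with addp, and, if it
// is, locate the first match with bswap + ctlz. Generate() yields a pointer to the matching char
// or null when the block does not contain it.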
153 class MemCharSimdLowering {
154 public:
155     MemCharSimdLowering(MemCharSimdLowering &&) = delete;
156     MemCharSimdLowering(const MemCharSimdLowering &) = delete;
157     MemCharSimdLowering &operator=(const MemCharSimdLowering &) = delete;
158     MemCharSimdLowering &operator=(MemCharSimdLowering &&) = delete;
159     MemCharSimdLowering() = delete;
160     ~MemCharSimdLowering() = default;
161 
162     MemCharSimdLowering(llvm::Value *ch, llvm::Value *addr, llvm::IRBuilder<> *builder, llvm::Function *func);
163 
164     template <bool MEM_BLOCK_SIZE_256_BITS>
165     llvm::Value *Generate(llvm::VectorType *vecTy);
166 
167     llvm::VectorType *GetU64X2Ty() const
168     {
169         return llvm::VectorType::get(builder_->getInt64Ty(), VECTOR_SIZE_2, false);
170     }
171     llvm::VectorType *GetU16X8Ty() const
172     {
173         return llvm::VectorType::get(builder_->getInt16Ty(), VECTOR_SIZE_8, false);
174     }
175     llvm::VectorType *GetU8X16Ty() const
176     {
177         return llvm::VectorType::get(builder_->getInt8Ty(), VECTOR_SIZE_16, false);
178     }
179 
180 private:
181     static const uint64_t UL64 = 64UL;
182     static const uint64_t UL128 = 128UL;
183     static const uint64_t UL192 = 192UL;
184 
185     void GenLoadAndFastCheck128(llvm::VectorType *vecTy);
186     void GenLoadAndFastCheck256(llvm::VectorType *vecTy);
187     llvm::Value *GenFindChar128(llvm::IntegerType *charTy);
188     llvm::Value *GenFindChar256(llvm::IntegerType *charTy);
189     static llvm::SmallVector<int> ShuffleMask(llvm::Type *charTy)
190     {
191         ASSERT(charTy->isIntegerTy(8U) || charTy->isIntegerTy(16U));
192         static constexpr std::initializer_list<int> MASK_U8 {15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
193         static constexpr std::initializer_list<int> MASK_U16 {7, 6, 5, 4, 3, 2, 1, 0};
194         return {charTy->isIntegerTy(8U) ? MASK_U8 : MASK_U16};
195     }
196 
197 private:
198     llvm::Value *ch_;
199     llvm::Value *addr_;
200     llvm::IRBuilder<> *builder_;
201     llvm::Function *func_;
202 
203     llvm::BasicBlock *lastBb_ = nullptr;
204     llvm::BasicBlock *foundBb_ = nullptr;
205     llvm::BasicBlock *inspectV1D0Bb_ = nullptr;
206     llvm::BasicBlock *inspectV1D1Bb_ = nullptr;
207     llvm::BasicBlock *inspectV2D0Bb_ = nullptr;
208     llvm::BasicBlock *inspectV2D1Bb_ = nullptr;
209     llvm::BasicBlock *clzV1D0Bb_ = nullptr;
210     llvm::BasicBlock *clzV1D1Bb_ = nullptr;
211     llvm::BasicBlock *clzV2D0Bb_ = nullptr;
212     llvm::Value *vcmpeq1_ = nullptr;
213     llvm::Value *vcmpeq2_ = nullptr;
214 };
215 
216 MemCharSimdLowering::MemCharSimdLowering(llvm::Value *ch, llvm::Value *addr, llvm::IRBuilder<> *builder,
217                                          llvm::Function *func)
218     : ch_(ch), addr_(addr), builder_(builder), func_(func)
219 {
220     ASSERT(addr_ != nullptr);
221     ASSERT(builder_ != nullptr);
222     ASSERT(func_ != nullptr);
223 }
224 
225 void MemCharSimdLowering::GenLoadAndFastCheck128(llvm::VectorType *vecTy)
226 {
227     auto *module = func_->getParent();
228     auto addpId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_neon_addp;
229     auto addp = llvm::Intrinsic::getDeclaration(module, addpId, {GetU64X2Ty()});
230     // Read 16-byte chunk of memory
231     auto vld1 = builder_->CreateLoad(vecTy, addr_);
232     // Prepare the search pattern
233     auto insert = builder_->CreateInsertElement(vecTy, ch_, 0UL);
234     auto pattern = builder_->CreateShuffleVector(insert, ShuffleMask(vecTy->getElementType()));
235     // Compare
236     vcmpeq1_ = builder_->CreateSExt(builder_->CreateICmpEQ(vld1, pattern), vecTy);
237     // Do fast check and give up if char is not there
238     auto v64x2 = builder_->CreateBitCast(vcmpeq1_, GetU64X2Ty());
239     auto vaddp = builder_->CreateCall(addp, {v64x2, v64x2});
240     auto low64 = builder_->CreateBitCast(builder_->CreateExtractElement(vaddp, 0UL), builder_->getInt64Ty());
241     auto charIsNotThere = builder_->CreateICmpEQ(low64, llvm::Constant::getNullValue(low64->getType()));
242     builder_->CreateCondBr(charIsNotThere, lastBb_, inspectV1D0Bb_);
243 }
244 
245 void MemCharSimdLowering::GenLoadAndFastCheck256(llvm::VectorType *vecTy)
246 {
247     auto *module = func_->getParent();
248     auto ld1Id = llvm::Intrinsic::AARCH64Intrinsics::aarch64_neon_ld1x2;
249     auto addpId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_neon_addp;
250     auto ld1 = llvm::Intrinsic::getDeclaration(module, ld1Id, {vecTy, addr_->getType()});
251     auto addp1 = llvm::Intrinsic::getDeclaration(module, addpId, {vecTy});
252     auto addp2 = llvm::Intrinsic::getDeclaration(module, addpId, {GetU64X2Ty()});
253     // Read 32-byte chunk of memory
254     auto vld1 = builder_->CreateCall(ld1, {addr_});
255     auto v1 = builder_->CreateExtractValue(vld1, {0});
256     auto v2 = builder_->CreateExtractValue(vld1, {1});
257     // Prepare the search pattern
258     auto insert = builder_->CreateInsertElement(vecTy, ch_, 0UL);
259     auto pattern = builder_->CreateShuffleVector(insert, ShuffleMask(vecTy->getElementType()));
260     // Compare
261     vcmpeq1_ = builder_->CreateSExt(builder_->CreateICmpEQ(v1, pattern), vecTy);
262     vcmpeq2_ = builder_->CreateSExt(builder_->CreateICmpEQ(v2, pattern), vecTy);
263     // Do fast check and give up if char is not there
264     auto vaddp = builder_->CreateCall(addp1, {vcmpeq1_, vcmpeq2_});
265     auto v64x2 = builder_->CreateBitCast(vaddp, GetU64X2Ty());
266     vaddp = builder_->CreateCall(addp2, {v64x2, v64x2});
267     auto low64 = builder_->CreateBitCast(builder_->CreateExtractElement(vaddp, 0UL), builder_->getInt64Ty());
268     auto charIsNotThere = builder_->CreateICmpEQ(low64, llvm::Constant::getNullValue(low64->getType()));
269     builder_->CreateCondBr(charIsNotThere, lastBb_, inspectV1D0Bb_);
270 }
271 
272 llvm::Value *MemCharSimdLowering::GenFindChar128(llvm::IntegerType *charTy)
273 {
274     ASSERT(vcmpeq1_ != nullptr && vcmpeq2_ == nullptr);
275     auto i64Ty = builder_->getInt64Ty();
276     constexpr uint32_t DWORD_SIZE = 64U;
277     // Inspect low 64-bit part of vcmpeq1
278     builder_->SetInsertPoint(inspectV1D0Bb_);
279     auto vcmpeq1 = builder_->CreateBitCast(vcmpeq1_, GetU64X2Ty());
280     auto v1d0 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq1, 0UL), i64Ty);
281     auto v1d0IsZero = builder_->CreateICmpEQ(v1d0, llvm::Constant::getNullValue(v1d0->getType()));
282     builder_->CreateCondBr(v1d0IsZero, inspectV1D1Bb_, clzV1D0Bb_);
283     builder_->SetInsertPoint(clzV1D0Bb_);
284     auto rev10 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v1d0, nullptr);
285     auto pos10 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev10, builder_->getFalse(), nullptr);
286     builder_->CreateBr(foundBb_);
287     // Inspect high 64-bit part of vcmpeq1
288     builder_->SetInsertPoint(inspectV1D1Bb_);
289     auto v1d1 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq1, 1UL), i64Ty);
290     auto rev11 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v1d1, nullptr);
291     auto clz11 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev11, builder_->getFalse(), nullptr);
292     auto pos11 = builder_->CreateAdd(clz11, llvm::Constant::getIntegerValue(i64Ty, llvm::APInt(DWORD_SIZE, UL64)));
293     builder_->CreateBr(foundBb_);
294     // Compute a pointer to the char
295     builder_->SetInsertPoint(foundBb_);
296     auto nbits = builder_->CreatePHI(i64Ty, 2U);
297     nbits->addIncoming(pos10, clzV1D0Bb_);
298     nbits->addIncoming(pos11, inspectV1D1Bb_);
299     auto nbytes = builder_->CreateLShr(nbits, charTy->isIntegerTy(8U) ? 3UL : 4UL);
300     auto foundCharPtr = builder_->CreateInBoundsGEP(charTy, addr_, nbytes);
301     builder_->CreateBr(lastBb_);
302     return foundCharPtr;
303 }
304 
305 llvm::Value *MemCharSimdLowering::GenFindChar256(llvm::IntegerType *charTy)
306 {
307     ASSERT(vcmpeq1_ != nullptr);
308     ASSERT(vcmpeq2_ != nullptr);
309     auto i64Ty = builder_->getInt64Ty();
310     constexpr uint32_t DWORD_SIZE = 64U;
311     // Inspect low 64-bit part of vcmpeq1
312     builder_->SetInsertPoint(inspectV1D0Bb_);
313     auto vcmpeq1 = builder_->CreateBitCast(vcmpeq1_, GetU64X2Ty());
314     auto v1d0 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq1, 0UL), i64Ty);
315     auto v1d0IsZero = builder_->CreateICmpEQ(v1d0, llvm::Constant::getNullValue(v1d0->getType()));
316     builder_->CreateCondBr(v1d0IsZero, inspectV1D1Bb_, clzV1D0Bb_);
317     builder_->SetInsertPoint(clzV1D0Bb_);
318     auto rev10 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v1d0, nullptr);
319     auto pos10 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev10, builder_->getFalse(), nullptr);
320     builder_->CreateBr(foundBb_);
321     // Inspect high 64-bit part of vcmpeq1
322     builder_->SetInsertPoint(inspectV1D1Bb_);
323     auto v1d1 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq1, 1UL), i64Ty);
324     auto v1d1IsZero = builder_->CreateICmpEQ(v1d1, llvm::Constant::getNullValue(v1d1->getType()));
325     builder_->CreateCondBr(v1d1IsZero, inspectV2D0Bb_, clzV1D1Bb_);
326     builder_->SetInsertPoint(clzV1D1Bb_);
327     auto rev11 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v1d1, nullptr);
328     auto clz11 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev11, builder_->getFalse(), nullptr);
329     auto pos11 = builder_->CreateAdd(clz11, llvm::Constant::getIntegerValue(i64Ty, llvm::APInt(DWORD_SIZE, UL64)));
330     builder_->CreateBr(foundBb_);
331     // Inspect low 64-bit part of vcmpeq2
332     builder_->SetInsertPoint(inspectV2D0Bb_);
333     auto vcmpeq2 = builder_->CreateBitCast(vcmpeq2_, GetU64X2Ty());
334     auto v2d0 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq2, 0UL), i64Ty);
335     auto v2d0IsZero = builder_->CreateICmpEQ(v2d0, llvm::Constant::getNullValue(v2d0->getType()));
336     builder_->CreateCondBr(v2d0IsZero, inspectV2D1Bb_, clzV2D0Bb_);
337     builder_->SetInsertPoint(clzV2D0Bb_);
338     auto rev20 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v2d0, nullptr);
339     auto clz20 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev20, builder_->getFalse(), nullptr);
340     auto pos20 = builder_->CreateAdd(clz20, llvm::Constant::getIntegerValue(i64Ty, llvm::APInt(DWORD_SIZE, UL128)));
341     builder_->CreateBr(foundBb_);
342     // Inspect high 64-bit part of vcmpeq2
343     builder_->SetInsertPoint(inspectV2D1Bb_);
344     auto v2d1 = builder_->CreateBitCast(builder_->CreateExtractElement(vcmpeq2, 1UL), i64Ty);
345     auto rev21 = builder_->CreateUnaryIntrinsic(llvm::Intrinsic::bswap, v2d1, nullptr);
346     auto clz21 = builder_->CreateBinaryIntrinsic(llvm::Intrinsic::ctlz, rev21, builder_->getFalse(), nullptr);
347     auto pos21 = builder_->CreateAdd(clz21, llvm::Constant::getIntegerValue(i64Ty, llvm::APInt(DWORD_SIZE, UL192)));
348     builder_->CreateBr(foundBb_);
349     // Compute a pointer to the char
350     builder_->SetInsertPoint(foundBb_);
351     auto nbits = builder_->CreatePHI(i64Ty, 4U);
352     nbits->addIncoming(pos10, clzV1D0Bb_);
353     nbits->addIncoming(pos11, clzV1D1Bb_);
354     nbits->addIncoming(pos20, clzV2D0Bb_);
355     nbits->addIncoming(pos21, inspectV2D1Bb_);
356     auto nbytes = builder_->CreateLShr(nbits, charTy->isIntegerTy(8U) ? 3UL : 4UL);
357     auto foundCharPtr = builder_->CreateInBoundsGEP(charTy, addr_, nbytes);
358     builder_->CreateBr(lastBb_);
359     return foundCharPtr;
360 }
361 
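// Builds the scan CFG: the fast check jumps straight to the final block when the char is absent;
// otherwise the 64-bit halves of the compare results are inspected in order, and the first
// non-zero half gives the bit position of the match.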
362 template <bool MEM_BLOCK_SIZE_256_BITS>
363 llvm::Value *MemCharSimdLowering::Generate(llvm::VectorType *vecTy)
364 {
365     auto *charTy = llvm::cast<llvm::IntegerType>(vecTy->getElementType());
366     ASSERT(vecTy == GetU8X16Ty() || vecTy == GetU16X8Ty());
367     auto &context = func_->getContext();
368     auto firstBb = builder_->GetInsertBlock();
369     lastBb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_last", func_);
370     foundBb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_found", func_);
371     inspectV1D0Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_inspect_v1d0", func_);
372     inspectV1D1Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_inspect_v1d1", func_);
373     clzV1D0Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_clz_v1d0", func_);
374     llvm::Value *foundCharPtr = nullptr;
375     if constexpr (MEM_BLOCK_SIZE_256_BITS) {
376         inspectV2D0Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_inspect_v2d0", func_);
377         inspectV2D1Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_inspect_v2d1", func_);
378         clzV1D1Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_clz_v1d1", func_);
379         clzV2D0Bb_ = llvm::BasicBlock::Create(context, "mem_char_using_simd_clz_v2d0", func_);
380         GenLoadAndFastCheck256(vecTy);
381         foundCharPtr = GenFindChar256(charTy);
382     } else {
383         GenLoadAndFastCheck128(vecTy);
384         foundCharPtr = GenFindChar128(charTy);
385     }
386     ASSERT(foundCharPtr != nullptr);
387     // The result is either a pointer to the char or null
388     builder_->SetInsertPoint(lastBb_);
389     auto result = builder_->CreatePHI(builder_->getPtrTy(), 2U);
390     result->addIncoming(llvm::Constant::getNullValue(builder_->getPtrTy()), firstBb);
391     result->addIncoming(foundCharPtr, foundBb_);
392     // Cleanup and return
393     lastBb_ = nullptr;
394     foundBb_ = nullptr;
395     inspectV1D0Bb_ = nullptr;
396     inspectV1D1Bb_ = nullptr;
397     inspectV2D0Bb_ = nullptr;
398     inspectV2D1Bb_ = nullptr;
399     clzV1D0Bb_ = nullptr;
400     clzV1D1Bb_ = nullptr;
401     clzV2D0Bb_ = nullptr;
402     vcmpeq1_ = nullptr;
403     vcmpeq2_ = nullptr;
404     return result;
405 }
406 
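// Marks every block reachable through normal (non-exceptional) control flow, skipping catch-block
// successors; successors of blocks ending in terminators are not visited either.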
407 static void MarkNormalBlocksRecursive(BasicBlock *block, Marker normal)
408 {
409     [[maybe_unused]] size_t expected = 0;
410     bool processSucc = true;
411     auto last = block->GetLastInst();
412     if (last != nullptr) {
413         // Any successors of blocks with terminators are either TryEnd or Catch blocks
414         if (last->GetFlag(inst_flags::TERMINATOR)) {
415             processSucc = false;
416         }
417         if (last->GetOpcode() == Opcode::IfImm || last->GetOpcode() == Opcode::If) {
418             expected = 1;
419         }
420     }
421     for (size_t i = 0; i < block->GetSuccsBlocks().size(); i++) {
422         auto succ = block->GetSuccessor(i);
423         if (succ->IsCatch()) {
424             ASSERT_DO(i > expected,
425                       (std::cerr << "Catch block found too early in successors: at index " << i << std::endl));
426             continue;
427         }
428         ASSERT_DO(i <= expected, (std::cerr << "Unexpected non-catch successor block at index " << i << std::endl));
429         if (processSucc && !succ->SetMarker(normal)) {
430             MarkNormalBlocksRecursive(succ, normal);
431         }
432     }
433 }
434 
435 // Use only to pass it into a method as an rvalue
436 static inline std::string CreateBasicBlockName(Inst *inst, const std::string &bbName)
437 {
438     std::stringstream name;
439     name << "bb" << std::to_string(inst->GetBasicBlock()->GetId()) << "_i" << std::to_string(inst->GetId()) << ".."
440          << bbName << "..";
441     return name.str();
442 }
443 
444 static inline std::string CreateNameForInst(Inst *inst)
445 {
446     return std::string("v") + std::to_string(inst->GetId());
447 }
448 
449 static inline bool IsInteger(DataType::Type type)
450 {
451     return DataType::IsTypeNumeric(type) && !DataType::IsFloatType(type) && type != DataType::POINTER;
452 }
453 
454 static inline bool IsSignedInteger(const DataType::Type &type)
455 {
456     return IsInteger(type) && DataType::IsTypeSigned(type);
457 }
458 
459 static inline bool IsUnsignedInteger(DataType::Type type)
460 {
461     return IsInteger(type) && !DataType::IsTypeSigned(type);
462 }
463 
464 static inline bool IsAlwaysThrowBasicBlock(Inst *inst)
465 {
466     if (!g_options.IsCompilerInliningSkipThrowBlocks()) {
467         return false;
468     }
469 
470     auto bbLastInst = inst->GetBasicBlock()->GetLastInst();
471     return bbLastInst->GetOpcode() == Opcode::Throw || bbLastInst->GetOpcode() == Opcode::Deoptimize;
472 }
473 
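// Ark condition codes map to LLVM integer predicates: LT/GT/LE/GE become signed compares,
// B(elow)/A(bove)/BE/AE become their unsigned counterparts.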
474 static llvm::ICmpInst::Predicate ICmpCodeConvert(ConditionCode cc)
475 {
476     switch (cc) {
477         case ConditionCode::CC_EQ:
478             return llvm::CmpInst::Predicate::ICMP_EQ;
479         case ConditionCode::CC_NE:
480             return llvm::CmpInst::Predicate::ICMP_NE;
481         case ConditionCode::CC_LT:
482             return llvm::CmpInst::Predicate::ICMP_SLT;
483         case ConditionCode::CC_GT:
484             return llvm::CmpInst::Predicate::ICMP_SGT;
485         case ConditionCode::CC_LE:
486             return llvm::CmpInst::Predicate::ICMP_SLE;
487         case ConditionCode::CC_GE:
488             return llvm::CmpInst::Predicate::ICMP_SGE;
489         case ConditionCode::CC_B:
490             return llvm::CmpInst::Predicate::ICMP_ULT;
491         case ConditionCode::CC_A:
492             return llvm::CmpInst::Predicate::ICMP_UGT;
493         case ConditionCode::CC_BE:
494             return llvm::CmpInst::Predicate::ICMP_ULE;
495         case ConditionCode::CC_AE:
496             return llvm::CmpInst::Predicate::ICMP_UGE;
497         default:
498             UNREACHABLE();
499             return llvm::CmpInst::Predicate::ICMP_NE;
500     }
501 }
502 
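// Every mapping below uses an unordered predicate, so a NaN operand makes the condition hold.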
503 static llvm::FCmpInst::Predicate FCmpCodeConvert(ConditionCode conditionCode)
504 {
505     switch (conditionCode) {
506         case ConditionCode::CC_EQ:
507             return llvm::FCmpInst::Predicate::FCMP_UEQ;
508         case ConditionCode::CC_NE:
509             return llvm::FCmpInst::Predicate::FCMP_UNE;
510         case ConditionCode::CC_LT:
511             return llvm::FCmpInst::Predicate::FCMP_ULT;
512         case ConditionCode::CC_GT:
513             return llvm::FCmpInst::Predicate::FCMP_UGT;
514         case ConditionCode::CC_LE:
515             return llvm::FCmpInst::Predicate::FCMP_ULE;
516         case ConditionCode::CC_GE:
517             return llvm::FCmpInst::Predicate::FCMP_UGE;
518         case ConditionCode::CC_B:
519             return llvm::FCmpInst::Predicate::FCMP_ULT;
520         case ConditionCode::CC_A:
521             return llvm::FCmpInst::Predicate::FCMP_UGT;
522         case ConditionCode::CC_BE:
523             return llvm::FCmpInst::Predicate::FCMP_ULE;
524         case ConditionCode::CC_AE:
525             return llvm::FCmpInst::Predicate::FCMP_UGE;
526         default:
527             ASSERT_DO(false, (std::cerr << "Unexpected condition_code = " << conditionCode << std::endl));
528             UNREACHABLE();
529     }
530 }
531 
532 static DeoptimizeType GetDeoptimizationType(Inst *inst)
533 {
534     switch (inst->GetOpcode()) {
535         case Opcode::NullCheck:
536             return DeoptimizeType::NULL_CHECK;
537         case Opcode::DeoptimizeIf:
538             return inst->CastToDeoptimizeIf()->GetDeoptimizeType();
539         case Opcode::BoundsCheck:
540             return DeoptimizeType::BOUNDS_CHECK_WITH_DEOPT;
541         case Opcode::NegativeCheck:
542             return DeoptimizeType::NEGATIVE_CHECK;
543         case Opcode::ZeroCheck:
544             return DeoptimizeType::ZERO_CHECK;
545         case Opcode::SubOverflowCheck:
546             return DeoptimizeType::OVERFLOW_TYPE;
547         case Opcode::CheckCast:
548             return DeoptimizeType::CHECK_CAST;
549         case Opcode::RefTypeCheck:
550         default:
551             ASSERT_DO(false, (std::cerr << "Unexpected inst to GetDeoptimizationType, inst:" << std::endl,
552                               inst->Dump(&std::cerr, true)));
553             UNREACHABLE();
554     }
555 }
556 
557 static llvm::CallingConv::ID GetFastPathCallingConv(uint32_t numArgs)
558 {
559     switch (numArgs) {
560         case 0U:
561             return llvm::CallingConv::ArkFast0;
562         case 1U:
563             return llvm::CallingConv::ArkFast1;
564         case 2U:
565             return llvm::CallingConv::ArkFast2;
566         case 3U:
567             return llvm::CallingConv::ArkFast3;
568         case 4U:
569             return llvm::CallingConv::ArkFast4;
570         case 5U:
571             return llvm::CallingConv::ArkFast5;
572         case 6U:
573             return llvm::CallingConv::ArkFast6;
574         default:
575             UNREACHABLE();
576     }
577 }
578 
579 static RuntimeInterface::EntrypointId GetAllocateArrayTlabEntrypoint(size_t elementSize)
580 {
581     switch (elementSize) {
582         case sizeof(uint8_t):
583             return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB8;
584         case sizeof(uint16_t):
585             return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB16;
586         case sizeof(uint32_t):
587             return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB32;
588         case sizeof(uint64_t):
589             return RuntimeInterface::EntrypointId::ALLOCATE_ARRAY_TLAB64;
590         default:
591             UNREACHABLE();
592     }
593 }
594 
595 static size_t GetRealFrameReg(Arch arch)
596 {
597     switch (arch) {
598         case Arch::AARCH64:
599             return AARCH64_REAL_FP;
600         case Arch::X86_64:
601             return X86_64_REAL_FP;
602         default:
603             UNREACHABLE();
604     }
605 }
606 
607 /**
608  * Called when an instruction that should never reach the translator has
609  * nevertheless been handed to it for lowering.
610  */
611 static void UnexpectedLowering([[maybe_unused]] Inst *inst)
612 {
613     ASSERT_DO(false, (std::cerr << "Unexpected attempt to lower: ", inst->Dump(&std::cerr, true)));
614     UNREACHABLE();
615 }
616 
617 bool LLVMIrConstructor::IsSafeCast(Inst *inst, unsigned int index)
618 {
619     auto trueType = inst->GetInput(index).GetInst()->GetType();
620     auto instType = inst->GetInputType(index);
621     bool signTheSame = IsSignedInteger(trueType) == IsSignedInteger(instType);
622     bool extending = DataType::GetTypeSize(trueType, GetGraph()->GetArch()) <=
623                      DataType::GetTypeSize(instType, GetGraph()->GetArch());
624     return signTheSame || extending;
625 }
626 
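// Tries to emit the intrinsic as a direct LLVM call; returns false when no emitter matches this id.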
627 bool LLVMIrConstructor::TryEmitIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId arkId)
628 {
629     auto module = func_->getParent();
630     auto f32Ty = builder_.getFloatTy();
631     auto f64Ty = builder_.getDoubleTy();
632     llvm::Function *llvmId = nullptr;
633 
634     switch (arkId) {
635 #include "intrinsics_llvm_codegen.inl"
636 #ifndef NDEBUG
637         // Must be lowered earlier in IrBuilder, impossible to meet
638         case RuntimeInterface::IntrinsicId::INTRINSIC_OBJECT_MONITOR_ENTER:
639         case RuntimeInterface::IntrinsicId::INTRINSIC_OBJECT_MONITOR_EXIT:
640         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_I32:
641         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_I64:
642         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_F32:
643         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_ABS_F64:
644         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_SQRT_F32:
645         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_SQRT_F64:
646         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_I32:
647         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_I64:
648         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_F32:
649         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MIN_F64:
650         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_I32:
651         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_I64:
652         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_F32:
653         case RuntimeInterface::IntrinsicId::INTRINSIC_MATH_MAX_F64:
654             UNREACHABLE();
655         // Can appear only after LLVM optimizations
656         case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_COPY:
657         case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_SET:
658         case RuntimeInterface::IntrinsicId::LIB_CALL_MEM_MOVE:
659             UNREACHABLE();
660 #include "emit_intrinsic_llvm_ir_constructor_gen.inl"
661 #endif
662         default:
663             return false;
664     }
665 
666     ASSERT(llvmId != nullptr);
667     ASSERT(!inst->CanThrow());
668 
669     arkInterface_->GetOrCreateRuntimeFunctionType(func_->getContext(), func_->getParent(),
670                                                   LLVMArkInterface::RuntimeCallType::INTRINSIC,
671                                                   static_cast<LLVMArkInterface::EntrypointId>(arkId));
672 
673     auto arguments = GetIntrinsicArguments(llvmId->getFunctionType(), inst->CastToIntrinsic());
674     auto result = llvm::CallInst::Create(llvmId, arguments, "", GetCurrentBasicBlock());
675     SetIntrinsicParamAttrs(result, inst->CastToIntrinsic(), arguments);
676     ValueMapAdd(inst, result);
677     return true;
678 }
679 
680 // Emitters for specific intrinsics
681 
682 bool LLVMIrConstructor::EmitFastPath(Inst *inst, RuntimeInterface::EntrypointId eid, uint32_t numArgs)
683 {
684     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
685     for (uint32_t i = 0; i < numArgs; i++) {
686         args.push_back(GetInputValue(inst, i));
687     }
688     auto call = CreateFastPathCall(inst, eid, args);
689 
690     auto retType = GetType(inst->GetType());
691     if (!retType->isVoidTy()) {
692         ValueMapAdd(inst, call);
693     }
694     return true;
695 }
696 
697 bool LLVMIrConstructor::EmitStringEquals(Inst *inst)
698 {
699     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_EQUALS_COMPRESSED, 2U);
700 }
701 
702 bool LLVMIrConstructor::EmitStringBuilderBool(Inst *inst)
703 {
704     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_BOOL, 2U);
705 }
706 
707 bool LLVMIrConstructor::EmitStringBuilderChar(Inst *inst)
708 {
709     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_CHAR, 2U);
710 }
711 
712 bool LLVMIrConstructor::EmitStringBuilderString(Inst *inst)
713 {
714     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_BUILDER_STRING_COMPRESSED, 2U);
715 }
716 
717 bool LLVMIrConstructor::EmitStringConcat2(Inst *inst)
718 {
719     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT2_TLAB, 2U);
720 }
721 
722 bool LLVMIrConstructor::EmitStringConcat3(Inst *inst)
723 {
724     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT3_TLAB, 3U);
725 }
726 
727 bool LLVMIrConstructor::EmitStringConcat4(Inst *inst)
728 {
729     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_CONCAT4_TLAB, 4U);
730 }
731 
732 bool LLVMIrConstructor::EmitStringCompareTo(Inst *inst)
733 {
734     return EmitFastPath(inst, RuntimeInterface::EntrypointId::STRING_COMPARE_TO, 2U);
735 }
736 
737 bool LLVMIrConstructor::EmitIsInf(Inst *inst)
738 {
739     auto result = CreateIsInf(GetInputValue(inst, 0));
740     ValueMapAdd(inst, result);
741     return true;
742 }
743 
744 bool LLVMIrConstructor::EmitMemmoveUnchecked(Inst *inst)
745 {
746     switch (inst->CastToIntrinsic()->GetIntrinsicId()) {
747         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPILER_MEMMOVE_UNCHECKED_1_BYTE:
748             return EmitFastPath(inst, RuntimeInterface::EntrypointId::ARRAY_COPY_TO_UNCHECKED_1_BYTE, 5U);
749         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPILER_MEMMOVE_UNCHECKED_2_BYTE:
750             return EmitFastPath(inst, RuntimeInterface::EntrypointId::ARRAY_COPY_TO_UNCHECKED_2_BYTE, 5U);
751         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPILER_MEMMOVE_UNCHECKED_4_BYTE:
752             return EmitFastPath(inst, RuntimeInterface::EntrypointId::ARRAY_COPY_TO_UNCHECKED_4_BYTE, 5U);
753         case RuntimeInterface::IntrinsicId::INTRINSIC_COMPILER_MEMMOVE_UNCHECKED_8_BYTE:
754             return EmitFastPath(inst, RuntimeInterface::EntrypointId::ARRAY_COPY_TO_UNCHECKED_8_BYTE, 5U);
755         default:
756             UNREACHABLE();
757     }
758 }
759 
760 bool LLVMIrConstructor::EmitUnreachable([[maybe_unused]] Inst *inst)
761 {
762     auto bb = GetCurrentBasicBlock();
763     if (bb->empty() || !llvm::isa<llvm::ReturnInst>(*(bb->rbegin()))) {
764         auto trap = llvm::Intrinsic::getDeclaration(func_->getParent(), llvm::Intrinsic::trap, {});
765         builder_.CreateCall(trap, {});
766         builder_.CreateUnreachable();
767     }
768     return true;
769 }
770 
771 bool LLVMIrConstructor::EmitNothing([[maybe_unused]] Inst *inst)
772 {
773     return true;
774 }
775 
776 #ifndef NDEBUG
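// Debug-only sanity check that the external bridge name encodes the argument counts expected
// for this FastPath entry and its SlowPath call.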
777 static void CheckSlowPathName(const std::string &name, size_t funcArgsNum, size_t callArgsNum)
778 {
779     ASSERT_DO(std::string_view {name}.find("SlowPath") == std::string_view::npos,
780               std::cerr << "Bad bridge: SlowPath bridge not allowed in LLVM FastPath: " << name << std::endl);
781     ASSERT(callArgsNum <= funcArgsNum);
782     if (callArgsNum < funcArgsNum) {
783         funcArgsNum -= 2U;  // exclude fake arguments for these asserts
784         ASSERT(funcArgsNum <= 4U);
785         ASSERT_DO((std::string_view {name}.find("1ArgBridge") != std::string_view::npos) == (funcArgsNum == 1U),
786                   std::cerr << "Bad bridge: OddSaved1 for FastPath with 1 arguments "
787                             << "and SlowPath with zero arguments: " << name << std::endl);
788         ASSERT_DO((std::string_view {name}.find("2ArgBridge") != std::string_view::npos) == (funcArgsNum == 2U),
789                   std::cerr << "Bad bridge: OddSaved2 for FastPath with 2 arguments "
790                             << "and SlowPath with 0-1 arguments: " << name << std::endl);
791         ASSERT_DO((std::string_view {name}.find("3ArgBridge") != std::string_view::npos) == (funcArgsNum == 3U),
792                   std::cerr << "Bad bridge: OddSaved3 for FastPath with 3 arguments "
793                             << "and SlowPath with 0-2 arguments: " << name << std::endl);
794         ASSERT_DO((std::string_view {name}.find("4ArgBridge") != std::string_view::npos) == (funcArgsNum == 4U),
795                   std::cerr << "Bad bridge: OddSaved4 for FastPath with 4 arguments "
796                             << "and SlowPath with 0-3 arguments: " << name << std::endl);
797     } else {  // callArgsNum == funcArgsNum
798         ASSERT_DO((std::string_view {name}.find("OddSaved") != std::string_view::npos) == (funcArgsNum % 2U == 1U),
799                   std::cerr << "Bad bridge: OddSaved <=> amount of arguments is odd: " << name << std::endl);
800     }
801 }
802 #endif
803 
804 bool LLVMIrConstructor::EmitSlowPathEntry(Inst *inst)
805 {
806     ASSERT(GetGraph()->GetMode().IsFastPath());
807     ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
808            func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
809            func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
810            func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
811            func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
812            func_->getCallingConv() == llvm::CallingConv::ArkFast5 ||
813            func_->getCallingConv() == llvm::CallingConv::ArkFast6);
814 
815     // Arguments
816     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
817     for (size_t i = 0; i < inst->GetInputs().Size(); i++) {
818         args.push_back(GetInputValue(inst, i));
819     }
820     auto threadRegPtr = builder_.CreateIntToPtr(GetThreadRegValue(), builder_.getPtrTy());
821     auto frameRegPtr = builder_.CreateIntToPtr(GetRealFrameRegValue(), builder_.getPtrTy());
822     args.push_back(threadRegPtr);
823     args.push_back(frameRegPtr);
824 
825     ASSERT(inst->CastToIntrinsic()->HasImms() && inst->CastToIntrinsic()->GetImms().size() == 2U);
826     uint32_t externalId = inst->CastToIntrinsic()->GetImms()[1];
827     auto externalName = GetGraph()->GetRuntime()->GetExternalMethodName(GetGraph()->GetMethod(), externalId);
828 #ifndef NDEBUG
829     CheckSlowPathName(externalName, func_->arg_size(), args.size());
830 #endif
831     auto callee = func_->getParent()->getFunction(externalName);
832     if (callee == nullptr) {
833         ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
834         for (const auto &input : inst->GetInputs()) {
835             argTypes.push_back(GetExactType(input.GetInst()->GetType()));
836         }
837         argTypes.push_back(builder_.getPtrTy());
838         argTypes.push_back(builder_.getPtrTy());
839         auto ftype = llvm::FunctionType::get(GetType(inst->GetType()), argTypes, false);
840         callee = llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, func_->getParent());
841         callee->setCallingConv(GetFastPathCallingConv(inst->GetInputs().Size()));
842     }
843 
844     auto call = builder_.CreateCall(callee->getFunctionType(), callee, args);
845     call->setCallingConv(callee->getCallingConv());
846     call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_Tail);
847     call->addFnAttr(llvm::Attribute::get(call->getContext(), "ark-tail-call"));
848     CreateReturn(call);
849     return true;
850 }
851 
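// Lowered to the AArch64 load-acquire exclusive (ldaxr) intrinsic; the matching store-release
// exclusive (stlxr) is emitted by EmitExclusiveStoreWithRelease below.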
852 bool LLVMIrConstructor::EmitExclusiveLoadWithAcquire(Inst *inst)
853 {
854     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
855     ASSERT(inst->GetInputType(0) == DataType::POINTER);
856     auto &ctx = func_->getContext();
857     auto addr = GetInputValue(inst, 0);
858     auto dstType = GetExactType(inst->GetType());
859     auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_ldaxr;
860     auto load = builder_.CreateUnaryIntrinsic(intrinsicId, addr);
861     load->addParamAttr(0, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, dstType));
862     ValueMapAdd(inst, load);
863     return true;
864 }
865 
866 bool LLVMIrConstructor::EmitExclusiveStoreWithRelease(Inst *inst)
867 {
868     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
869     ASSERT(inst->GetInputType(0) == DataType::POINTER);
870     auto &ctx = func_->getContext();
871     auto addr = GetInputValue(inst, 0);
872     auto value = GetInputValue(inst, 1);
873     auto type = value->getType();
874     auto intrinsicId = llvm::Intrinsic::AARCH64Intrinsics::aarch64_stlxr;
875     auto stlxr = llvm::Intrinsic::getDeclaration(func_->getParent(), intrinsicId, builder_.getPtrTy());
876     value = builder_.CreateZExtOrBitCast(value, stlxr->getFunctionType()->getParamType(0));
877     auto store = builder_.CreateCall(stlxr, {value, addr});
878     store->addParamAttr(1, llvm::Attribute::get(ctx, llvm::Attribute::ElementType, type));
879     ValueMapAdd(inst, store);
880     return true;
881 }
882 
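// Emits a hand-written epilogue through inline asm: restore callee-saved registers, pop the
// interpreter CFrame and return, bypassing the epilogue LLVM would normally generate.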
883 bool LLVMIrConstructor::EmitInterpreterReturn([[maybe_unused]] Inst *inst)
884 {
885     // We only support this for Irtoc interpreter handlers (AArch64 and x86_64 below)
886     ASSERT(GetGraph()->GetMode().IsInterpreter());
887 
888     // This constant is hardcoded in codegen_interpreter.h and in interpreter.irt
889     constexpr size_t SPILL_SLOTS = 32;
890     CFrameLayout fl(GetGraph()->GetArch(), SPILL_SLOTS);
891     constexpr bool SAVE_UNUSED_CALLEE_REGS = true;
892 
893     // Restore callee-saved registers
894     auto calleeRegsMask = GetCalleeRegsMask(GetGraph()->GetArch(), false, SAVE_UNUSED_CALLEE_REGS);
895     auto calleeVregsMask = GetCalleeRegsMask(GetGraph()->GetArch(), true, SAVE_UNUSED_CALLEE_REGS);
896     if (GetGraph()->GetArch() == Arch::AARCH64) {
897         constexpr bool SAVE_FRAME_AND_LINK_REGS = true;
898 
899         size_t slotSize = fl.GetSlotSize();
900         size_t dslotSize = slotSize * 2U;
901 
902         auto lastCalleeReg = fl.GetRegsSlotsCount() - calleeRegsMask.Count();
903         auto lastCalleeVreg = fl.GetRegsSlotsCount() - fl.GetCalleeRegistersCount(false) - calleeVregsMask.Count();
904         CreateInterpreterReturnRestoreRegs(calleeRegsMask, lastCalleeReg, false);
905         CreateInterpreterReturnRestoreRegs(calleeVregsMask, lastCalleeVreg, true);
906 
907         // Adjust SP
908         auto spToFrameTopSlots = fl.GetRegsSlotsCount() + CFrameRegs::Start() - CFrameReturnAddr::Start();
909         if (SAVE_FRAME_AND_LINK_REGS) {
910             spToFrameTopSlots -= CFrameLayout::GetFpLrSlotsCount();
911         }
912 
913         CreateInt32ImmAsm(&builder_,
914                           std::string("add  sp, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
915                           spToFrameTopSlots * slotSize);
916         CreateInt32ImmAsm(&builder_, "ldp  x29, x30, [sp], $0", dslotSize);
917         CreateBlackBoxAsm(&builder_, "ret");
918     } else {
919         // Currently there is no vector register usage in x86_64 handlers
920         ASSERT(calleeVregsMask.count() == 0);
921         auto regShift = DOUBLE_WORD_SIZE_BYTES *
922                         (fl.GetSpillsCount() + fl.GetCallerRegistersCount(false) + fl.GetCallerRegistersCount(true));
923         auto fpShift = DOUBLE_WORD_SIZE_BYTES * (2 + CFrameSlots::Start() - CFrameData::Start());
924 
925         std::string iasmStr =
926             std::string("leaq  ${0:c}(%rsp), %rsp").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT);
927         CreateInt32ImmAsm(&builder_, iasmStr, regShift);
928         Target target {GetGraph()->GetArch()};
929         while (calleeRegsMask.count() > 0) {
930             auto reg = calleeRegsMask.GetMinRegister();
931             calleeRegsMask ^= 1U << reg;
932             iasmStr = "pop  %" + target.GetRegName(reg, false);
933             CreateBlackBoxAsm(&builder_, iasmStr);
934         }
935         iasmStr = "leaq  " + std::to_string(fpShift) + "(%rsp), %rsp";
936         CreateBlackBoxAsm(&builder_, iasmStr);
937         CreateBlackBoxAsm(&builder_, "pop  %rbp");
938         CreateBlackBoxAsm(&builder_, "retq");
939     }
940     builder_.CreateUnreachable();
941 
942     return true;
943 }
944 
945 bool LLVMIrConstructor::EmitTailCall(Inst *inst)
946 {
947     ASSERT(func_->getCallingConv() == llvm::CallingConv::ArkFast0 ||
948            func_->getCallingConv() == llvm::CallingConv::ArkFast1 ||
949            func_->getCallingConv() == llvm::CallingConv::ArkFast2 ||
950            func_->getCallingConv() == llvm::CallingConv::ArkFast3 ||
951            func_->getCallingConv() == llvm::CallingConv::ArkFast4 ||
952            func_->getCallingConv() == llvm::CallingConv::ArkFast5 ||
953            func_->getCallingConv() == llvm::CallingConv::ArkFast6 ||
954            func_->getCallingConv() == llvm::CallingConv::ArkInt);
955     llvm::CallInst *call;
956 
957     if (GetGraph()->GetMode().IsFastPath()) {
958         call = CreateTailCallFastPath(inst);
959     } else if (GetGraph()->GetMode().IsInterpreter()) {
960         call = CreateTailCallInterpreter(inst);
961     } else {
962         UNREACHABLE();
963     }
964     call->setTailCallKind(llvm::CallInst::TailCallKind::TCK_Tail);
965     call->addFnAttr(llvm::Attribute::get(call->getContext(), "ark-tail-call"));
966     CreateReturn(call);
967     std::fill(ccValues_.begin(), ccValues_.end(), nullptr);
968     return true;
969 }
970 
971 bool LLVMIrConstructor::EmitCompressEightUtf16ToUtf8CharsUsingSimd(Inst *inst)
972 {
973     CreateCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_8>(inst);
974     return true;
975 }
976 
977 bool LLVMIrConstructor::EmitCompressSixteenUtf16ToUtf8CharsUsingSimd(Inst *inst)
978 {
979     CreateCompressUtf16ToUtf8CharsUsingSimd<VECTOR_SIZE_16>(inst);
980     return true;
981 }
982 
983 bool LLVMIrConstructor::EmitMemCharU8X16UsingSimd(Inst *inst)
984 {
985     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
986     ASSERT(GetGraph()->GetMode().IsFastPath());
987     ASSERT(inst->GetInputType(0) == DataType::UINT8);
988     ASSERT(inst->GetInputType(1) == DataType::POINTER);
989 
990     MemCharSimdLowering memCharLowering(GetInputValue(inst, 0), GetInputValue(inst, 1), GetBuilder(), GetFunc());
991     ValueMapAdd(inst, memCharLowering.Generate<false>(memCharLowering.GetU8X16Ty()));
992     return true;
993 }
994 
995 bool LLVMIrConstructor::EmitMemCharU8X32UsingSimd(Inst *inst)
996 {
997     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
998     ASSERT(GetGraph()->GetMode().IsFastPath());
999     ASSERT(inst->GetInputType(0) == DataType::UINT8);
1000     ASSERT(inst->GetInputType(1) == DataType::POINTER);
1001 
1002     MemCharSimdLowering memCharLowering(GetInputValue(inst, 0), GetInputValue(inst, 1), GetBuilder(), GetFunc());
1003     ValueMapAdd(inst, memCharLowering.Generate<true>(memCharLowering.GetU8X16Ty()));
1004     return true;
1005 }
1006 
1007 bool LLVMIrConstructor::EmitMemCharU16X8UsingSimd(Inst *inst)
1008 {
1009     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
1010     ASSERT(GetGraph()->GetMode().IsFastPath());
1011     ASSERT(inst->GetInputType(0) == DataType::UINT16);
1012     ASSERT(inst->GetInputType(1) == DataType::POINTER);
1013 
1014     MemCharSimdLowering memCharLowering(GetInputValue(inst, 0), GetInputValue(inst, 1), GetBuilder(), GetFunc());
1015     ValueMapAdd(inst, memCharLowering.Generate<false>(memCharLowering.GetU16X8Ty()));
1016     return true;
1017 }
1018 
1019 bool LLVMIrConstructor::EmitMemCharU16X16UsingSimd(Inst *inst)
1020 {
1021     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
1022     ASSERT(GetGraph()->GetMode().IsFastPath());
1023     ASSERT(inst->GetInputType(0) == DataType::UINT16);
1024     ASSERT(inst->GetInputType(1) == DataType::POINTER);
1025 
1026     MemCharSimdLowering memCharLowering(GetInputValue(inst, 0), GetInputValue(inst, 1), GetBuilder(), GetFunc());
1027     ValueMapAdd(inst, memCharLowering.Generate<true>(memCharLowering.GetU16X8Ty()));
1028     return true;
1029 }
1030 
1031 bool LLVMIrConstructor::EmitReverseBytes(Inst *inst)
1032 {
1033     ASSERT(IsSafeCast(inst, 0));
1034     auto result = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::bswap, GetInputValue(inst, 0), nullptr);
1035     ValueMapAdd(inst, result);
1036     return true;
1037 }
1038 
1039 bool LLVMIrConstructor::EmitMemoryFenceFull([[maybe_unused]] Inst *inst)
1040 {
1041     CreateMemoryFence(memory_order::FULL);
1042     return true;
1043 }
1044 
1045 bool LLVMIrConstructor::EmitMemoryFenceRelease([[maybe_unused]] Inst *inst)
1046 {
1047     CreateMemoryFence(memory_order::RELEASE);
1048     return true;
1049 }
1050 
1051 bool LLVMIrConstructor::EmitMemoryFenceAcquire([[maybe_unused]] Inst *inst)
1052 {
1053     CreateMemoryFence(memory_order::ACQUIRE);
1054     return true;
1055 }
1056 
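// Round to the nearest integer with ties toward +infinity: take ceil(x) and step down by one
// when ceil(x) - x exceeds 0.5.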
1057 bool LLVMIrConstructor::EmitRoundToPInf(Inst *inst)
1058 {
1059     // CC-OFFNXT(G.NAM.03-CPP) project code style
1060     constexpr double HALF = 0.5;
1061     // CC-OFFNXT(G.NAM.03-CPP) project code style
1062     constexpr double ONE = 1.0;
1063 
1064     auto input = GetInputValue(inst, 0);
1065     ASSERT_TYPE(input, builder_.getDoubleTy());
1066 
1067     auto ceil = builder_.CreateIntrinsic(llvm::Intrinsic::ceil, {builder_.getDoubleTy()}, {input});
1068     auto diff = builder_.CreateFSub(ceil, input);
1069     auto roundBias = llvm::ConstantFP::get(builder_.getDoubleTy(), HALF);
1070     auto cmp = builder_.CreateFCmpOGT(diff, roundBias);
1071     auto compensation = llvm::ConstantFP::get(builder_.getDoubleTy(), ONE);
1072     auto adjusted = builder_.CreateFSub(ceil, compensation);
1073     auto result = builder_.CreateSelect(cmp, adjusted, ceil);
1074     ValueMapAdd(inst, result);
1075     return true;
1076 }
1077 
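// Fround: narrow a double to the nearest float value and widen it back, canonicalizing NaN
// inputs to a quiet NaN.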
1078 bool LLVMIrConstructor::EmitFround(Inst *inst)
1079 {
1080     llvm::Value *input = GetInputValue(inst, 0);
1081     ASSERT_TYPE(input, builder_.getDoubleTy());
1082     auto isNan = CreateIsNan(input);
1083     auto floatCasted = builder_.CreateCast(llvm::Instruction::FPTrunc, input, builder_.getFloatTy());
1084     auto casted = builder_.CreateCast(llvm::Instruction::FPExt, floatCasted, builder_.getDoubleTy());
1085     llvm::Value *nan = llvm::ConstantFP::getQNaN(builder_.getDoubleTy());
1086     auto result = builder_.CreateSelect(isNan, nan, casted);
1087     ValueMapAdd(inst, result);
1088     return true;
1089 }
1090 
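// JS-style double-to-char cast: saturating fptosi to i64, with out-of-range inputs recognized by
// the saturated sentinel values; in-range values keep their low 16 bits as a UTF-16 code unit.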
1091 bool LLVMIrConstructor::EmitJsCastDoubleToChar([[maybe_unused]] Inst *inst)
1092 {
1093     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
1094     ASSERT(GetGraph()->GetMode().IsFastPath());
1095     ASSERT_DO(!g_options.IsCpuFeatureEnabled(CpuFeature::JSCVT),
1096               std::cerr << "The LLVM backend doesn't support the aarch64_fjcvtzs intrinsic yet." << std::endl);
1097     llvm::Value *input = GetInputValue(inst, 0);
1098     auto sourceType = input->getType();
1099     ASSERT_DO(sourceType->isDoubleTy(), std::cerr << "Unexpected source type: " << GetTypeName(sourceType)
1100                                                   << ". Should be a double." << std::endl);
1101     auto targetType = inst->GetType();
1102     ASSERT_DO(targetType == DataType::UINT32,
1103               std::cerr << "Unexpected target type: " << targetType << ". Should be a uint32_t." << std::endl);
1104 
1105     // Infinite and out-of-range values saturate here to INT64_MIN or INT64_MAX, while NaN becomes 0
1106     auto *doubleToInt =
1107         builder_.CreateIntrinsic(llvm::Intrinsic::fptosi_sat, {builder_.getInt64Ty(), sourceType}, {input}, nullptr);
1108 
1109     auto *int64min = builder_.CreateICmpEQ(doubleToInt, builder_.getInt64(std::numeric_limits<int64_t>::min()));
1110     auto *int64max = builder_.CreateICmpEQ(doubleToInt, builder_.getInt64(std::numeric_limits<int64_t>::max()));
1111     auto *overflow = builder_.CreateLogicalOr(int64min, int64max);
1112 
1113     // CC-OFFNXT(G.NAM.03-CPP) project code style
1114     constexpr uint64_t UTF16_CHAR_MASK = 0xffff;
1115     auto *character = builder_.CreateTrunc(builder_.CreateAnd(doubleToInt, builder_.getInt64(UTF16_CHAR_MASK)),
1116                                            GetExactType(targetType));
1117 
1118     // CC-OFFNXT(G.NAM.03-CPP) project code style
1119     constexpr uint32_t FAILURE_RESULT_FLAG = (1U << 16U);
1120     auto *result = builder_.CreateSelect(overflow, builder_.getInt32(FAILURE_RESULT_FLAG), character);
1121     ValueMapAdd(inst, result);
1122     return true;
1123 }
1124 
1125 bool LLVMIrConstructor::EmitCtlz(Inst *inst)
1126 {
1127     auto result = CreateZerosCount(inst, llvm::Intrinsic::ctlz);
1128     ValueMapAdd(inst, result);
1129     return true;
1130 }
1131 
1132 bool LLVMIrConstructor::EmitCttz(Inst *inst)
1133 {
1134     auto result = CreateZerosCount(inst, llvm::Intrinsic::cttz);
1135     ValueMapAdd(inst, result);
1136     return true;
1137 }
1138 
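// Signbit for doubles: reinterpret the bits as i64 and test whether the sign bit is set.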
1139 bool LLVMIrConstructor::EmitSignbit(Inst *inst)
1140 {
1141     auto num = GetInputValue(inst, 0);
1142     auto bitcast = builder_.CreateBitCast(num, builder_.getInt64Ty());
1143     auto cmp = builder_.CreateICmpSLT(bitcast, builder_.getInt64(0));
1144     ValueMapAdd(inst, cmp);
1145     return true;
1146 }
1147 
1148 bool LLVMIrConstructor::EmitIsInteger(Inst *inst)
1149 {
1150     auto result = CreateIsInteger(inst, GetInputValue(inst, 0));
1151     ValueMapAdd(inst, result);
1152     return true;
1153 }
1154 
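// A value is a "safe integer" when it is integral and its magnitude does not exceed the largest
// exactly representable integer for its type (see MaxIntAsExactFloat/Double above).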
1155 bool LLVMIrConstructor::EmitIsSafeInteger(Inst *inst)
1156 {
1157     auto &ctx = func_->getContext();
1158     auto input = GetInputValue(inst, 0);
1159     ASSERT(input->getType()->isDoubleTy() || input->getType()->isFloatTy());
1160     auto isInteger = CreateIsInteger(inst, input);
1161 
1162     auto maxSafe = input->getType()->isDoubleTy() ? llvm::ConstantFP::get(builder_.getDoubleTy(), MaxIntAsExactDouble())
1163                                                   : llvm::ConstantFP::get(builder_.getFloatTy(), MaxIntAsExactFloat());
1164 
1165     auto initialBb = GetCurrentBasicBlock();
1166     auto isSafeIntegerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_safe_integer"), func_);
1167     auto continueBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_safe_integer_continue"), func_);
1168 
1169     builder_.CreateCondBr(isInteger, isSafeIntegerBb, continueBb);
1170 
1171     SetCurrentBasicBlock(isSafeIntegerBb);
1172     // fabs(v) <= MaxSafeInteger
1173     auto inputAbs = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, input);
1174     auto cmp = builder_.CreateFCmp(llvm::CmpInst::FCMP_OLE, inputAbs, maxSafe);
1175     builder_.CreateBr(continueBb);
1176 
1177     SetCurrentBasicBlock(continueBb);
1178     auto result = builder_.CreatePHI(builder_.getInt1Ty(), 2U);
1179     result->addIncoming(builder_.getInt1(false), initialBb);
1180     result->addIncoming(cmp, isSafeIntegerBb);
1181 
1182     ValueMapAdd(inst, result);
1183     return true;
1184 }
1185 
1186 bool LLVMIrConstructor::EmitRawBitcastToInt(Inst *inst)
1187 {
1188     llvm::Value *input = GetInputValue(inst, 0);
1189     ASSERT_TYPE(input, builder_.getFloatTy());
1190     auto result = builder_.CreateBitCast(input, builder_.getInt32Ty());
1191     ValueMapAdd(inst, result);
1192     return true;
1193 }
1194 
1195 bool LLVMIrConstructor::EmitRawBitcastToLong(Inst *inst)
1196 {
1197     llvm::Value *input = GetInputValue(inst, 0);
1198     ASSERT_TYPE(input, builder_.getDoubleTy());
1199     auto result = builder_.CreateBitCast(input, builder_.getInt64Ty());
1200     ValueMapAdd(inst, result);
1201     return true;
1202 }
1203 
1204 bool LLVMIrConstructor::EmitRawBitcastFromInt(Inst *inst)
1205 {
1206     llvm::Value *input = GetInputValue(inst, 0);
1207     ASSERT_TYPE(input, builder_.getInt32Ty());
1208     auto result = builder_.CreateBitCast(input, builder_.getFloatTy());
1209     ValueMapAdd(inst, result);
1210     return true;
1211 }
1212 
1213 bool LLVMIrConstructor::EmitRawBitcastFromLong(Inst *inst)
1214 {
1215     llvm::Value *input = GetInputValue(inst, 0);
1216     ASSERT_TYPE(input, builder_.getInt64Ty());
1217     auto result = builder_.CreateBitCast(input, builder_.getDoubleTy());
1218     ValueMapAdd(inst, result);
1219     return true;
1220 }
1221 
1222 bool LLVMIrConstructor::EmitStringGetCharsTlab(Inst *inst)
1223 {
1224     auto offset = GetGraph()->GetRuntime()->GetArrayU16ClassPointerTlsOffset(GetGraph()->GetArch());
1225     auto klass = llvmbackend::runtime_calls::LoadTLSValue(&builder_, arkInterface_, offset, builder_.getPtrTy());
1226     auto eid = RuntimeInterface::EntrypointId::STRING_GET_CHARS_TLAB_COMPRESSED;
1227     auto result = CreateEntrypointCall(eid, inst,
1228                                        {GetInputValue(inst, 0), GetInputValue(inst, 1), GetInputValue(inst, 2), klass});
1229     ASSERT(result->getCallingConv() == llvm::CallingConv::C);
1230     result->setCallingConv(llvm::CallingConv::ArkFast4);
1231     result->addRetAttr(llvm::Attribute::NonNull);
1232     result->addRetAttr(llvm::Attribute::NoAlias);
1233     ValueMapAdd(inst, result);
1234     return true;
1235 }
1236 
1237 bool LLVMIrConstructor::EmitStringHashCode(Inst *inst)
1238 {
1239     ASSERT(GetGraph()->GetRuntime()->IsCompressedStringsEnabled());
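    // The hash code is cached in the String object: a zero load means it has not been computed yet,
    // so the unlikely slow path calls the runtime to compute (and, presumably, store) it.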
1240     auto string = GetInputValue(inst, 0);
1241     auto offset = coretypes::String::GetHashcodeOffset();
1242     auto gep = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), string, offset);
1243     auto hashCode = builder_.CreateLoad(builder_.getInt32Ty(), gep);
1244     auto isZero = builder_.CreateICmpEQ(hashCode, llvm::Constant::getNullValue(hashCode->getType()));
1245     auto fastPath = GetCurrentBasicBlock();
1246     auto slowPath = llvm::BasicBlock::Create(func_->getContext(), "hash_code_slow_path", func_);
1247     auto continuation = llvm::BasicBlock::Create(func_->getContext(), "hash_code_continuation", func_);
1248     auto branchWeights = llvm::MDBuilder(func_->getContext())
1249                              .createBranchWeights(llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,
1250                                                   llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);
1251     builder_.CreateCondBr(isZero, slowPath, continuation, branchWeights);
1252     SetCurrentBasicBlock(slowPath);
1253 
1254     auto newHash = CreateEntrypointCall(RuntimeInterface::EntrypointId::STRING_HASH_CODE_COMPRESSED, inst, {string});
1255     ASSERT(newHash->getCallingConv() == llvm::CallingConv::C);
1256     newHash->setCallingConv(llvm::CallingConv::ArkFast1);
1257     builder_.CreateBr(continuation);
1258     SetCurrentBasicBlock(continuation);
1259 
1260     auto result = builder_.CreatePHI(hashCode->getType(), 2U);
1261     result->addIncoming(hashCode, fastPath);
1262     result->addIncoming(newHash, slowPath);
1263     ValueMapAdd(inst, result);
1264 
1265     return true;
1266 }
1267 
1268 bool LLVMIrConstructor::EmitWriteTlabStatsSafe(Inst *inst)
1269 {
1270     auto addr = GetInputValue(inst, 0);
1271     auto size = GetInputValue(inst, 1);
1272     CreateEntrypointCall(RuntimeInterface::EntrypointId::WRITE_TLAB_STATS_NO_BRIDGE, inst, {addr, size});
1273 
1274     return true;
1275 }
1276 
1277 bool LLVMIrConstructor::EmitExpandU8U16(Inst *inst)
1278 {
1279     auto input = GetInputValue(inst, 0);
1280     ASSERT(input->getType()->getScalarSizeInBits() == 32U);  // has to be f32
1281 
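    // Reinterpret the 32-bit input as four u8 lanes and zero-extend each lane to u16.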
1282     auto srcTy = llvm::VectorType::get(builder_.getInt8Ty(), 4U, false);
1283     auto dstTy = llvm::VectorType::get(builder_.getInt16Ty(), 4U, false);
1284 
1285     auto val = builder_.CreateBitCast(input, srcTy);
1286     auto result = builder_.CreateZExt(val, dstTy);
1287     ValueMapAdd(inst, result);
1288 
1289     return true;
1290 }
1291 
1292 bool LLVMIrConstructor::EmitReverseHalfWords(Inst *inst)
1293 {
1294     auto input = GetInputValue(inst, 0);
1295     ASSERT(input->getType()->getScalarSizeInBits() == 64U);  // has to be f64
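    // Reinterpret the 64-bit input as four u16 lanes and reverse the lane order with a shuffle.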
1296     auto srcTy = llvm::VectorType::get(builder_.getInt16Ty(), 4U, false);
1297     auto val = builder_.CreateBitCast(input, srcTy);
1298 
1299     const llvm::SmallVector<int, 4> indices = {3, 2, 1, 0};
1300     auto result = builder_.CreateShuffleVector(val, indices);
1301     ValueMapAdd(inst, result);
1302 
1303     return true;
1304 }
1305 
1306 bool LLVMIrConstructor::EmitAtomicByteOr(Inst *inst)
1307 {
1308     auto addr = GetInputValue(inst, 0);
1309     auto value = GetInputValue(inst, 1);
1310     auto byteVal = builder_.CreateTrunc(value, builder_.getInt8Ty());
1311     auto op = llvm::AtomicRMWInst::BinOp::Or;
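    // Monotonic is LLVM's counterpart of memory_order_relaxed: the OR is atomic but imposes no ordering.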
1312     builder_.CreateAtomicRMW(op, addr, byteVal, llvm::MaybeAlign(0), llvm::AtomicOrdering::Monotonic);
1313 
1314     return true;
1315 }
1316 
1317 llvm::Value *LLVMIrConstructor::GetMappedValue(Inst *inst, DataType::Type type)
1318 {
1319     ASSERT(inputMap_.count(inst) == 1);
1320     auto &typeMap = inputMap_.at(inst);
1321     ASSERT(typeMap.count(type) == 1);
1322     auto result = typeMap.at(type);
1323     ASSERT(result != nullptr);
1324     return result;
1325 }
1326 
1327 llvm::Value *LLVMIrConstructor::GetInputValue(Inst *inst, size_t index, bool skipCoerce)
1328 {
1329     auto input = inst->GetInput(index).GetInst();
1330     auto type = inst->GetInputType(index);
1331     ASSERT(type != DataType::NO_TYPE);
1332 
1333     if (skipCoerce) {
1334         ASSERT(input->GetType() == DataType::UINT64 || input->GetType() == DataType::INT64);
1335         type = input->GetType();
1336     }
1337 
1338     if (input->IsConst()) {
1339         return GetInputValueFromConstant(input->CastToConstant(), type);
1340     }
1341     if (input->GetOpcode() == Opcode::NullPtr) {
1342         auto llvmType = GetExactType(DataType::REFERENCE);
1343         ASSERT(llvmType == builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
1344         return llvm::Constant::getNullValue(llvmType);
1345     }
1346     return GetMappedValue(input, type);
1347 }
1348 
1349 llvm::Value *LLVMIrConstructor::GetInputValueFromConstant(ConstantInst *constant, DataType::Type pandaType)
1350 {
1351     auto llvmType = GetExactType(pandaType);
1352     if (pandaType == DataType::FLOAT64) {
1353         double value = constant->GetDoubleValue();
1354         return llvm::ConstantFP::get(llvmType, value);
1355     }
1356     if (pandaType == DataType::FLOAT32) {
1357         float value = constant->GetFloatValue();
1358         return llvm::ConstantFP::get(llvmType, value);
1359     }
1360     if (pandaType == DataType::POINTER) {
1361         auto cval = static_cast<int64_t>(constant->GetIntValue());
1362         auto integer = builder_.getInt64(cval);
1363         return builder_.CreateIntToPtr(integer, builder_.getPtrTy());
1364     }
1365     if (DataType::IsTypeNumeric(pandaType)) {
1366         auto isSigned = DataType::IsTypeSigned(pandaType);
1367         auto cval = static_cast<int64_t>(constant->GetIntValue());
1368         return llvm::ConstantInt::get(llvmType, cval, isSigned);
1369     }
1370     if (DataType::IsReference(pandaType) && constant->GetRawValue() == 0) {
1371         return llvm::Constant::getNullValue(llvmType);
1372     }
1373     UNREACHABLE();
1374 }
1375 
1376 // Initializers. BuildIr calls them
1377 
1378 void LLVMIrConstructor::BuildBasicBlocks(Marker normal)
1379 {
1380     auto &context = func_->getContext();
1381     for (auto block : graph_->GetBlocksRPO()) {
1382         if (block->IsEndBlock()) {
1383             continue;
1384         }
1385         if (!block->IsMarked(normal)) {
1386             continue;
1387         }
1388         auto bb = llvm::BasicBlock::Create(context, llvm::StringRef("bb") + llvm::Twine(block->GetId()), func_);
1389         AddBlock(block, bb);
1390         // Checking that irtoc handler contains a return instruction
1391         if (!graph_->GetMode().IsInterpreter()) {
1392             continue;
1393         }
1394         for (auto inst : block->AllInsts()) {
1395             if (inst->IsIntrinsic() && inst->CastToIntrinsic()->GetIntrinsicId() ==
1396                                            RuntimeInterface::IntrinsicId::INTRINSIC_INTERPRETER_RETURN) {
1397                 arkInterface_->AppendIrtocReturnHandler(func_->getName());
1398             }
1399         }
1400     }
1401 }
1402 
1403 void LLVMIrConstructor::BuildInstructions(Marker normal)
1404 {
1405     for (auto block : graph_->GetBlocksRPO()) {
1406         if (block->IsEndBlock() || !block->IsMarked(normal)) {
1407             continue;
1408         }
1409         SetCurrentBasicBlock(GetTailBlock(block));
1410         for (auto inst : block->AllInsts()) {
1411             auto bb = GetCurrentBasicBlock();
1412             if (!bb->empty() && llvm::isa<llvm::UnreachableInst>(*(bb->rbegin()))) {
1413                 break;
1414             }
1415             VisitInstruction(inst);
1416         }
1417 
1418         if (block->IsTryBegin()) {
1419             ASSERT(block->GetSuccsBlocks().size() > 1);
1420             ASSERT(block->GetSuccessor(0)->IsMarked(normal) && !block->GetSuccessor(1)->IsMarked(normal));
1421             ASSERT(!block->GetLastInst()->IsControlFlow());
1422             builder_.CreateBr(GetHeadBlock(block->GetSuccessor(0)));
1423         }
1424         if (((block->GetSuccsBlocks().size() == 1 && !block->GetSuccessor(0)->IsEndBlock()) || block->IsTryEnd()) &&
1425             block->GetSuccessor(0)->IsMarked(normal)) {
1426             ASSERT(block->IsTryEnd() ? !block->GetSuccessor(1)->IsMarked(normal) : true);
1427             builder_.CreateBr(GetHeadBlock(block->GetSuccessor(0)));
1428         }
1429         ReplaceTailBlock(block, GetCurrentBasicBlock());
1430     }
1431 }
1432 
1433 void LLVMIrConstructor::FillPhiInputs(BasicBlock *block, Marker normal)
1434 {
1435     if (block->IsStartBlock() || block->IsEndBlock() || !block->IsMarked(normal)) {
1436         return;
1437     }
1438     for (auto inst : block->PhiInsts()) {
1439         auto phi = llvm::cast<llvm::PHINode>(GetMappedValue(inst, inst->GetType()));
1440         for (size_t i = 0; i < inst->GetInputsCount(); i++) {
1441             auto inputBlock = inst->CastToPhi()->GetPhiInputBb(i);
1442             if (!inputBlock->IsMarked(normal)) {
1443                 continue;
1444             }
1445 
1446             auto input = GetInputValue(inst, i);
1447             phi->addIncoming(input, GetTailBlock(inputBlock));
1448         }
1449     }
1450 }
1451 
1452 // Creator functions for internal usage
1453 
1454 llvm::CallInst *LLVMIrConstructor::CreateEntrypointCall(RuntimeInterface::EntrypointId eid, Inst *inst,
1455                                                         llvm::ArrayRef<llvm::Value *> args)
1456 {
1457     arkInterface_->GetOrCreateRuntimeFunctionType(func_->getContext(), func_->getParent(),
1458                                                   LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1459                                                   static_cast<LLVMArkInterface::EntrypointId>(eid));
1460 
1461     // Sanity assert: this helper must not be called without an instruction
1462     ASSERT(inst != nullptr);
1463 
1464     llvm::CallInst *call;
1465     auto threadReg = GetThreadRegValue();
1466     if (GetGraph()->SupportManagedCode() && (inst->CanThrow() || inst->CanDeoptimize())) {
1467         bool noReturn = GetGraph()->GetRuntime()->IsEntrypointNoreturn(eid);
1468         call = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
1469             &builder_, threadReg, arkInterface_, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid), args,
1470             CreateSaveStateBundle(inst, noReturn));
1471     } else {
1472         call = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
1473             &builder_, threadReg, arkInterface_, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid), args);
1474     }
1475     if (inst->RequireState()) {
1476         WrapArkCall(inst, call);
1477     }
1478     return call;
1479 }
1480 
1481 llvm::CallInst *LLVMIrConstructor::CreateIntrinsicCall(Inst *inst)
1482 {
1483     auto entryId = inst->CastToIntrinsic()->GetIntrinsicId();
1484     auto rtFunctionTy = arkInterface_->GetOrCreateRuntimeFunctionType(
1485         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::INTRINSIC,
1486         static_cast<LLVMArkInterface::EntrypointId>(entryId));
1487     auto arguments = GetIntrinsicArguments(rtFunctionTy, inst->CastToIntrinsic());
1488     return CreateIntrinsicCall(inst, entryId, arguments);
1489 }
1490 
1491 llvm::CallInst *LLVMIrConstructor::CreateIntrinsicCall(Inst *inst, RuntimeInterface::IntrinsicId entryId,
1492                                                        llvm::ArrayRef<llvm::Value *> arguments)
1493 {
1494     auto rtFunctionTy = arkInterface_->GetOrCreateRuntimeFunctionType(
1495         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::INTRINSIC,
1496         static_cast<LLVMArkInterface::EntrypointId>(entryId));
1497     auto rtFunctionName = arkInterface_->GetRuntimeFunctionName(LLVMArkInterface::RuntimeCallType::INTRINSIC,
1498                                                                 static_cast<LLVMArkInterface::EntrypointId>(entryId));
1499     auto intrinsicOffset = static_cast<int>(entryId);
1500     auto callee = llvmbackend::runtime_calls::GetPandaRuntimeFunctionCallee(intrinsicOffset, rtFunctionTy, &builder_,
1501                                                                             rtFunctionName);
1502     llvm::CallInst *result;
1503     if (inst->CanThrow()) {
1504         ASSERT_PRINT(inst->GetSaveState() != nullptr, "Intrinsic with can_throw does not have a savestate");
1505         result = builder_.CreateCall(callee, arguments, CreateSaveStateBundle(inst));
1506     } else {
1507         result = builder_.CreateCall(callee, arguments);
1508     }
1509     SetIntrinsicParamAttrs(result, inst->CastToIntrinsic(), arguments);
1510 
1511     if (inst->RequireState()) {
1512         WrapArkCall(inst, result);
1513     }
1514     if (NeedSafePointAfterIntrinsic(entryId) && g_options.IsCompilerUseSafepoint()) {
1515         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-extra-safepoint"));
1516         result->getFunction()->addFnAttr("needs-extra-safepoint");
1517     }
1518 
1519     return result;
1520 }
1521 
1522 // Helper function. No matter where an `alloca` is needed to pass arguments, we create all of them in the
1523 // first basic block, which lets LLVM fold the allocas into the function prologue.
1524 llvm::Value *LLVMIrConstructor::CreateAllocaForArgs(llvm::Type *type, uint32_t arraySize)
1525 {
1526     auto currentBb = GetCurrentBasicBlock();
1527     auto &firstBb = func_->getEntryBlock();
1528     auto inst = firstBb.getFirstNonPHI();
1529     builder_.SetInsertPoint(inst);
1530     llvm::AllocaInst *result;
1531 
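    // If the entry block already starts with such an alloca, reuse it (growing its array size when
    // necessary) so that every call site shares a single argument buffer.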
1532     if (llvm::isa<llvm::AllocaInst>(inst)) {
1533         auto alloca = llvm::cast<llvm::AllocaInst>(inst);
1534         ASSERT(alloca->getAllocatedType() == type);
1535         ASSERT(llvm::isa<llvm::ConstantInt>(alloca->getArraySize()));
1536         auto allocaSize = llvm::cast<llvm::ConstantInt>(alloca->getArraySize())->getZExtValue();
1537         if (allocaSize < arraySize) {
1538             alloca->setOperand(0, builder_.getInt32(arraySize));
1539         }
1540         result = alloca;
1541     } else {
1542         result = builder_.CreateAlloca(type, builder_.getInt32(arraySize), "call_arg_buffer");
1543     }
1544 
1545     SetCurrentBasicBlock(currentBb);
1546     return result;
1547 }
1548 
1549 llvm::CallInst *LLVMIrConstructor::CreateFastPathCall(Inst *inst, RuntimeInterface::EntrypointId eid,
1550                                                       llvm::ArrayRef<llvm::Value *> args)
1551 {
1552     auto call = CreateEntrypointCall(eid, inst, args);
1553     ASSERT(call->getCallingConv() == llvm::CallingConv::C);
1554     call->setCallingConv(GetFastPathCallingConv(args.size()));
1555     return call;
1556 }
1557 
1558 // IsInstance Helpers
1559 
1560 llvm::Value *LLVMIrConstructor::CreateIsInstanceEntrypointCall(Inst *inst)
1561 {
1562     auto object = GetInputValue(inst, 0);
1563     auto klass = GetInputValue(inst, 1);
1564     return CreateEntrypointCall(RuntimeInterface::EntrypointId::IS_INSTANCE, inst, {object, klass});
1565 }
1566 
1567 llvm::Value *LLVMIrConstructor::CreateIsInstanceObject(llvm::Value *klassObj)
1568 {
1569     auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1570     auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1571     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, typeOffset);
1572     auto typeLdr = builder_.CreateLoad(builder_.getInt8Ty(), typePtr);
1573     auto cmpLocal =
1574         builder_.CreateICmpEQ(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdr, builder_.getInt32Ty()));
1575     return builder_.CreateZExt(cmpLocal, builder_.getInt8Ty(), "isinstance_object_out");
1576 }
1577 
1578 llvm::Value *LLVMIrConstructor::CreateIsInstanceOther(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1579 {
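    // Walk up the superclass chain of the object's class until either the target class is found
    // (result 1) or the chain ends with null (result 0).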
1580     auto initialBb = GetCurrentBasicBlock();
1581     auto &ctx = func_->getContext();
1582     auto loopHeaderBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop_h"), func_);
1583     auto loopBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop"), func_);
1584     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_out"), func_);
1585     builder_.CreateBr(loopHeaderBb);
1586 
1587     SetCurrentBasicBlock(loopHeaderBb);
1588     auto typeOffset = GetGraph()->GetRuntime()->GetClassBaseOffset(GetGraph()->GetArch());
1589     auto loopPhi = builder_.CreatePHI(builder_.getPtrTy(), 2U, "loop_in");
1590     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), loopPhi, typeOffset);
1591     auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
1592     auto cmpLocal = builder_.CreateIsNotNull(typeLdr);
1593     loopPhi->addIncoming(klassObj, initialBb);
1594     loopPhi->addIncoming(typeLdr, loopBb);
1595     builder_.CreateCondBr(cmpLocal, loopBb, outBb);
1596 
1597     SetCurrentBasicBlock(loopBb);
1598     cmpLocal = builder_.CreateICmpEQ(typeLdr, klassId);
1599     builder_.CreateCondBr(cmpLocal, outBb, loopHeaderBb);
1600 
1601     SetCurrentBasicBlock(outBb);
1602     auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 2U, "isinstance_other_out");
1603     outPhi->addIncoming(builder_.getInt8(1), loopBb);
1604     outPhi->addIncoming(builder_.getInt8(0), loopHeaderBb);
1605     return outPhi;
1606 }
1607 
1608 llvm::Value *LLVMIrConstructor::CreateIsInstanceArray(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1609 {
1610     auto &ctx = func_->getContext();
1611     auto initialBb = GetCurrentBasicBlock();
1612     auto secondLoadBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_second_load"), func_);
1613     auto slowPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_slow_path"), func_);
1614     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_out"), func_);
1615 
1616     auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
1617     auto typePtrObj = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
1618     auto typeLdrObj = builder_.CreateLoad(builder_.getPtrTy(), typePtrObj);
1619     auto cmpLocal = builder_.CreateIsNotNull(typeLdrObj);
1620     builder_.CreateCondBr(cmpLocal, secondLoadBb, outBb);
1621 
1622     SetCurrentBasicBlock(secondLoadBb);
1623     auto typePtrKlass = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassId, componentOffset);
1624     auto typeLdrKlass = builder_.CreateLoad(builder_.getPtrTy(), typePtrKlass);
1625     cmpLocal = builder_.CreateICmpEQ(typeLdrObj, typeLdrKlass);
1626     auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
1627         llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,     // if other comparisons are enough
1628         llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT);  // else
1629     builder_.CreateCondBr(cmpLocal, outBb, slowPath, branchWeights);
1630 
1631     SetCurrentBasicBlock(slowPath);
1632     auto slowPathResult = CreateIsInstanceEntrypointCall(inst);
1633     builder_.CreateBr(outBb);
1634 
1635     SetCurrentBasicBlock(outBb);
1636     auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 3U, "isinstance_array_out");
1637     outPhi->addIncoming(builder_.getInt8(0), initialBb);
1638     outPhi->addIncoming(builder_.getInt8(1), secondLoadBb);
1639     outPhi->addIncoming(slowPathResult, slowPath);
1640     return outPhi;
1641 }
1642 
1643 llvm::Value *LLVMIrConstructor::CreateIsInstanceArrayObject(Inst *inst, llvm::Value *klassObj)
1644 {
1645     auto &ctx = func_->getContext();
1646     auto initialBb = GetCurrentBasicBlock();
1647     auto checkMaskBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_object_check_mask"), func_);
1648     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_object_out"), func_);
1649 
1650     auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
1651     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
1652     auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
1653     auto cmpLocal = builder_.CreateIsNotNull(typeLdr);
1654     builder_.CreateCondBr(cmpLocal, checkMaskBb, outBb);
1655 
1656     SetCurrentBasicBlock(checkMaskBb);
1657     auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1658     auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1659     auto typePtrElem = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), typeLdr, typeOffset);
1660     auto typeLdrElem = builder_.CreateLoad(builder_.getInt8Ty(), typePtrElem);
1661     cmpLocal =
1662         builder_.CreateICmpEQ(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdrElem, builder_.getInt32Ty()));
1663     auto cmpExt = builder_.CreateZExt(cmpLocal, builder_.getInt8Ty());
1664     builder_.CreateBr(outBb);
1665 
1666     SetCurrentBasicBlock(outBb);
1667     auto outPhi = builder_.CreatePHI(builder_.getInt8Ty(), 2U, "isinstance_array_object_out");
1668     outPhi->addIncoming(builder_.getInt8(0), initialBb);
1669     outPhi->addIncoming(cmpExt, checkMaskBb);
1670     return outPhi;
1671 }
1672 
1673 llvm::Value *LLVMIrConstructor::CreateIsInstanceInnerBlock(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1674 {
1675     auto klassType = inst->CastToIsInstance()->GetClassType();
1676     switch (klassType) {
1677         case ClassType::OBJECT_CLASS:
1678             return CreateIsInstanceObject(klassObj);
1679         case ClassType::OTHER_CLASS:
1680             return CreateIsInstanceOther(inst, klassObj, klassId);
1681         case ClassType::ARRAY_CLASS:
1682             return CreateIsInstanceArray(inst, klassObj, klassId);
1683         case ClassType::ARRAY_OBJECT_CLASS:
1684             return CreateIsInstanceArrayObject(inst, klassObj);
1685         case ClassType::INTERFACE_CLASS:
1686             return CreateIsInstanceEntrypointCall(inst);
1687         default:
1688             UNREACHABLE();
1689     }
1690 }
1691 
1692 // IsInstance Helpers End
1693 
1694 // CheckCast Helpers
1695 
1696 void LLVMIrConstructor::CreateCheckCastEntrypointCall(Inst *inst)
1697 {
1698     auto object = GetInputValue(inst, 0);
1699     auto klass = GetInputValue(inst, 1);
1700     if (inst->CanDeoptimize()) {
1701         auto call = CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_CAST_DEOPTIMIZE, inst, {object, klass});
1702         call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
1703     } else {
1704         CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_CAST, inst, {object, klass});
1705     }
1706 }
1707 
1708 void LLVMIrConstructor::CreateCheckCastObject(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1709 {
1710     auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1711     auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1712     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, typeOffset);
1713     auto typeLdr = builder_.CreateLoad(builder_.getInt8Ty(), typePtr);
1714     auto src = GetInputValue(inst, 0);
1715     auto zext = builder_.CreateZExt(typeLdr, builder_.getInt32Ty());
1716     auto deoptimize = builder_.CreateICmpNE(builder_.getInt32(typeMask), zext);
1717 
1718     auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
1719     CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1720 }
1721 
1722 void LLVMIrConstructor::CreateCheckCastOther(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1723 {
1724     auto initialBb = GetCurrentBasicBlock();
1725     auto src = GetInputValue(inst, 0);
1726 
1727     auto &ctx = func_->getContext();
1728     auto loopHeaderBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop_h"), func_);
1729     auto loopBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_loop"), func_);
1730     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "other_out"), func_);
1731     builder_.CreateBr(loopHeaderBb);
1732 
1733     SetCurrentBasicBlock(loopHeaderBb);
1734     auto typeOffset = GetGraph()->GetRuntime()->GetClassBaseOffset(GetGraph()->GetArch());
1735     auto loopPhi = builder_.CreatePHI(builder_.getPtrTy(), 2U, "loop_in");
1736     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), loopPhi, typeOffset);
1737     auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
1738     auto deoptimize = builder_.CreateIsNull(typeLdr);
1739     loopPhi->addIncoming(klassObj, initialBb);
1740     loopPhi->addIncoming(typeLdr, loopBb);
1741 
1742     auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
1743     CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1744     builder_.CreateBr(loopBb);
1745 
1746     SetCurrentBasicBlock(loopBb);
1747     auto cmp = builder_.CreateICmpEQ(typeLdr, klassId);
1748     builder_.CreateCondBr(cmp, outBb, loopHeaderBb);
1749 
1750     SetCurrentBasicBlock(outBb);
1751 }
1752 
1753 void LLVMIrConstructor::CreateCheckCastArray(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1754 {
1755     auto src = GetInputValue(inst, 0);
1756 
1757     auto &ctx = func_->getContext();
1758     auto slowPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_slow_path"), func_);
1759     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "array_out"), func_);
1760 
1761     auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
1762     auto typePtrObj = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
1763     auto typeLdrObj = builder_.CreateLoad(builder_.getPtrTy(), typePtrObj);
1764 
1765     auto deoptimize = builder_.CreateIsNull(typeLdrObj);
1766     auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
1767     CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1768 
1769     auto typePtrKlass = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassId, componentOffset);
1770     auto typeLdrKlass = builder_.CreateLoad(builder_.getPtrTy(), typePtrKlass);
1771     auto cmpLocal = builder_.CreateICmpEQ(typeLdrObj, typeLdrKlass);
1772     auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
1773         llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,     // if other comparisons are enough
1774         llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT);  // else
1775     builder_.CreateCondBr(cmpLocal, outBb, slowPath, branchWeights);
1776 
1777     SetCurrentBasicBlock(slowPath);
1778     CreateCheckCastEntrypointCall(inst);
1779     builder_.CreateBr(outBb);
1780 
1781     SetCurrentBasicBlock(outBb);
1782 }
1783 
1784 void LLVMIrConstructor::CreateCheckCastArrayObject(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1785 {
1786     auto src = GetInputValue(inst, 0);
1787 
1788     auto componentOffset = GetGraph()->GetRuntime()->GetClassComponentTypeOffset(GetGraph()->GetArch());
1789     auto typePtr = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klassObj, componentOffset);
1790     auto typeLdr = builder_.CreateLoad(builder_.getPtrTy(), typePtr);
1791 
1792     auto deoptimize = builder_.CreateIsNull(typeLdr);
1793     auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
1794     CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1795 
1796     auto typeOffset = GetGraph()->GetRuntime()->GetClassTypeOffset(GetGraph()->GetArch());
1797     auto typeMask = GetGraph()->GetRuntime()->GetReferenceTypeMask();
1798     auto typePtrElem = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), typeLdr, typeOffset);
1799     auto typeLdrElem = builder_.CreateLoad(builder_.getInt8Ty(), typePtrElem);
1800     deoptimize =
1801         builder_.CreateICmpNE(builder_.getInt32(typeMask), builder_.CreateZExt(typeLdrElem, builder_.getInt32Ty()));
1802     CreateDeoptimizationBranch(inst, deoptimize, exception, {klassId, src});
1803 }
1804 
1805 void LLVMIrConstructor::CreateCheckCastInner(Inst *inst, llvm::Value *klassObj, llvm::Value *klassId)
1806 {
1807     auto klassType = inst->CastToCheckCast()->GetClassType();
1808     switch (klassType) {
1809         case ClassType::OBJECT_CLASS:
1810             CreateCheckCastObject(inst, klassObj, klassId);
1811             break;
1812         case ClassType::OTHER_CLASS:
1813             CreateCheckCastOther(inst, klassObj, klassId);
1814             break;
1815         case ClassType::ARRAY_CLASS:
1816             CreateCheckCastArray(inst, klassObj, klassId);
1817             break;
1818         case ClassType::ARRAY_OBJECT_CLASS:
1819             CreateCheckCastArrayObject(inst, klassObj, klassId);
1820             break;
1821         case ClassType::INTERFACE_CLASS:
1822         default:
1823             UNREACHABLE();
1824     }
1825 }
1826 
1827 // CheckCast Helpers End
1828 
1829 void LLVMIrConstructor::CreateInterpreterReturnRestoreRegs(RegMask &regMask, size_t offset, bool fp)
1830 {
1831     int32_t slotSize = PointerSize(GetGraph()->GetArch());
1832     int32_t dslotSize = slotSize * 2U;
1833     int32_t totalSize = regMask.count() * slotSize;
1834     auto startRegOffset = offset * DOUBLE_WORD_SIZE_BYTES;
1835     auto endRegOffset = startRegOffset + std::max(0, totalSize - dslotSize);
1836 
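    // 504 is presumably the largest offset an LDP of 64-bit registers can encode directly
    // (signed imm7 scaled by 8, i.e. 63 * 8); larger offsets are rebased through x16 below.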
1837     constexpr uint32_t MAX_REPR_VAL = 504U;
1838     bool representable = startRegOffset <= MAX_REPR_VAL && (startRegOffset & 0x7U) == 0 &&
1839                          endRegOffset <= MAX_REPR_VAL && (endRegOffset & 0x7U) == 0;
1840 
1841     std::string baseReg = representable ? "sp" : "x16";
1842     if (!representable) {
1843         CreateInt32ImmAsm(&builder_,
1844                           std::string("add  x16, sp, $0").append(LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT),
1845                           startRegOffset);
1846         startRegOffset = 0;
1847     }
1848 
1849     while (regMask.count() > 0) {
1850         std::string asmString = regMask.count() / 2U > 0 ? "ldp " : "ldr ";
1851         auto first = regMask.GetMinRegister();
1852         asmString += (fp ? "d" : "x") + std::to_string(first);
1853         regMask ^= 1U << first;
1854         if (regMask.count() > 0) {
1855             asmString += ", ";
1856             auto second = regMask.GetMinRegister();
1857             asmString += (fp ? "d" : "x") + std::to_string(second);
1858             regMask ^= 1U << second;
1859         }
1860         asmString += ", [";
1861         asmString += baseReg;
1862         asmString += ", $0]";
1863         if (representable) {
1864             asmString += LLVMArkInterface::PATCH_STACK_ADJUSTMENT_COMMENT;
1865         }
1866         CreateInt32ImmAsm(&builder_, asmString, startRegOffset);
1867         startRegOffset += dslotSize;
1868     }
1869 }
1870 
1871 llvm::Value *LLVMIrConstructor::CreateLoadClassById(Inst *inst, uint32_t typeId, bool init)
1872 {
1873     auto builtin = init ? LoadInitClass(func_->getParent()) : LoadClass(func_->getParent());
1874     auto slotIdVal = builder_.getInt32(arkInterface_->GetClassIndexInAotGot(GetGraph()->GetAotData(), typeId, init));
1875 
1876     // Remember both runtime functions; they are used later by the panda_runtime_lowering pass
1877     arkInterface_->GetOrCreateRuntimeFunctionType(
1878         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1879         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::CLASS_RESOLVER));
1880     arkInterface_->GetOrCreateRuntimeFunctionType(
1881         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
1882         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::CLASS_INIT_RESOLVER));
1883 
1884     auto callInst = builder_.CreateCall(builtin, {builder_.getInt32(typeId), slotIdVal}, CreateSaveStateBundle(inst));
1885     WrapArkCall(inst, callInst);
1886     return callInst;
1887 }
1888 
1889 llvm::Value *LLVMIrConstructor::CreateBinaryOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
1890 {
1891     llvm::Value *x = GetInputValue(inst, 0);
1892     llvm::Value *y = GetInputValue(inst, 1);
1893 
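    // Pointer operands need special handling: ptr - ptr is lowered as an integer subtraction,
    // while ptr + int becomes a byte-wise inbounds GEP.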
1894     if (x->getType()->isPointerTy()) {
1895         if (y->getType()->isPointerTy()) {
1896             ASSERT(opcode == llvm::Instruction::Sub);
1897             x = builder_.CreatePtrToInt(x, builder_.getInt64Ty());
1898             y = builder_.CreatePtrToInt(y, builder_.getInt64Ty());
1899             return builder_.CreateBinOp(opcode, x, y);
1900         }
1901         if (y->getType()->isIntegerTy()) {
1902             ASSERT(opcode == llvm::Instruction::Add);
1903             ASSERT(x->getType()->isPointerTy());
1904             return builder_.CreateInBoundsGEP(builder_.getInt8Ty(), x, y);
1905         }
1906         UNREACHABLE();
1907     }
1908     if (IsTypeNumeric(inst->GetType())) {
1909         // Peephole can remove casts and leave a constant of the wrong type,
1910         // so the operands are coerced to the instruction type here.
1911         x = CoerceValue(x, inst->GetInputType(0), inst->GetType());
1912         y = CoerceValue(y, inst->GetInputType(1), inst->GetType());
1913     }
1914     return builder_.CreateBinOp(opcode, x, y);
1915 }
1916 
1917 llvm::Value *LLVMIrConstructor::CreateBinaryImmOp(Inst *inst, llvm::Instruction::BinaryOps opcode, uint64_t c)
1918 {
1919     ASSERT(IsTypeNumeric(inst->GetType()));
1920     llvm::Value *x = GetInputValue(inst, 0);
1921     if (x->getType()->isPointerTy()) {
1922         ASSERT(x->getType()->isPointerTy());
1923         ASSERT(opcode == llvm::Instruction::Add || opcode == llvm::Instruction::Sub);
1924         if (opcode == llvm::Instruction::Sub) {
1925             c = -c;
1926         }
1927         return builder_.CreateConstInBoundsGEP1_64(builder_.getInt8Ty(), x, c);
1928     }
1929     llvm::Value *y = CoerceValue(builder_.getInt64(c), DataType::INT64, inst->GetType());
1930     return builder_.CreateBinOp(opcode, x, y);
1931 }
1932 
1933 llvm::Value *LLVMIrConstructor::CreateShiftOp(Inst *inst, llvm::Instruction::BinaryOps opcode)
1934 {
1935     llvm::Value *x = GetInputValue(inst, 0);
1936     llvm::Value *y = GetInputValue(inst, 1);
1937     auto targetType = inst->GetType();
1938     bool target64 = (targetType == DataType::UINT64) || (targetType == DataType::INT64);
1939     auto constexpr SHIFT32_RANGE = 0x1f;
1940     auto constexpr SHIFT64_RANGE = 0x3f;
1941 
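    // Mask the shift amount first: LLVM shifts whose amount is not less than the bit width yield
    // poison, whereas the IR defines shifts modulo the operand width.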
1942     y = builder_.CreateBinOp(llvm::Instruction::And, y,
1943                              llvm::ConstantInt::get(y->getType(), target64 ? SHIFT64_RANGE : SHIFT32_RANGE));
1944 
1945     return builder_.CreateBinOp(opcode, x, y);
1946 }
1947 
1948 llvm::Value *LLVMIrConstructor::CreateSignDivMod(Inst *inst, llvm::Instruction::BinaryOps opcode)
1949 {
1950     ASSERT(opcode == llvm::Instruction::SDiv || opcode == llvm::Instruction::SRem);
1951     llvm::Value *x = GetInputValue(inst, 0);
1952     llvm::Value *y = GetInputValue(inst, 1);
1953     auto &ctx = func_->getContext();
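    // Division by -1 is handled separately: INT_MIN / -1 overflows (undefined behavior for LLVM's
    // sdiv/srem), and INT_MIN % -1 must be 0, so the result is fixed up via a compare against -1.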
1954     auto eqM1 = builder_.CreateICmpEQ(y, llvm::ConstantInt::get(y->getType(), -1));
1955     auto m1Result = opcode == llvm::Instruction::SDiv ? builder_.CreateNeg(x) : llvm::ConstantInt::get(y->getType(), 0);
1956 
1957     // Select for AArch64, as 'sdiv' correctly handles the INT_MIN / -1 case
1958     if (GetGraph()->GetArch() == Arch::AARCH64) {
1959         auto result = builder_.CreateBinOp(opcode, x, y);
1960         auto selectVal = builder_.CreateSelect(eqM1, m1Result, result);
1961         if (auto selectInst = llvm::dyn_cast<llvm::SelectInst>(selectVal)) {
1962             auto *metadata = llvm::MDNode::get(ctx, {});
1963             auto sdiv = ark::llvmbackend::LLVMArkInterface::AARCH64_SDIV_INST;
1964             selectInst->setMetadata(sdiv, metadata);
1965         }
1966         return selectVal;
1967     }
1968 
1969     // X86_64 solution with control flow
1970     auto currBb = GetCurrentBasicBlock();
1971     auto notM1Bb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_normal"), func_);
1972     auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "divmod_cont"), func_);
1973     builder_.CreateCondBr(eqM1, contBb, notM1Bb);
1974 
1975     SetCurrentBasicBlock(notM1Bb);
1976     auto result = builder_.CreateBinOp(opcode, x, y);
1977     builder_.CreateBr(contBb);
1978 
1979     SetCurrentBasicBlock(contBb);
1980     auto resultPhi = builder_.CreatePHI(y->getType(), 2U);
1981     resultPhi->addIncoming(m1Result, currBb);
1982     resultPhi->addIncoming(result, notM1Bb);
1983     return resultPhi;
1984 }
1985 
1986 llvm::Value *LLVMIrConstructor::CreateFloatComparison(CmpInst *cmpInst, llvm::Value *x, llvm::Value *y)
1987 {
1988     // if x is less than y then return -1
1989     // else return zero extend of (x > y)
1990     llvm::CmpInst::Predicate greaterThanPredicate;
1991     llvm::CmpInst::Predicate lessThanPredicate;
1992     if (cmpInst->IsFcmpg()) {
1993         // if x or y is nan then greaterThanPredicate yields true
1994         greaterThanPredicate = llvm::CmpInst::FCMP_UGT;
1995         lessThanPredicate = llvm::CmpInst::FCMP_OLT;
1996     } else if (cmpInst->IsFcmpl()) {
1997         greaterThanPredicate = llvm::CmpInst::FCMP_OGT;
1998         // if x or y is nan then lessThanPredicate yields true
1999         lessThanPredicate = llvm::CmpInst::FCMP_ULT;
2000     } else {
2001         ASSERT_PRINT(false, "cmpInst must be either Fcmpg, or Fcmpl");
2002         UNREACHABLE();
2003     }
2004     // x > y || (inst == Fcmpg && (x == NaN || y == NaN))
2005     auto greaterThan = builder_.CreateFCmp(greaterThanPredicate, x, y);
2006     // x < y || (inst == Fcmpl && (x == NaN || y == NaN))
2007     auto lessThan = builder_.CreateFCmp(lessThanPredicate, x, y);
2008     auto comparison = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
2009     auto negativeOne = builder_.getInt32(-1);
2010     return builder_.CreateSelect(lessThan, negativeOne, comparison);
2011 }
2012 
2013 llvm::Value *LLVMIrConstructor::CreateIntegerComparison(CmpInst *inst, llvm::Value *x, llvm::Value *y)
2014 {
2015     ASSERT(x->getType() == y->getType());
2016     llvm::Value *greaterThan;
2017     llvm::Value *lessThan;
2018 
2019     if (DataType::IsTypeSigned(inst->GetOperandsType())) {
2020         greaterThan = builder_.CreateICmpSGT(x, y);
2021         lessThan = builder_.CreateICmpSLT(x, y);
2022     } else {
2023         greaterThan = builder_.CreateICmpUGT(x, y);
2024         lessThan = builder_.CreateICmpULT(x, y);
2025     }
2026     auto castComparisonResult = builder_.CreateZExt(greaterThan, builder_.getInt32Ty());
2027     auto negativeOne = builder_.getInt32(-1);
2028     return builder_.CreateSelect(lessThan, negativeOne, castComparisonResult);
2029 }
2030 
2031 llvm::Value *LLVMIrConstructor::CreateNewArrayWithRuntime(Inst *inst)
2032 {
2033     auto type = GetInputValue(inst, 0);
2034     auto size = ToSizeT(GetInputValue(inst, 1));
2035     auto eid = RuntimeInterface::EntrypointId::CREATE_ARRAY;
2036     auto result = CreateEntrypointCall(eid, inst, {type, size});
2037     MarkAsAllocation(result);
2038     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
2039         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
2040     }
2041     return result;
2042 }
2043 
2044 llvm::Value *LLVMIrConstructor::CreateNewObjectWithRuntime(Inst *inst)
2045 {
2046     auto initClass = GetInputValue(inst, 0);
2047     auto eid = RuntimeInterface::EntrypointId::CREATE_OBJECT_BY_CLASS;
2048     auto result = CreateEntrypointCall(eid, inst, {initClass});
2049     auto srcInst = inst->GetInput(0).GetInst();
2050     if (srcInst->GetOpcode() != Opcode::LoadAndInitClass ||
2051         GetGraph()->GetRuntime()->CanUseTlabForClass(srcInst->CastToLoadAndInitClass()->GetClass())) {
2052         MarkAsAllocation(result);
2053     }
2054     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
2055         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
2056     }
2057     return result;
2058 }
2059 
2060 llvm::Value *LLVMIrConstructor::CreateResolveVirtualCallBuiltin(Inst *inst, llvm::Value *thiz, uint32_t methodId)
2061 {
2062     ASSERT(thiz->getType()->isPointerTy());
2063 
2064     auto builtin = ResolveVirtual(func_->getParent());
2065     arkInterface_->GetOrCreateRuntimeFunctionType(
2066         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
2067         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_VIRTUAL_CALL_AOT));
2068     arkInterface_->GetOrCreateRuntimeFunctionType(
2069         func_->getContext(), func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
2070         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::INTF_INLINE_CACHE));
2071 
2072     auto zero = builder_.getInt64(0);
2073     auto arrayType = llvm::ArrayType::get(builder_.getInt64Ty(), 0);
2074     auto offset = builder_.CreateIntToPtr(zero, arrayType->getPointerTo());
2075     auto callInst =
2076         builder_.CreateCall(builtin, {thiz, ToSizeT(builder_.getInt32(methodId)), offset}, CreateSaveStateBundle(inst));
2077     WrapArkCall(inst, callInst);
2078     return builder_.CreateIntToPtr(callInst, builder_.getPtrTy());
2079 }
2080 
2081 llvm::Value *LLVMIrConstructor::CreateLoadManagedClassFromClass(llvm::Value *klass)
2082 {
2083     ASSERT(klass->getType()->isPointerTy());
2084     auto dataOff = GetGraph()->GetRuntime()->GetManagedClassOffset(GetGraph()->GetArch());
2085     auto ptrData = builder_.CreateConstInBoundsGEP1_32(builder_.getInt8Ty(), klass, dataOff);
2086     return builder_.CreateLoad(builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE), ptrData);
2087 }
2088 
2089 llvm::Value *LLVMIrConstructor::CreateIsInf(llvm::Value *input)
2090 {
2091     llvm::Type *type = nullptr;
2092     uint64_t infMaskInt;
2093     if (input->getType()->isFloatTy()) {
2094         constexpr uint32_t INF_MASK_FLOAT = 0xff000000;
2095         infMaskInt = INF_MASK_FLOAT;
2096         type = builder_.getInt32Ty();
2097     } else {
2098         ASSERT_TYPE(input, builder_.getDoubleTy());
2099         constexpr uint64_t INF_MASK_DOUBLE = 0xffe0000000000000;
2100         infMaskInt = INF_MASK_DOUBLE;
2101         type = builder_.getInt64Ty();
2102     }
2103 
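    // Shifting the bit pattern left by one drops the sign bit, so both +inf and -inf compare equal
    // to the shifted exponent mask (e.g. 0x7f800000 << 1 == 0xff000000 for float).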
2104     auto infMask = llvm::ConstantInt::get(type, infMaskInt);
2105     auto one = llvm::ConstantInt::get(type, 1);
2106     auto castedInput = builder_.CreateBitCast(input, type);
2107     auto shiftedInput = builder_.CreateShl(castedInput, one);
2108     auto result = builder_.CreateICmpEQ(shiftedInput, infMask);
2109     return result;
2110 }
2111 
2112 llvm::Value *LLVMIrConstructor::CreateIsInteger(Inst *inst, llvm::Value *input)
2113 {
2114     auto &ctx = func_->getContext();
2115     ASSERT(input->getType()->isDoubleTy() || input->getType()->isFloatTy());
2116 
2117     auto isInf = CreateIsInf(input);
2118     auto epsilon = input->getType()->isDoubleTy()
2119                        ? llvm::ConstantFP::get(builder_.getDoubleTy(), std::numeric_limits<double>::epsilon())
2120                        : llvm::ConstantFP::get(builder_.getFloatTy(), std::numeric_limits<float>::epsilon());
2121 
2122     auto initialBb = GetCurrentBasicBlock();
2123     auto notInfBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_integer_not_inf"), func_);
2124     auto continueBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "is_integer_continue"), func_);
2125 
2126     builder_.CreateCondBr(isInf, continueBb, notInfBb);
2127 
2128     SetCurrentBasicBlock(notInfBb);
2129     // fabs(v - trunc(v)) <= epsilon
2130     auto truncated = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::trunc, input);
2131     auto diff = builder_.CreateFSub(input, truncated);
2132     auto diffAbs = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, diff);
2133     auto cmp = builder_.CreateFCmp(llvm::CmpInst::FCMP_OLE, diffAbs, epsilon);
2134     builder_.CreateBr(continueBb);
2135 
2136     SetCurrentBasicBlock(continueBb);
2137     auto result = builder_.CreatePHI(builder_.getInt1Ty(), 2U);
2138     result->addIncoming(builder_.getInt1(false), initialBb);
2139     result->addIncoming(cmp, notInfBb);
2140 
2141     return result;
2142 }
2143 
2144 llvm::Value *LLVMIrConstructor::CreateCastToInt(Inst *inst)
2145 {
2146     llvm::Value *input = GetInputValue(inst, 0);
2147     auto sourceType = input->getType();
2148     auto targetType = inst->GetType();
2149 
2150     ASSERT_DO(sourceType->isFloatTy() || sourceType->isDoubleTy(),
2151               std::cerr << "Unexpected source type: " << GetTypeName(sourceType) << ". Should be a float or double."
2152                         << std::endl);
2153 
2154     auto llvmId = DataType::IsTypeSigned(targetType) ? llvm::Intrinsic::fptosi_sat : llvm::Intrinsic::fptoui_sat;
2155     ArenaVector<llvm::Type *> intrinsicTypes(GetGraph()->GetLocalAllocator()->Adapter());
2156     intrinsicTypes.assign({GetExactType(targetType), sourceType});
2157     return builder_.CreateIntrinsic(llvmId, intrinsicTypes, {input}, nullptr);
2158 }
2159 
2160 llvm::Value *LLVMIrConstructor::CreateLoadWithOrdering(Inst *inst, llvm::Value *value, llvm::AtomicOrdering ordering,
2161                                                        const llvm::Twine &name)
2162 {
2163     auto pandaType = inst->GetType();
2164     llvm::Type *type = GetExactType(pandaType);
2165 
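    // Atomic loads in LLVM require an alignment of at least the type size, so the preferred
    // alignment is set explicitly whenever a real ordering is requested.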
2166     auto load = builder_.CreateLoad(type, value, false, name);  // C-like volatile is not applied
2167     if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
2168         auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(type);
2169         load->setOrdering(ordering);
2170         load->setAlignment(llvm::Align(alignment));
2171     }
2172 
2173     return load;
2174 }
2175 
2176 llvm::Value *LLVMIrConstructor::CreateStoreWithOrdering(llvm::Value *value, llvm::Value *ptr,
2177                                                         llvm::AtomicOrdering ordering)
2178 {
2179     auto store = builder_.CreateStore(value, ptr, false);  // C-like volatile is not applied
2180     if (ordering != LLVMArkInterface::NOT_ATOMIC_ORDER) {
2181         auto alignment = func_->getParent()->getDataLayout().getPrefTypeAlignment(value->getType());
2182         store->setAlignment(llvm::Align(alignment));
2183         store->setOrdering(ordering);
2184     }
2185     return store;
2186 }
2187 
2188 llvm::Value *LLVMIrConstructor::CreateZerosCount(Inst *inst, llvm::Intrinsic::ID llvmId)
2189 {
2190     ASSERT(IsSafeCast(inst, 0));
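    // The second operand of llvm.ctlz/llvm.cttz is the is_zero_poison flag; passing false makes a
    // zero input return the operand bit width instead of poison.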
2191     auto zeroDefined = llvm::ConstantInt::getFalse(func_->getContext());
2192     return builder_.CreateBinaryIntrinsic(llvmId, GetInputValue(inst, 0), zeroDefined, nullptr);
2193 }
2194 
2195 llvm::Value *LLVMIrConstructor::CreateRoundArm64(Inst *inst, bool is64)
2196 {
2197     auto input = GetInputValue(inst, 0);
2198 
2199     auto sourceType = is64 ? builder_.getDoubleTy() : builder_.getFloatTy();
2200     auto targetType = is64 ? builder_.getInt64Ty() : builder_.getInt32Ty();
2201 
2202     double constexpr HALF = 0.5;
2203     auto half = llvm::ConstantFP::get(sourceType, HALF);
2204     auto zero = is64 ? builder_.getInt64(0) : builder_.getInt32(0);
2205 
2206     auto initialBb = GetCurrentBasicBlock();
2207     auto &ctx = func_->getContext();
2208     auto module = func_->getParent();
2209 
2210     // lround lowers to the fcvtas instruction (positives are handled correctly and NaN maps to 0, but negative ties round the wrong way)
2211     auto decl = llvm::Intrinsic::getDeclaration(module, llvm::Intrinsic::lround, {targetType, sourceType});
2212     llvm::Value *round = llvm::CallInst::Create(decl, input, "", initialBb);
2213 
2214     // Check whether the rounded value is negative (if not, the rounding is already correct)
2215     auto negative = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "neg"), func_);
2216     auto done = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "cont"), func_);
2217     auto lessThan = builder_.CreateICmpSLT(round, zero);
2218     builder_.CreateCondBr(lessThan, negative, done);
2219 
2220     // CC-OFFNXT(C_RULE_ID_HORIZON_SPACE_SHIELD) false-positive
2221     // Negative input case, add 1 iff "input - round(input) == 0.5"
2222     SetCurrentBasicBlock(negative);
2223     // frinta instruction
2224     auto floatRound = builder_.CreateUnaryIntrinsic(llvm::Intrinsic::round, input, nullptr);
2225     auto sub = builder_.CreateBinOp(llvm::Instruction::FSub, input, floatRound);
2226     auto one = is64 ? builder_.getInt64(1) : builder_.getInt32(1);
2227     auto add = builder_.CreateBinOp(llvm::Instruction::Add, round, one);
2228     auto equal = builder_.CreateFCmp(llvm::CmpInst::FCMP_OEQ, sub, half);
2229     auto roundMayInc = builder_.CreateSelect(equal, add, round);
2230     builder_.CreateBr(done);
2231 
2232     // Continue block
2233     SetCurrentBasicBlock(done);
2234     auto roundPhi = builder_.CreatePHI(targetType, 2U);
2235     roundPhi->addIncoming(round, initialBb);
2236     roundPhi->addIncoming(roundMayInc, negative);
2237     return roundPhi;
2238 }
2239 
2240 llvm::Value *LLVMIrConstructor::CreateNewStringFromCharsTlab(Inst *inst, llvm::Value *offset, llvm::Value *length,
2241                                                              llvm::Value *array)
2242 {
2243     auto entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_CHAR_ARRAY_TLAB_COMPRESSED;
2244     ArenaVector<llvm::Value *> arguments(GetGraph()->GetLocalAllocator()->Adapter());
2245     auto callConv = llvm::CallingConv::ArkFast3;
2246     if (llvm::isa<llvm::Constant>(offset) && llvm::cast<llvm::Constant>(offset)->isNullValue()) {
2247         entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_ZERO_BASED_CHAR_ARRAY_TLAB_COMPRESSED;
2248     } else {
2249         arguments.push_back(offset);
2250         callConv = llvm::CallingConv::ArkFast4;
2251     }
2252     arguments.push_back(length);
2253     arguments.push_back(array);
2254     auto klassOffset = GetGraph()->GetRuntime()->GetStringClassPointerTlsOffset(GetGraph()->GetArch());
2255     auto klass = llvmbackend::runtime_calls::LoadTLSValue(&builder_, arkInterface_, klassOffset, builder_.getPtrTy());
2256     arguments.push_back(klass);
2257     auto result = CreateEntrypointCall(entryId, inst, arguments);
2258     ASSERT(result->getCallingConv() == llvm::CallingConv::C);
2259     result->setCallingConv(callConv);
2260     MarkAsAllocation(result);
2261     return result;
2262 }
2263 
2264 llvm::Value *LLVMIrConstructor::CreateNewStringFromStringTlab(Inst *inst, llvm::Value *stringVal)
2265 {
2266     auto entryId = RuntimeInterface::EntrypointId::CREATE_STRING_FROM_STRING_TLAB_COMPRESSED;
2267     auto result = CreateEntrypointCall(entryId, inst, {stringVal});
2268     ASSERT(result->getCallingConv() == llvm::CallingConv::C);
2269     result->setCallingConv(llvm::CallingConv::ArkFast1);
2270     MarkAsAllocation(result);
2271     return result;
2272 }
2273 
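// Emits the following control flow (a sketch; block names are illustrative):
//
//   [current] --cond (unlikely)--> [deopt]: call @llvm.experimental.deoptimize(...) ["deopt" bundle]; ret
//        |
//        +------(likely)---------> [cont]: IR construction continues here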
2274 void LLVMIrConstructor::CreateDeoptimizationBranch(Inst *inst, llvm::Value *deoptimize,
2275                                                    RuntimeInterface::EntrypointId exception,
2276                                                    llvm::ArrayRef<llvm::Value *> arguments)
2277 {
2278     ASSERT_TYPE(deoptimize, builder_.getInt1Ty());
2279     ASSERT(exception != RuntimeInterface::EntrypointId::DEOPTIMIZE || inst->CanDeoptimize());
2280     auto &ctx = func_->getContext();
2281 
2282     /* Create basic blocks for continuation and throw */
2283     auto continuation = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "cont"), func_);
2284     auto throwPath = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "deopt"), func_);
2285 
2286     /* Creating branch */
2287     auto branchWeights = llvm::MDBuilder(ctx).createBranchWeights(
2288         llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,  // if unlikely(deoptimize) then throw
2289         llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);   // else continue
2290     auto branch = builder_.CreateCondBr(deoptimize, throwPath, continuation, branchWeights);
2291 
2292     /* Creating throw block */
2293     SetCurrentBasicBlock(throwPath);
2294 
2295     auto call = CreateDeoptimizeCall(inst, arguments, exception);
2296 
2297     /* Set metadata for implicit null check */
2298     if (!inst->CanDeoptimize() && exception == RuntimeInterface::EntrypointId::NULL_POINTER_EXCEPTION &&
2299         g_options.IsCompilerImplicitNullCheck()) {
2300         ASSERT(inst->IsNullCheck());
2301         auto *metadata = llvm::MDNode::get(ctx, {});
2302         branch->setMetadata(llvm::LLVMContext::MD_make_implicit, metadata);
2303     }
2304 
2305     /* Create 'ret' after llvm.experimental.deoptimize call */
2306     CreateReturn(call);
2307     WrapArkCall(inst, call);
2308 
2309     /* Continue */
2310     SetCurrentBasicBlock(continuation);
2311 }
2312 
2313 llvm::CallInst *LLVMIrConstructor::CreateDeoptimizeCall(Inst *inst, llvm::ArrayRef<llvm::Value *> arguments,
2314                                                         RuntimeInterface::EntrypointId exception)
2315 {
2316     auto deoptimizeDeclaration = llvm::Intrinsic::getDeclaration(
2317         func_->getParent(), llvm::Intrinsic::experimental_deoptimize, {func_->getReturnType()});
2318     llvm::CallInst *call;
2319     if (inst->CanDeoptimize()) {
2320         // If the inst can deoptimize, call Deoptimize to bail out into the interpreter instead of throwing an exception
2321         exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
2322         auto type = helpers::ToUnderlying(GetDeoptimizationType(inst)) |
2323                     (inst->GetId() << MinimumBitsToStore(DeoptimizeType::COUNT));
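        // 'type' packs the DeoptimizeType in the low MinimumBitsToStore(DeoptimizeType::COUNT) bits
        // and the instruction id in the bits above them (the runtime is assumed to decode it symmetrically)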
2324         ASSERT(GetGraph()->GetRuntime()->IsEntrypointNoreturn(exception));
2325         call = builder_.CreateCall(
2326             {deoptimizeDeclaration}, {builder_.getInt64(type), builder_.getInt32(static_cast<uint32_t>(exception))},
2327             CreateSaveStateBundle(inst, GetGraph()->GetRuntime()->IsEntrypointNoreturn(exception)));
2328         call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
2329     } else {
2330         std::vector<llvm::Value *> args = arguments;
2331         args.push_back(builder_.getInt32(static_cast<uint32_t>(exception)));
2332         call =
2333             builder_.CreateCall({deoptimizeDeclaration}, args,
2334                                 CreateSaveStateBundle(inst, GetGraph()->GetRuntime()->IsEntrypointNoreturn(exception)));
2335     }
2336     arkInterface_->GetOrCreateRuntimeFunctionType(func_->getContext(), func_->getParent(),
2337                                                   LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
2338                                                   static_cast<LLVMArkInterface::EntrypointId>(exception));
2339     return call;
2340 }
2341 
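// Layout of the "deopt" operand bundle built below, one record per frame of the inlining chain (outermost first):
//   { function, methodId, bytecode pc, flags, vregCount, { vregIndex, vregType, vregValue }* }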
2342 ArenaVector<llvm::OperandBundleDef> LLVMIrConstructor::CreateSaveStateBundle(Inst *inst, bool noReturn)
2343 {
2344     ASSERT_PRINT(inst->CanThrow() || inst->CanDeoptimize(),
2345                  "Attempt to create a regmap for instruction that doesn't throw (or deoptimize)");
2346     ArenaVector<llvm::OperandBundleDef> bundle(GetGraph()->GetLocalAllocator()->Adapter());
2347     if (!arkInterface_->DeoptsEnabled()) {
2348         return bundle;
2349     }
2350     ArenaVector<llvm::Value *> vals(GetGraph()->GetLocalAllocator()->Adapter());
2351     ArenaVector<SaveStateInst *> saveStates(GetGraph()->GetLocalAllocator()->Adapter());
2352 
2353     auto saveState = inst->GetSaveState();
2354     while (saveState != nullptr) {
2355         saveStates.push_back(saveState);
2356         auto caller = saveState->GetCallerInst();
2357         saveState = caller == nullptr ? nullptr : caller->GetSaveState();
2358     }
2359 
2360     std::reverse(saveStates.begin(), saveStates.end());
2361     for (auto ss : saveStates) {
2362         auto method = ss->GetMethod();
2363         auto caller = ss->GetCallerInst();
2364         if (caller != nullptr) {
2365             method = caller->GetCallMethod();
2366         }
2367         ASSERT(method != nullptr);
2368         // Put a function as a delimiter in inlining chain
2369         auto function = GetOrCreateFunctionForCall(caller, method);
2370         ASSERT(function != nullptr);
2371         vals.push_back(function);
2372         // Put methodId needed for inline info
2373         vals.push_back(builder_.getInt32(GetGraph()->GetRuntime()->GetMethodId(method)));
2374         // Put bytecode pc for inlining chain as well
2375         vals.push_back(builder_.getInt32(ss->GetPc()));
2376         // Put a marker if catch has been met
2377         // Put flags: bit 0 - a regmap is required (e.g. a catch handler may be reached), bit 1 - noreturn entrypoint
2378         vals.push_back(builder_.getInt32(flags));
2379         // Put a number of interpreter registers for the method
2380         auto vregCount = arkInterface_->GetVirtualRegistersCount(method);
2381         vals.push_back(builder_.getInt32(vregCount));
2382 
2383         EncodeSaveStateInputs(&vals, ss);
2384     }
2385     bundle.assign({llvm::OperandBundleDef {"deopt", vals}});
2386     return bundle;
2387 }
2388 
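// Each live vreg (BRIDGE registers are skipped) is encoded as a triple: { vreg index, VRegInfo::Type, value },
// where non-pointer values are bitcast to integers and zero-extended to i64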
2389 void LLVMIrConstructor::EncodeSaveStateInputs(ArenaVector<llvm::Value *> *vals, SaveStateInst *ss)
2390 {
2391     for (size_t i = 0; i < ss->GetInputsCount(); ++i) {
2392         if (ss->GetVirtualRegister(i).Value() == VirtualRegister::BRIDGE) {
2393             continue;
2394         }
2395         // Put a virtual register index
2396         vals->push_back(builder_.getInt32(ss->GetVirtualRegister(i).Value()));
2397         // Put a virtual register type
2398         auto metatype = IrTypeToMetainfoType(ss->GetInputType(i));
2399         uint32_t undertype = static_cast<std::underlying_type_t<VRegInfo::Type>>(metatype);
2400         vals->push_back(builder_.getInt32(undertype));
2401         // Put a virtual register value
2402         auto value = GetInputValue(ss, i);
2403         if (!value->getType()->isPointerTy()) {
2404             ASSERT(value->getType()->getScalarSizeInBits() <= 64U);
2405             auto intVal = builder_.CreateBitCast(value, builder_.getIntNTy(value->getType()->getScalarSizeInBits()));
2406             if (metatype == VRegInfo::Type::INT32) {
2407                 intVal = CoerceValue(intVal, ss->GetInputType(i), DataType::INT32);
2408             }
2409             vals->push_back(builder_.CreateZExt(intVal, builder_.getInt64Ty()));
2410         } else {
2411             vals->push_back(value);
2412         }
2413     }
2414 }
2415 
2416 void LLVMIrConstructor::EncodeInlineInfo(Inst *inst, llvm::Instruction *instruction)
2417 {
2418     SaveStateInst *saveState = inst->GetSaveState();
2419     llvm::SmallVector<SaveStateInst *> saveStates;
2420     bool first = true;
2421     while (saveState != nullptr) {
2422         if (!first) {
2423             saveStates.push_back(saveState);
2424         }
2425         first = false;
2426         saveState = saveState->GetCallerInst() == nullptr ? nullptr : saveState->GetCallerInst()->GetSaveState();
2427     }
2428     std::reverse(saveStates.begin(), saveStates.end());  // llvm::reverse() would only return a reversed view
2429     for (auto ss : saveStates) {
2430         auto method = ss->GetMethod();
2431         auto methodName = arkInterface_->GetUniqMethodName(method);
2432         auto function = func_->getParent()->getFunction(methodName);
2433         auto caller = ss->GetCallerInst();
2434         if (caller != nullptr) {
2435             method = ss->GetCallerInst()->GetCallMethod();
2436             function = GetOrCreateFunctionForCall(caller, method);
2437         }
2438         ASSERT(function != nullptr);
2439         debugData_->AppendInlinedAt(instruction, function, ss->GetPc());
2440     }
2441 }
2442 
2443 void LLVMIrConstructor::CreatePreWRB(Inst *inst, llvm::Value *mem)
2444 {
2445     auto barrierType = GetGraph()->GetRuntime()->GetPreType();
2446     auto isVolatile = IsVolatileMemInst(inst);
2447     if (barrierType == mem::BarrierType::PRE_WRB_NONE) {
2448         ASSERT(GetGraph()->SupportManagedCode());
2449         return;
2450     }
2451     ASSERT(barrierType == mem::BarrierType::PRE_SATB_BARRIER);
2452 
2453     if (llvmbackend::g_options.IsLlvmBuiltinWrb() && !arkInterface_->IsIrtocMode()) {
2454         auto builtin = llvmbackend::builtins::PreWRB(func_->getParent(), mem->getType()->getPointerAddressSpace());
2455         builder_.CreateCall(builtin, {mem, builder_.getInt1(isVolatile)});
2456         return;
2457     }
2458     auto &ctx = func_->getContext();
2459     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "pre_wrb_out"), func_);
2460     llvmbackend::gc_barriers::EmitPreWRB(&builder_, mem, isVolatile, outBb, arkInterface_, GetThreadRegValue());
2461 }
2462 
2463 void LLVMIrConstructor::CreatePostWRB(Inst *inst, llvm::Value *mem, llvm::Value *offset, llvm::Value *value)
2464 {
2465     auto barrierType = GetGraph()->GetRuntime()->GetPostType();
2466     if (barrierType == mem::BarrierType::POST_WRB_NONE) {
2467         ASSERT(GetGraph()->SupportManagedCode());
2468         return;
2469     }
2470     ASSERT(barrierType == mem::BarrierType::POST_INTERGENERATIONAL_BARRIER ||
2471            barrierType == mem::BarrierType::POST_INTERREGION_BARRIER);
2472 
2473     Inst *secondValue;
2474     Inst *val = InstStoredValue(inst, &secondValue);
2475     ASSERT(secondValue == nullptr);
2476 
2477     if (val->GetOpcode() == Opcode::NullPtr) {
2478         return;
2479     }
2480 
2481     bool irtoc = arkInterface_->IsIrtocMode();
2482     if (!irtoc && llvmbackend::g_options.IsLlvmBuiltinWrb()) {
2483         auto builtin = llvmbackend::builtins::PostWRB(func_->getParent(), mem->getType()->getPointerAddressSpace());
2484         builder_.CreateCall(builtin, {mem, offset, value});
2485         return;
2486     }
2487     auto frame = (irtoc && GetGraph()->GetArch() == Arch::X86_64) ? GetRealFrameRegValue() : nullptr;
2488     llvmbackend::gc_barriers::EmitPostWRB(&builder_, mem, offset, value, arkInterface_, GetThreadRegValue(), frame);
2489 }
2490 
2491 llvm::Value *LLVMIrConstructor::CreateMemoryFence(memory_order::Order order)
2492 {
2493     llvm::AtomicOrdering ordering;
2494     switch (order) {
2495         case memory_order::RELEASE:
2496             ordering = llvm::AtomicOrdering::Release;
2497             break;
2498         case memory_order::ACQUIRE:
2499             ordering = llvm::AtomicOrdering::Acquire;
2500             break;
2501         case memory_order::FULL:
2502             ordering = llvm::AtomicOrdering::SequentiallyConsistent;
2503             break;
2504         default:
2505             UNREACHABLE();
2506     }
2507     return builder_.CreateFence(ordering);
2508 }
2509 
2510 llvm::Value *LLVMIrConstructor::CreateCondition(ConditionCode cc, llvm::Value *x, llvm::Value *y)
2511 {
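    // Test conditions compare (x & y) against zero: CC_TST_EQ -> (x & y) == 0, CC_TST_NE -> (x & y) != 0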
2512     if (cc == CC_TST_EQ || cc == CC_TST_NE) {
2513         auto tst = builder_.CreateBinOp(llvm::Instruction::And, x, y);
2514         return (cc == CC_TST_EQ) ? builder_.CreateIsNull(tst) : builder_.CreateIsNotNull(tst);
2515     }
2516     return builder_.CreateICmp(ICmpCodeConvert(cc), x, y);
2517 }
2518 
2519 void LLVMIrConstructor::CreateIf(Inst *inst, llvm::Value *cond, bool likely, bool unlikely)
2520 {
2521     llvm::MDNode *weights = nullptr;
2522     auto constexpr LIKELY = llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT;
2523     auto constexpr UNLIKELY = llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT;
2524     if (likely) {
2525         weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(LIKELY, UNLIKELY);
2526     } else if (unlikely) {
2527         weights = llvm::MDBuilder(func_->getContext()).createBranchWeights(UNLIKELY, LIKELY);
2528     }
2529     builder_.CreateCondBr(cond, GetHeadBlock(inst->GetBasicBlock()->GetTrueSuccessor()),
2530                           GetHeadBlock(inst->GetBasicBlock()->GetFalseSuccessor()), weights);
2531 }
2532 
2533 llvm::Value *LLVMIrConstructor::CreateReturn(llvm::Value *value)
2534 {
2535     ASSERT(value != nullptr);
2536     if (value->getType()->isVoidTy()) {
2537         return builder_.CreateRetVoid();
2538     }
2539     return builder_.CreateRet(value);
2540 }
2541 
2542 llvm::CallInst *LLVMIrConstructor::CreateTailCallFastPath(Inst *inst)
2543 {
2544     ASSERT(inst->GetInputs().Size() == 0);
2545     ASSERT(inst->CastToIntrinsic()->HasImms() && inst->CastToIntrinsic()->GetImms().size() == 2U);
2546     ASSERT(ccValues_.size() == func_->arg_size());
2547 
2548     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
2549     uint32_t externalId = inst->CastToIntrinsic()->GetImms()[1];
2550     auto externalName = GetGraph()->GetRuntime()->GetExternalMethodName(GetGraph()->GetMethod(), externalId);
2551     auto callee = func_->getParent()->getFunction(externalName);
2552     llvm::CallingConv::ID cc = 0;
2553     if (callee == nullptr) {
2554         ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
2555         for (size_t i = 0; i < func_->arg_size(); i++) {
2556             args.push_back(i < ccValues_.size() && ccValues_.at(i) != nullptr ? ccValues_.at(i) : func_->getArg(i));
2557             argTypes.push_back(args.at(i)->getType());
2558         }
2559         auto ftype = llvm::FunctionType::get(GetType(inst->GetType()), argTypes, false);
2560         callee = llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, func_->getParent());
2561         cc = func_->getCallingConv();
2562     } else {
2563         size_t size = func_->arg_size();
2564         ASSERT(callee->arg_size() <= size);
2565         for (size_t i = 0; i < callee->arg_size() - 2U; i++) {
2566             args.push_back(i < ccValues_.size() && ccValues_.at(i) != nullptr ? ccValues_.at(i) : func_->getArg(i));
2567         }
2568         args.push_back(func_->getArg(size - 2U));
2569         args.push_back(func_->getArg(size - 1U));
2570         cc = callee->getCallingConv();
2571     }
2572     auto call = builder_.CreateCall(callee->getFunctionType(), callee, args);
2573     call->setCallingConv(cc);
2574     return call;
2575 }
2576 
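// Interpreter calling-convention slots as asserted below: 0 - pc, 1 - acc, 2 - accTag, 3 - frame, 4 - dispatch;
// then on AArch64: 5 - moffset, 6 - methodPtr, 7 - thread; on x86_64: 5 - thread, 6 - real frame pointer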
2577 llvm::CallInst *LLVMIrConstructor::CreateTailCallInterpreter(Inst *inst)
2578 {
2579     auto ptr = GetInputValue(inst, 0);
2580     ASSERT_TYPE(ptr, builder_.getPtrTy());
2581     ASSERT(ccValues_.size() == (GetGraph()->GetArch() == Arch::AARCH64 ? 8U : 7U));
2582     ASSERT(ccValues_.at(0) != nullptr);  // pc
2583     static constexpr unsigned ACC = 1U;
2584     static constexpr unsigned ACC_TAG = 2U;
2585     ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
2586     for (size_t i = 0; i < cc_.size(); i++) {
2587         if (ccValues_.at(i) != nullptr) {
2588             argTypes.push_back(ccValues_.at(i)->getType());
2589         } else {
2590             argTypes.push_back(func_->getFunctionType()->getParamType(i));
2591         }
2592     }
2593     if (ccValues_.at(ACC) == nullptr) {
2594         ccValues_[ACC] = llvm::Constant::getNullValue(argTypes[ACC]);
2595     }
2596     if (ccValues_.at(ACC_TAG) == nullptr) {
2597         ccValues_[ACC_TAG] = llvm::Constant::getNullValue(argTypes[ACC_TAG]);
2598     }
2599     ASSERT(ccValues_.at(3U) != nullptr);  // frame
2600     ASSERT(ccValues_.at(4U) != nullptr);  // dispatch
2601     if (GetGraph()->GetArch() == Arch::AARCH64) {
2602         ASSERT(ccValues_.at(5U) != nullptr);  // moffset
2603         ASSERT(ccValues_.at(6U) != nullptr);  // methodPtr
2604         ASSERT(ccValues_.at(7U) != nullptr);  // thread
2605     } else {
2606         static constexpr unsigned REAL_FRAME_POINTER = 6U;
2607         ASSERT(ccValues_.at(5U) != nullptr);                 // thread
2608         ASSERT(ccValues_.at(REAL_FRAME_POINTER) == nullptr);  // real frame pointer
2609         ccValues_[REAL_FRAME_POINTER] = func_->getArg(REAL_FRAME_POINTER);
2610     }
2611 
2612     auto functionType = llvm::FunctionType::get(func_->getReturnType(), argTypes, false);
2613     auto call = builder_.CreateCall(functionType, ptr, ccValues_);
2614     call->setCallingConv(func_->getCallingConv());
2615     return call;
2616 }
2617 
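// For VECTOR_SIZE == 8 the body below emits: load <8 x i16>, trunc to <8 x i8>, store;
// on AArch64 the vector truncation is expected to lower to a single xtn instruction (an assumption about lowering)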
2618 template <uint32_t VECTOR_SIZE>
2619 void LLVMIrConstructor::CreateCompressUtf16ToUtf8CharsUsingSimd(Inst *inst)
2620 {
2621     ASSERT(GetGraph()->GetArch() == Arch::AARCH64);
2622     ASSERT(GetGraph()->GetMode().IsFastPath());
2623     ASSERT(inst->GetInputType(0) == DataType::POINTER);
2624     ASSERT(inst->GetInputType(1) == DataType::POINTER);
2625     static_assert(VECTOR_SIZE == VECTOR_SIZE_8 || VECTOR_SIZE == VECTOR_SIZE_16, "Unexpected vector size");
2626     auto vecInTy = llvm::VectorType::get(builder_.getInt16Ty(), VECTOR_SIZE, false);
2627     auto vecOutTy = llvm::VectorType::get(builder_.getInt8Ty(), VECTOR_SIZE, false);
2628 
2629     auto u16Ptr = GetInputValue(inst, 0);  // ptr to src array of utf16 chars
2630     auto u8Ptr = GetInputValue(inst, 1);   // ptr to dst array of utf8 chars
2631     auto inVec = builder_.CreateLoad(vecInTy, u16Ptr);
2632     auto outVec = builder_.CreateTrunc(inVec, vecOutTy);
2633     builder_.CreateStore(outVec, u8Ptr);
2634 }
2635 
2636 // Getters
2637 
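// Informal summary of the signature assembled below: [Method* when managed code is supported],
// then for the interpreter one fake pointer parameter per calling-convention register,
// then the actual method arguments (exact-typed for fast paths, widened otherwise),
// and for fast paths trailing ThreadReg and RealFP pointer parameters.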
2638 llvm::FunctionType *LLVMIrConstructor::GetEntryFunctionType()
2639 {
2640     ArenaVector<llvm::Type *> argTypes(graph_->GetLocalAllocator()->Adapter());
2641 
2642     // Method*
2643     if (graph_->SupportManagedCode()) {
2644         argTypes.push_back(builder_.getPtrTy());
2645     }
2646 
2647     // ArkInt (the interpreter entry) has fake parameters, one per calling-convention register
2648     if (graph_->GetMode().IsInterpreter()) {
2649         for (size_t i = 0; i < cc_.size(); ++i) {
2650             argTypes.push_back(builder_.getPtrTy());
2651         }
2652     }
2653 
2654     // Actual function arguments
2655     auto method = graph_->GetMethod();
2656     for (size_t i = 0; i < graph_->GetRuntime()->GetMethodTotalArgumentsCount(method); i++) {
2657         ASSERT(!graph_->GetMode().IsInterpreter());
2658         auto type = graph_->GetRuntime()->GetMethodTotalArgumentType(method, i);
2659         if (graph_->GetMode().IsFastPath()) {
2660             argTypes.push_back(GetExactType(type));
2661         } else {
2662             argTypes.push_back(GetType(type));
2663         }
2664     }
2665 
2666     // ThreadReg and RealFP for FastPaths
2667     if (graph_->GetMode().IsFastPath()) {
2668         argTypes.push_back(builder_.getPtrTy());
2669         argTypes.push_back(builder_.getPtrTy());
2670     }
2671 
2672     auto retType = graph_->GetRuntime()->GetMethodReturnType(method);
2673     ASSERT(graph_->GetMode().IsInterpreter() || retType != DataType::NO_TYPE);
2674     retType = retType == DataType::NO_TYPE ? DataType::VOID : retType;
2675     return llvm::FunctionType::get(GetType(retType), makeArrayRef(argTypes.data(), argTypes.size()), false);
2676 }
2677 
2678 llvm::Value *LLVMIrConstructor::ToSizeT(llvm::Value *value)
2679 {
2680     auto entrypointSizeType = GetEntrypointSizeType();
2681     if (value->getType() == entrypointSizeType) {
2682         return value;
2683     }
2684     ASSERT(value->getType()->getIntegerBitWidth() < entrypointSizeType->getBitWidth());
2685     return builder_.CreateZExt(value, entrypointSizeType);
2686 }
2687 
2688 llvm::Value *LLVMIrConstructor::ToSSizeT(llvm::Value *value)
2689 {
2690     auto entrypointSizeType = GetEntrypointSizeType();
2691     if (value->getType() == entrypointSizeType) {
2692         return value;
2693     }
2694     ASSERT(value->getType()->getIntegerBitWidth() < entrypointSizeType->getBitWidth());
2695     return builder_.CreateSExt(value, entrypointSizeType);
2696 }
2697 
2698 ArenaVector<llvm::Value *> LLVMIrConstructor::GetArgumentsForCall(llvm::Value *callee, CallInst *call, bool skipFirst)
2699 {
2700     ASSERT(callee->getType()->isPointerTy());
2701     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
2702     args.push_back(callee);
2703 
2704     // Skip the SaveState input, which is the last argument
2705     for (size_t i = skipFirst ? 1 : 0; i < call->GetInputsCount() - 1; i++) {
2706         auto arg = GetInputValue(call, i);
2707         auto type = call->GetInputType(i);
2708         if (DataType::IsLessInt32(type)) {
2709             arg = CoerceValue(arg, type, DataType::INT32);
2710         }
2711         args.push_back(arg);
2712     }
2713 
2714     return args;
2715 }
2716 
2717 ArenaVector<llvm::Value *> LLVMIrConstructor::GetIntrinsicArguments(llvm::FunctionType *intrinsicFunctionType,
2718                                                                     IntrinsicInst *inst)
2719 {
2720     ASSERT(intrinsicFunctionType != nullptr);
2721     ASSERT(inst != nullptr);
2722 
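    // Argument order: [Method* (when the intrinsic takes the method first)] [immediates...] [IR inputs except SaveState]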
2723     ArenaVector<llvm::Value *> args(GetGraph()->GetLocalAllocator()->Adapter());
2724 
2725     if (inst->IsMethodFirstInput()) {
2726         args.push_back(GetMethodArgument());
2727     }
2728     if (inst->HasImms()) {
2729         for (uint64_t imm : inst->GetImms()) {
2730             size_t index = args.size();
2731             auto type = intrinsicFunctionType->getParamType(index);
2732             args.push_back(llvm::ConstantInt::get(type, imm));
2733         }
2734     }
2735     for (size_t i = 0; i < inst->GetInputsCount(); i++) {
2736         // Skip SaveState
2737         if (inst->GetInput(i).GetInst()->IsSaveState()) {
2738             continue;
2739         }
2740         args.push_back(GetInputValue(inst, i));
2741     }
2742     ASSERT(intrinsicFunctionType->getNumParams() == args.size());
2743     return args;
2744 }
2745 
2746 void LLVMIrConstructor::SetIntrinsicParamAttrs(llvm::CallInst *call, IntrinsicInst *inst,
2747                                                [[maybe_unused]] llvm::ArrayRef<llvm::Value *> args)
2748 {
2749     size_t i = inst->IsMethodFirstInput() ? 1U : 0;
2750     if (inst->HasImms()) {
2751         i += inst->GetImms().size();
2752     }
2753 #ifndef NDEBUG
2754     for (size_t j = 0; j < i; j++) {
2755         ASSERT(!args[j]->getType()->isIntegerTy() || args[j]->getType()->getIntegerBitWidth() > VECTOR_SIZE_16);
2756     }
2757 #endif
2758     for (size_t arkIndex = 0; arkIndex < inst->GetInputsCount(); arkIndex++) {
2759         // Skip SaveState
2760         if (inst->GetInput(arkIndex).GetInst()->IsSaveState()) {
2761             continue;
2762         }
2763         auto arkType = inst->GetInputType(arkIndex);
2764         switch (arkType) {
2765             case DataType::UINT8:
2766                 ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_8);
2767                 call->addParamAttr(i, llvm::Attribute::ZExt);
2768                 break;
2769             case DataType::UINT16:
2770                 ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_16);
2771                 call->addParamAttr(i, llvm::Attribute::ZExt);
2772                 break;
2773             case DataType::INT8:
2774                 ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_8);
2775                 call->addParamAttr(i, llvm::Attribute::SExt);
2776                 break;
2777             case DataType::INT16:
2778                 ASSERT(args[i]->getType()->isIntegerTy() && args[i]->getType()->getIntegerBitWidth() == VECTOR_SIZE_16);
2779                 call->addParamAttr(i, llvm::Attribute::SExt);
2780                 break;
2781             case DataType::BOOL:
2782                 break;
2783             default:
2784                 ASSERT(!args[i]->getType()->isIntegerTy() || args[i]->getType()->getIntegerBitWidth() > VECTOR_SIZE_16);
2785                 break;
2786         }
2787         i++;
2788     }
2789     ASSERT(i == args.size());
2790 }
2791 
2792 template <typename T>
2793 llvm::FunctionType *LLVMIrConstructor::GetFunctionTypeForCall(T *inst)
2794 {
2795     ArenaVector<llvm::Type *> argTypes(GetGraph()->GetLocalAllocator()->Adapter());
2796 
2797     if (GetGraph()->SupportManagedCode()) {
2798         // Callee
2799         argTypes.push_back(builder_.getPtrTy());
2800     }
2801 
2802     auto runtime = GetGraph()->GetRuntime();
2803     auto methodPtr = GetGraph()->GetMethod();
2804     auto methodId = inst->GetCallMethodId();
2805     // For instance methods pass implicit object argument
2806     if (!runtime->IsMethodStatic(methodPtr, methodId)) {
2807         argTypes.push_back(builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
2808     }
2809 
2810     for (size_t i = 0; i < runtime->GetMethodArgumentsCount(methodPtr, methodId); i++) {
2811         auto ptype = runtime->GetMethodArgumentType(methodPtr, methodId, i);
2812         argTypes.push_back(GetType(ptype));
2813     }
2814 
2815     auto retType = runtime->GetMethodReturnType(methodPtr, methodId);
2816     // Ugly fix CallVirtual opcode for SaveState-excluded run codegen statistics
2817     if (methodPtr == nullptr) {
2818         retType = inst->GetType();
2819     }
2820 
2821     if constexpr (std::is_same_v<T, CallInst>) {
2822         ASSERT(inst->IsInlined() || inst->GetType() == retType);
2823     }
2824 
2825     return llvm::FunctionType::get(GetType(retType), argTypes, false);
2826 }
2827 
2828 llvm::Value *LLVMIrConstructor::GetThreadRegValue()
2829 {
2830     if (GetGraph()->SupportManagedCode()) {
2831         return llvmbackend::runtime_calls::GetThreadRegValue(&builder_, arkInterface_);
2832     }
2833     auto regInput = std::find(cc_.begin(), cc_.end(), GetThreadReg(GetGraph()->GetArch()));
2834     ASSERT(regInput != cc_.end());
2835     auto threadRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
2836     return threadRegValue;
2837 }
2838 
2839 llvm::Value *LLVMIrConstructor::GetRealFrameRegValue()
2840 {
2841     if (GetGraph()->SupportManagedCode()) {
2842         return llvmbackend::runtime_calls::GetRealFrameRegValue(&builder_, arkInterface_);
2843     }
2844     ASSERT(GetGraph()->GetMode().IsFastPath() || GetGraph()->GetArch() == Arch::X86_64);
2845     auto regInput = std::find(cc_.begin(), cc_.end(), GetRealFrameReg(GetGraph()->GetArch()));
2846     ASSERT(regInput != cc_.end());
2847     auto frameRegValue = func_->arg_begin() + std::distance(cc_.begin(), regInput);
2848     return frameRegValue;
2849 }
2850 
2851 llvm::Function *LLVMIrConstructor::GetOrCreateFunctionForCall(ark::compiler::CallInst *call, void *method)
2852 {
2853     ASSERT(method != nullptr);
2854     auto module = func_->getParent();
2855     auto methodName = arkInterface_->GetUniqMethodName(method);
2856     auto function = module->getFunction(methodName);
2857     if (function == nullptr) {
2858         auto functionProto = GetFunctionTypeForCall(call);
2859         function = CreateFunctionDeclaration(functionProto, methodName, module);
2860         function->addFnAttr("frame-pointer", "all");
2861         function->addFnAttr(
2862             ark::llvmbackend::LLVMArkInterface::SOURCE_LANG_ATTR,
2863             std::to_string(static_cast<uint8_t>(GetGraph()->GetRuntime()->GetMethodSourceLanguage(method))));
2864     }
2865     return function;
2866 }
2867 
2868 llvm::Type *LLVMIrConstructor::GetType(DataType::Type pandaType)
2869 {
2870     switch (pandaType) {
2871         case DataType::VOID:
2872             return builder_.getVoidTy();
2873         case DataType::POINTER:
2874             return builder_.getPtrTy();
2875         case DataType::REFERENCE:
2876             return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
2877         case DataType::BOOL:
2878         case DataType::UINT8:
2879         case DataType::INT8:
2880         case DataType::UINT16:
2881         case DataType::INT16:
2882         case DataType::UINT32:
2883         case DataType::INT32:
2884             return builder_.getInt32Ty();
2885         case DataType::UINT64:
2886         case DataType::INT64:
2887             return builder_.getInt64Ty();
2888         case DataType::FLOAT32:
2889             return builder_.getFloatTy();
2890         case DataType::FLOAT64:
2891             return builder_.getDoubleTy();
2892         default:
2893             ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(pandaType)
2894                                         << "' to llvm type conversion." << std::endl));
2895             UNREACHABLE();
2896     }
2897 }
2898 
2899 /**
2900  * Return the exact llvm::Type corresponding to the panda type.
2901  *
2902  * Use this method when the exact panda type is indeed required.
2903  * This is the case for:
2904  * - array loads and stores. If the 32-bit version were used, neighbouring array elements would be overwritten or read
2905  * - object field loads and stores. The reason is the same as in the case above.
2906  * - object static field loads and stores. The reason is the same as in the cases above.
2907  * - comparisons. Sometimes a boolean is compared with an i32 or another integral type.
2908  *   The exact type can be obtained from compareInst->GetOperandsType(),
2909  *   which should be used to coerce its operands.
2910  * - runtime calls. Some runtime call declarations use types narrower than the 32-bit version. To invoke them,
2911  *   the argument should be coerced to the exact type.
2912  */
2913 llvm::Type *LLVMIrConstructor::GetExactType(DataType::Type targetType)
2914 {
2915     switch (targetType) {
2916         case DataType::VOID:
2917             return builder_.getVoidTy();
2918         case DataType::POINTER:
2919             return builder_.getPtrTy();
2920         case DataType::REFERENCE:
2921             return builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE);
2922         case DataType::BOOL:
2923         case DataType::UINT8:
2924         case DataType::INT8:
2925             return builder_.getInt8Ty();
2926         case DataType::UINT16:
2927         case DataType::INT16:
2928             return builder_.getInt16Ty();
2929         case DataType::UINT32:
2930         case DataType::INT32:
2931             return builder_.getInt32Ty();
2932         case DataType::UINT64:
2933         case DataType::INT64:
2934             return builder_.getInt64Ty();
2935         case DataType::FLOAT32:
2936             return builder_.getFloatTy();
2937         case DataType::FLOAT64:
2938             return builder_.getDoubleTy();
2939         default:
2940             ASSERT_DO(false, (std::cerr << "No handler for panda type = '" << DataType::ToString(targetType)
2941                                         << "' to llvm type conversion." << std::endl));
2942             UNREACHABLE();
2943     }
2944 }
2945 
2946 llvm::Instruction::CastOps LLVMIrConstructor::GetCastOp(DataType::Type from, DataType::Type to)
2947 {
2948     Arch arch = GetGraph()->GetArch();
2949     if (IsInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) > DataType::GetTypeSize(to, arch)) {
2950         // narrowing, e.g. U32TOU8, I64TOI32
2951         return llvm::Instruction::Trunc;
2952     }
2953     if (IsSignedInteger(from) && IsInteger(to) && DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2954         // signed int widening, e.g. I32TOI64, I32TOU64
2955         return llvm::Instruction::SExt;
2956     }
2957     if (IsUnsignedInteger(from) && IsInteger(to) &&
2958         DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2959         // unsigned int widening, e.g. U32TOI64, U8TOU64
2960         return llvm::Instruction::ZExt;
2961     }
2962     if (IsUnsignedInteger(from) && DataType::IsFloatType(to)) {
2963         // unsigned int to float, e.g. U32TOF64, U64TOF64
2964         return llvm::Instruction::UIToFP;
2965     }
2966     if (IsSignedInteger(from) && DataType::IsFloatType(to)) {
2967         // signed int to float e.g. I32TOF64, I64TOF64
2968         return llvm::Instruction::SIToFP;
2969     }
2970     if (DataType::IsFloatType(from) && DataType::IsFloatType(to)) {
2971         if (DataType::GetTypeSize(from, arch) < DataType::GetTypeSize(to, arch)) {
2972             return llvm::Instruction::FPExt;
2973         }
2974         return llvm::Instruction::FPTrunc;
2975     }
2976     if (DataType::IsReference(from) && to == DataType::POINTER) {
2977         return llvm::Instruction::AddrSpaceCast;
2978     }
2979     ASSERT_DO(false, (std::cerr << "Cast from " << DataType::ToString(from) << " to " << DataType::ToString(to))
2980                          << " is not supported" << std::endl);
2981     UNREACHABLE();
2982 }
2983 
2984 // Various other helpers
2985 
2986 /**
2987  * Coerce the given {@code value} of type {@code sourceType} to the {@code targetType}.
2988  *
2989  * The method may perform truncation or widening cast, or leave the original
2990  * {@code value}, if no cast is necessary.
2991  *
2992  * For integer {@code value} when widening cast is performed the sign of the {@code sourceType} is taken
2993  * into account:
2994  * * {@code value} is zero extended if the {@code sourceType} is unsigned integer
2995  * * {@code value} is sign extended if the {@code sourceType} is signed integer
2996  *
2997  * Reference types are returned as is.
2998  *
2999  * Currently Ark Bytecode:
3000  * * does not differentiate between ints of sizes less than 32 bits, and treats them all as i32/u32
3001  * * leaves resolution of such conversions to the discretion of bytecodes accepting them
3002  * * assumes implicit casts between small integers
3003  *
3004  * Sometimes this causes inconsistencies in LLVM, since Ark Compiler IR input has implicit casts too,
3005  * but LLVM does not permit such conversions. This function performs those casts when necessary.
3006  */
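// Examples of the coercion performed below: an i64 value coerced to INT32 becomes a trunc,
// an i8 value of Ark type UINT8 coerced to INT32 becomes a zext, INT8 becomes a sext;
// reference, float, and already-matching values are returned unchanged.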
3007 llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, DataType::Type sourceType, DataType::Type targetType)
3008 {
3009     ASSERT(value != nullptr);
3010     // Other non-integer mistyping prohibited
3011     ASSERT_DO(!IsInteger(targetType) || value->getType()->isIntegerTy(),
3012               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be an integer."
3013                         << std::endl);
3014     ASSERT_DO(!DataType::IsReference(targetType) || value->getType()->isPointerTy(),
3015               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a pointer."
3016                         << std::endl);
3017     ASSERT_DO(targetType != DataType::FLOAT64 || value->getType()->isDoubleTy(),
3018               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a double."
3019                         << std::endl);
3020     ASSERT_DO(targetType != DataType::FLOAT32 || value->getType()->isFloatTy(),
3021               std::cerr << "Unexpected data type: " << GetTypeName(value->getType()) << ". Should be a float."
3022                         << std::endl);
3023 
3024     if (!IsInteger(targetType)) {
3025         return value;
3026     }
3027     ASSERT(value->getType()->isIntegerTy());
3028 
3029     auto targetLlvmType = llvm::cast<llvm::IntegerType>(GetExactType(targetType));
3030     auto originalLlvmType = llvm::cast<llvm::IntegerType>(value->getType());
3031     ASSERT(originalLlvmType->getBitWidth() == DataType::GetTypeSize(sourceType, GetGraph()->GetArch()));
3032 
3033     llvm::CastInst::CastOps castOp;
3034     if (originalLlvmType->getBitWidth() > targetLlvmType->getBitWidth()) {
3035         castOp = llvm::Instruction::Trunc;
3036     } else if (originalLlvmType->getBitWidth() < targetLlvmType->getBitWidth()) {
3037         if (IsSignedInteger(sourceType)) {
3038             castOp = llvm::Instruction::SExt;
3039         } else {
3040             castOp = llvm::Instruction::ZExt;
3041         }
3042     } else {
3043         return value;
3044     }
3045     return builder_.CreateCast(castOp, value, targetLlvmType);
3046 }
3047 
3048 /**
3049  * Used in irtoc C++ inlining.
3050  *
3051  * When we compile irtoc handlers, we do not have ark's types.
3052  * For example, ark::Frame is missing.
3053  * LLVM AOT uses i8* or i64 instead
3054  *
3055  * For example, the irtoc handler could look like:
3056  *
3057  * @code
3058  * void MyHandler(i8* myObject) {
3059  *    var clone = CloneObjectEntrypoint(myObject);
3060  * }
3061  * @endcode
3062  *
3063  * When we compile interpreter handlers with cpp inlining we have the definition of CloneObjectEntrypoint:
3064  *
3065  * @code
3066  * ObjectHeader *CloneObjectEntrypoint(ObjectHeader *obj) {
3067  *   ...
3068  * }
3069  * @endcode
3070  *
3071  * and we must invoke the CloneObjectEntrypoint with ObjectHeader* argument, not i8*.
3072  * The CoerceValue method converts i8* to ObjectHeader*
3073  */
3074 llvm::Value *LLVMIrConstructor::CoerceValue(llvm::Value *value, llvm::Type *targetType)
3075 {
3076     auto valueType = value->getType();
3077     if (valueType == targetType) {
3078         return value;
3079     }
3080 
3081     if (!valueType->isPointerTy() && targetType->isPointerTy()) {
3082         // DataType::POINTER to targetType.
3083         // Example: i64 -> %"class.ark::Frame"*
3084         return builder_.CreateIntToPtr(value, targetType);
3085     }
3086     if (valueType->isPointerTy() && !targetType->isPointerTy()) {
3087         // valueType to DataType::POINTER
3088         // Example: %"class.ark::coretypes::String"* -> i64
3089         return builder_.CreatePtrToInt(value, targetType);
3090     }
3091 
3092     if (valueType->isIntegerTy() && targetType->isIntegerTy()) {
3093         auto valueWidth = llvm::cast<llvm::IntegerType>(valueType)->getBitWidth();
3094         auto targetWidth = llvm::cast<llvm::IntegerType>(targetType)->getBitWidth();
3095         if (valueWidth > targetWidth) {
3096             return builder_.CreateTrunc(value, targetType);
3097         }
3098         if (valueWidth < targetWidth) {
3099             return builder_.CreateZExt(value, targetType);
3100         }
3101     }
3102     if (valueType->isPointerTy() && targetType->isPointerTy()) {
3103         return builder_.CreateAddrSpaceCast(value, targetType);
3104     }
3105     UNREACHABLE();
3106 }
3107 
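// Records the LLVM value produced for 'inst': GC references of non-movable instructions are marked as such,
// integer values are truncated/extended to the exact Ark type, and coerced variants are pre-populated for users
// that consume the value as a different type (see FillValueMapForUsers)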
3108 void LLVMIrConstructor::ValueMapAdd(Inst *inst, llvm::Value *value, bool setName)
3109 {
3110     if (!inst->IsMovableObject() && !inst->IsCheck() && llvmbackend::gc_utils::IsGcRefType(value->getType())) {
3111         auto llvmInst = llvm::dyn_cast<llvm::Instruction>(value);
3112         if (llvmInst != nullptr) {
3113             llvmbackend::gc_utils::MarkAsNonMovable(llvmInst);
3114         }
3115     }
3116 
3117     auto type = inst->GetType();
3118     auto ltype = GetExactType(type);
3119     ASSERT(inputMap_.count(inst) == 0);
3120     auto it = inputMap_.emplace(inst, GetGraph()->GetLocalAllocator()->Adapter());
3121     ASSERT(it.second);
3122     ArenaUnorderedMap<DataType::Type, llvm::Value *> &typeMap = it.first->second;
3123 
3124     if (value == nullptr) {
3125         typeMap.insert({type, nullptr});
3126         return;
3127     }
3128     if (setName) {
3129         value->setName(CreateNameForInst(inst));
3130     }
3131     if (inst->GetOpcode() == Opcode::LiveOut || !ltype->isIntegerTy()) {
3132         typeMap.insert({type, value});
3133         if (type == DataType::POINTER) {
3134             FillValueMapForUsers(&typeMap, inst, value);
3135         }
3136         return;
3137     }
3138     ASSERT(value->getType()->isIntegerTy());
3139     if (value->getType()->getIntegerBitWidth() > ltype->getIntegerBitWidth()) {
3140         value = builder_.CreateTrunc(value, ltype);
3141     } else if (value->getType()->getIntegerBitWidth() < ltype->getIntegerBitWidth()) {
3142         value = builder_.CreateZExt(value, ltype);
3143     }
3144     typeMap.insert({type, value});
3145     FillValueMapForUsers(&typeMap, inst, value);
3146 }
3147 
3148 void LLVMIrConstructor::FillValueMapForUsers(ArenaUnorderedMap<DataType::Type, llvm::Value *> *map, Inst *inst,
3149                                              llvm::Value *value)
3150 {
3151     auto type = inst->GetType();
3152     ASSERT(type != DataType::REFERENCE);
3153     for (auto &userItem : inst->GetUsers()) {
3154         auto user = userItem.GetInst();
3155         for (unsigned i = 0; i < user->GetInputsCount(); i++) {
3156             auto itype = user->GetInputType(i);
3157             auto input = user->GetInput(i).GetInst();
3158             if (input != inst || itype == type || map->count(itype) != 0) {
3159                 continue;
3160             }
3161             /*
3162              * Implicit conversions in Ark Compiler IR and how they are handled on the LLVM side:
3163              * 1. POINTER to REFERENCE (user LiveOut or Store) -> AddrSpaceCast
3164              * 2. POINTER to UINT64 (user is LiveOut)          -> no conversion necessary
3165              * 3. LiveIn to REFERENCE                          -> no conversion necessary
3166              * 4. INT64/UINT64 to REFERENCE (user is LiveOut)  -> IntToPtr
3167              * 5. Integers                                     -> use coercing
3168              */
3169             llvm::Value *cvalue;
3170             if (type == DataType::POINTER && itype == DataType::REFERENCE) {
3171                 ASSERT(user->GetOpcode() == Opcode::LiveOut || user->GetOpcode() == Opcode::Store);
3172                 cvalue = builder_.CreateAddrSpaceCast(value, builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3173             } else if (type == DataType::POINTER && itype == DataType::UINT64) {
3174                 ASSERT(user->GetOpcode() == Opcode::LiveOut);
3175                 cvalue = value;
3176             } else if (type == DataType::POINTER) {
3177                 continue;
3178             } else if (inst->GetOpcode() == Opcode::LiveIn && itype == DataType::REFERENCE) {
3179                 cvalue = value;
3180             } else if ((type == DataType::INT64 || type == DataType::UINT64) && itype == DataType::REFERENCE) {
3181                 ASSERT(user->GetOpcode() == Opcode::LiveOut);
3182                 cvalue = builder_.CreateIntToPtr(value, builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3183             } else {
3184                 cvalue = CoerceValue(value, type, itype);
3185             }
3186             map->insert({itype, cvalue});
3187         }
3188     }
3189 }
3190 
3191 void LLVMIrConstructor::WrapArkCall(Inst *orig, llvm::CallInst *call)
3192 {
3193     ASSERT(orig->RequireState());
3194     ASSERT_PRINT(!call->getDebugLoc(), "Debug info must be unset");
3195     // Ark calls may call GC inside, so add statepoint
3196     debugData_->SetLocation(call, orig->GetPc());
3197     EncodeInlineInfo(orig, call);
3198 }
3199 
3200 void LLVMIrConstructor::InitializeEntryBlock(bool noInline)
3201 {
3202     if (noInline) {
3203         ASSERT(!arkInterface_->IsIrtocMode() && GetGraph()->SupportManagedCode());
3204         func_->addFnAttr(llvm::Attribute::NoInline);
3205         // This type of linkage prevents return value propagation.
3206         // llvm::GlobalValue::isDefinitionExact becomes false and as a result
3207         // llvm::canTrackReturnsInterprocedurally() also returns false.
3208         func_->setLinkage(llvm::Function::WeakAnyLinkage);
3209     }
3210 
3211     if (GetGraph()->SupportManagedCode()) {
3212         func_->addParamAttr(GetMethodArgument()->getArgNo(), llvm::Attribute::NonNull);
3213         if (!GetGraph()->GetRuntime()->IsMethodStatic(GetGraph()->GetMethod())) {
3214             func_->addParamAttr(GetArgument(0)->getArgNo(), llvm::Attribute::NonNull);
3215         }
3216     }
3217 
3218     if (func_->hasMetadata(LLVMArkInterface::FUNCTION_MD_INLINE_MODULE) &&
3219         !GetGraph()->GetRuntime()->IsMethodStatic(GetGraph()->GetMethod())) {
3220         SetCurrentBasicBlock(&func_->getEntryBlock());
3221         builder_.CreateCall(KeepThis(func_->getParent()), GetArgument(0));
3222     }
3223 }
3224 
3225 void LLVMIrConstructor::MarkAsAllocation(llvm::CallInst *call)
3226 {
3227     llvm::AttrBuilder builder {call->getContext()};
3228     /**
3229      * When we add the allockind("alloc") attribute, LLVM can assume that the function is an allocation function.
3230      * With this assumption LLVM can remove dead allocations.
3231      */
3232     builder.addAllocKindAttr(llvm::AllocFnKind::Alloc);
3233     call->addFnAttr(builder.getAttribute(llvm::Attribute::AllocKind));
3234     call->addRetAttr(llvm::Attribute::NonNull);
3235     call->addRetAttr(llvm::Attribute::NoAlias);
3236 }
3237 
3238 // Instruction Visitors
3239 
3240 // Constant and NullPtr are processed directly in GetInputValue
3241 void LLVMIrConstructor::VisitConstant([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
3242 {
3243     ASSERT(inst->GetBasicBlock()->IsStartBlock());
3244 }
3245 
3246 void LLVMIrConstructor::VisitNullPtr([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst)
3247 {
3248     ASSERT(inst->GetBasicBlock()->IsStartBlock());
3249 }
3250 
3251 void LLVMIrConstructor::VisitLiveIn(GraphVisitor *v, Inst *inst)
3252 {
3253     auto ctor = static_cast<LLVMIrConstructor *>(v);
3254     ASSERT(inst->GetBasicBlock()->IsStartBlock());
3255     ASSERT(!ctor->GetGraph()->SupportManagedCode());
3256 
3257     auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->CastToLiveIn()->GetDstReg());
3258     ASSERT(regInput != ctor->cc_.end());
3259     auto idx = std::distance(ctor->cc_.begin(), regInput);
3260     auto n = ctor->func_->arg_begin() + idx;
3261     ctor->ValueMapAdd(inst, ctor->CoerceValue(n, ctor->GetExactType(inst->GetType())));
3262 }
3263 
3264 void LLVMIrConstructor::VisitParameter(GraphVisitor *v, Inst *inst)
3265 {
3266     ASSERT(inst->GetBasicBlock()->IsStartBlock());
3267     auto ctor = static_cast<LLVMIrConstructor *>(v);
3268     ASSERT(ctor->GetGraph()->SupportManagedCode() || ctor->GetGraph()->GetMode().IsFastPath());
3269     auto n = ctor->GetArgument(inst->CastToParameter()->GetArgNumber());
3270     ctor->ValueMapAdd(inst, n, false);
3271 }
3272 
3273 void LLVMIrConstructor::VisitReturnVoid(GraphVisitor *v, Inst *inst)
3274 {
3275     auto ctor = static_cast<LLVMIrConstructor *>(v);
3276     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
3277         auto builtin = BarrierReturnVoid(ctor->func_->getParent());
3278         auto builtinCall = ctor->builder_.CreateCall(builtin);
3279         builtinCall->addFnAttr(llvm::Attribute::get(builtinCall->getContext(), "needs-mem-barrier"));
3280     }
3281     ctor->builder_.CreateRetVoid();
3282 }
3283 
3284 void LLVMIrConstructor::VisitReturn(GraphVisitor *v, Inst *inst)
3285 {
3286     auto ctor = static_cast<LLVMIrConstructor *>(v);
3287     auto ret = ctor->GetInputValue(inst, 0);
3288 
3289     auto type = inst->GetType();
3290     if (DataType::IsLessInt32(type)) {
3291         ret = ctor->CoerceValue(ret, type, DataType::INT32);
3292     }
3293 
3294     ctor->builder_.CreateRet(ret);
3295 }
3296 
3297 void LLVMIrConstructor::VisitReturnInlined(GraphVisitor *v, Inst *inst)
3298 {
3299     auto ctor = static_cast<LLVMIrConstructor *>(v);
3300 
3301     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
3302         auto builtin = BarrierReturnVoid(ctor->func_->getParent());
3303         auto builtinCall = ctor->builder_.CreateCall(builtin);
3304         builtinCall->addFnAttr(llvm::Attribute::get(builtinCall->getContext(), "needs-mem-barrier"));
3305     }
3306 }
3307 
3308 void LLVMIrConstructor::VisitReturnI(GraphVisitor *v, Inst *inst)
3309 {
3310     auto ctor = static_cast<LLVMIrConstructor *>(v);
3311     llvm::Value *ret = ctor->builder_.getInt64(inst->CastToReturnI()->GetImm());
3312 
3313     auto type = inst->GetType();
3314     if (DataType::IsInt32Bit(type)) {
3315         ret = ctor->CoerceValue(ret, DataType::INT64, DataType::INT32);
3316     }
3317 
3318     ctor->builder_.CreateRet(ret);
3319 }
3320 
3321 // No-op "pseudo" instructions
3322 void LLVMIrConstructor::VisitTry([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
3323 void LLVMIrConstructor::VisitSaveState([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
3324 void LLVMIrConstructor::VisitSaveStateDeoptimize([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
3325 void LLVMIrConstructor::VisitSafePoint([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
3326 // NOP and Deoptimize* are required after adding the CheckElim* passes
3327 void LLVMIrConstructor::VisitNOP([[maybe_unused]] GraphVisitor *v, [[maybe_unused]] Inst *inst) {}
3328 
3329 void LLVMIrConstructor::VisitLiveOut(GraphVisitor *v, Inst *inst)
3330 {
3331     auto ctor = static_cast<LLVMIrConstructor *>(v);
3332     ASSERT(!ctor->GetGraph()->SupportManagedCode());
3333     auto input = ctor->GetInputValue(inst, 0);
3334 
3335     auto regInput = std::find(ctor->cc_.begin(), ctor->cc_.end(), inst->GetDstReg());
3336     ASSERT(regInput != ctor->cc_.end());
3337     size_t idx = std::distance(ctor->cc_.begin(), regInput);
3338     ASSERT(ctor->ccValues_[idx] == nullptr);
3339 
3340     // LiveOut not allowed for real frame register
3341     ASSERT(ctor->GetGraph()->GetArch() == Arch::AARCH64 || idx + 1 != ctor->cc_.size());
3342     auto value = ctor->CoerceValue(input, ctor->GetExactType(inst->GetType()));
3343     ctor->ccValues_[idx] = value;
3344     ctor->ValueMapAdd(inst, value, false);
3345 }
3346 
3347 void LLVMIrConstructor::VisitSubOverflowCheck(GraphVisitor *v, Inst *inst)
3348 {
3349     auto ctor = static_cast<LLVMIrConstructor *>(v);
3350     auto dtype = inst->GetType();
3351     auto ltype = ctor->GetExactType(dtype);
3352     auto src0 = ctor->GetInputValue(inst, 0);
3353     auto src1 = ctor->GetInputValue(inst, 1);
3354     ASSERT(inst->GetInputType(0) == inst->GetInputType(1));
3355 
3356     auto arch = ctor->GetGraph()->GetArch();
3357     auto dtypeSize = DataType::GetTypeSize(dtype, arch);
3358     auto srcTypeSize = DataType::GetTypeSize(inst->GetInputType(0), arch);
3359     ASSERT(DataType::Is32Bits(dtype, arch) || DataType::Is64Bits(dtype, arch));
3360     if (srcTypeSize < dtypeSize) {
3361         src0 = ctor->builder_.CreateSExt(src0, ltype);
3362         src1 = ctor->builder_.CreateSExt(src1, ltype);
3363     }
3364     if (dtypeSize < srcTypeSize) {
3365         src0 = ctor->builder_.CreateTrunc(src0, ltype);
3366         src1 = ctor->builder_.CreateTrunc(src1, ltype);
3367     }
3368 
3369     auto ssubOverflow = ctor->builder_.CreateBinaryIntrinsic(llvm::Intrinsic::ssub_with_overflow, src0, src1);
3370     auto result = ctor->builder_.CreateExtractValue(ssubOverflow, {0}, "ssub");
3371     auto deoptimize = ctor->builder_.CreateExtractValue(ssubOverflow, {1}, "obit");
3372 
3373     auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
3374     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);
3375 
3376     ctor->ValueMapAdd(inst, result, false);
3377 }
3378 
3379 void LLVMIrConstructor::VisitDeoptimize(GraphVisitor *v, Inst *inst)
3380 {
3381     auto ctor = static_cast<LLVMIrConstructor *>(v);
3382     auto type = inst->CastToDeoptimize()->GetDeoptimizeType();
3383     auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
3384     uint64_t value = static_cast<uint64_t>(type) | (inst->GetId() << MinimumBitsToStore(DeoptimizeType::COUNT));
3385     auto call = ctor->CreateEntrypointCall(exception, inst, {ctor->builder_.getInt64(value)});
3386     call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
3387     ctor->builder_.CreateUnreachable();
3388 }
3389 
3390 void LLVMIrConstructor::VisitDeoptimizeIf(GraphVisitor *v, Inst *inst)
3391 {
3392     auto ctor = static_cast<LLVMIrConstructor *>(v);
3393     auto exception = RuntimeInterface::EntrypointId::DEOPTIMIZE;
3394     auto deoptimize = ctor->builder_.CreateIsNotNull(ctor->GetInputValue(inst, 0));
3395     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);
3396 }
3397 
3398 void LLVMIrConstructor::VisitNegativeCheck(GraphVisitor *v, Inst *inst)
3399 {
3400     auto ctor = static_cast<LLVMIrConstructor *>(v);
3401     auto val = ctor->GetInputValue(inst, 0);
3402 
3403     auto deoptimize = ctor->builder_.CreateICmpSLT(val, llvm::Constant::getNullValue(val->getType()));
3404     auto exception = RuntimeInterface::EntrypointId::NEGATIVE_ARRAY_SIZE_EXCEPTION;
3405     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception, {ctor->ToSSizeT(val)});
3406 
3407     ctor->ValueMapAdd(inst, val, false);
3408 }
3409 
3410 void LLVMIrConstructor::VisitZeroCheck(GraphVisitor *v, Inst *inst)
3411 {
3412     auto ctor = static_cast<LLVMIrConstructor *>(v);
3413     auto val = ctor->GetInputValue(inst, 0);
3414 
3415     auto deoptimize = ctor->builder_.CreateIsNull(val);
3416     auto exception = RuntimeInterface::EntrypointId::ARITHMETIC_EXCEPTION;
3417     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);
3418 
3419     ctor->ValueMapAdd(inst, val, false);
3420 }
3421 
3422 void LLVMIrConstructor::VisitNullCheck(GraphVisitor *v, Inst *inst)
3423 {
3424     auto ctor = static_cast<LLVMIrConstructor *>(v);
3425     auto obj = ctor->GetInputValue(inst, 0);
3426     auto obj64 = obj;
3427 
3428     if (compiler::g_options.IsCompilerImplicitNullCheck()) {
3429         // LLVM's ImplicitNullChecks pass can't operate with 32-bit pointers, but it is enough
3430         // to create an address space cast to an ordinary 64-bit pointer before comparing with null.
3431         obj64 = ctor->builder_.CreateAddrSpaceCast(obj, ctor->builder_.getPtrTy());
3432     }
3433 
3434     auto deoptimize = ctor->builder_.CreateIsNull(obj64);
3435     auto exception = RuntimeInterface::EntrypointId::NULL_POINTER_EXCEPTION;
3436     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception);
3437 
3438     ctor->ValueMapAdd(inst, obj, false);
3439 }
3440 
3441 void LLVMIrConstructor::VisitBoundsCheck(GraphVisitor *v, Inst *inst)
3442 {
3443     auto ctor = static_cast<LLVMIrConstructor *>(v);
3444     auto length = ctor->GetInputValue(inst, 0);
3445     ASSERT_TYPE(length, ctor->builder_.getInt32Ty());
3446     auto index = ctor->GetInputValue(inst, 1);
3447     ASSERT(index->getType()->isIntegerTy());
3448 
3449     auto deoptimize = ctor->builder_.CreateICmpUGE(index, length);
3450     auto exception = inst->CastToBoundsCheck()->IsArray()
3451                          ? RuntimeInterface::EntrypointId::ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION
3452                          : RuntimeInterface::EntrypointId::STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION;
3453     ctor->CreateDeoptimizationBranch(inst, deoptimize, exception, {ctor->ToSSizeT(index), ctor->ToSizeT(length)});
3454 
3455     ctor->ValueMapAdd(inst, index, false);
3456 }
3457 
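// RefTypeCheck lowering: storing null always passes; otherwise the array's element class is
// compared with the stored object's class, a store into an Object array (element base class is
// null) is accepted, and all remaining cases go to the CHECK_STORE_ARRAY_REFERENCE entrypoints.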
3458 void LLVMIrConstructor::VisitRefTypeCheck(GraphVisitor *v, Inst *inst)
3459 {
3460     auto ctor = static_cast<LLVMIrConstructor *>(v);
3461 
3462     auto array = ctor->GetInputValue(inst, 0);
3463     auto ref = ctor->GetInputValue(inst, 1);
3464 
3465     auto &ctx = ctor->func_->getContext();
3466     auto compareBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "comparison"), ctor->func_);
3467     auto compBaseBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "comp_base"), ctor->func_);
3468     auto slowPathBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "slow_path"), ctor->func_);
3469     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "out"), ctor->func_);
3470 
3471     auto runtime = ctor->GetGraph()->GetRuntime();
3472     auto arch = ctor->GetGraph()->GetArch();
3473 
3474     auto cmp = ctor->builder_.CreateIsNotNull(ref);
3475     ctor->builder_.CreateCondBr(cmp, compareBb, outBb);
3476 
3477     // Get element class from array
3478     ctor->SetCurrentBasicBlock(compareBb);
3479     auto arrayClass = CreateLoadClassFromObject(array, &ctor->builder_, ctor->arkInterface_);
3480     auto elementTypeOffset = runtime->GetClassComponentTypeOffset(arch);
3481     auto int8Ty = ctor->builder_.getInt8Ty();
3482     auto elementClassPtr = ctor->builder_.CreateConstInBoundsGEP1_32(int8Ty, arrayClass, elementTypeOffset);
3483     auto elementClass = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), elementClassPtr);
3484     // And the class of the stored object
3485     auto refClass = CreateLoadClassFromObject(ref, &ctor->builder_, ctor->arkInterface_);
3486 
3487     // Unlike other checks, there's another check in the runtime function, so don't use CreateDeoptimizationBranch
3488     cmp = ctor->builder_.CreateICmpNE(elementClass, refClass);
3489     auto branchWeights =
3490         llvm::MDBuilder(ctx).createBranchWeights(llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT,
3491                                                  llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT);
3492     ctor->builder_.CreateCondBr(cmp, compBaseBb, outBb, branchWeights);
3493 
3494     // If the array's element class is Object (its base class is null), no further check is needed
3495     ctor->SetCurrentBasicBlock(compBaseBb);
3496     auto baseTypeOffset = runtime->GetClassBaseOffset(arch);
3497     auto baseClassPtr = ctor->builder_.CreateConstInBoundsGEP1_32(int8Ty, elementClass, baseTypeOffset);
3498     auto baseClass = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), baseClassPtr);
3499     auto notObjectArray = ctor->builder_.CreateIsNotNull(baseClass);
3500     ctor->builder_.CreateCondBr(notObjectArray, slowPathBb, outBb);
3501 
3502     ctor->SetCurrentBasicBlock(slowPathBb);
3503     if (inst->CanDeoptimize()) {
3504         auto entrypoint = RuntimeInterface::EntrypointId::CHECK_STORE_ARRAY_REFERENCE_DEOPTIMIZE;
3505         auto call = ctor->CreateEntrypointCall(entrypoint, inst, {array, ref});
3506         call->addFnAttr(llvm::Attribute::get(call->getContext(), "may-deoptimize"));
3507     } else {
3508         ctor->CreateEntrypointCall(RuntimeInterface::EntrypointId::CHECK_STORE_ARRAY_REFERENCE, inst, {array, ref});
3509     }
3510     ctor->builder_.CreateBr(outBb);
3511 
3512     ctor->SetCurrentBasicBlock(outBb);
3513     ctor->ValueMapAdd(inst, ref, false);
3514 }
3515 
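// LoadString lowering: with AOT PLT string resolution enabled (and outside static constructors)
// the string is loaded through its AOT slot via the LoadString builtin; otherwise it falls back
// to the RESOLVE_STRING entrypoint.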
3516 void LLVMIrConstructor::VisitLoadString(GraphVisitor *v, Inst *inst)
3517 {
3518     auto ctor = static_cast<LLVMIrConstructor *>(v);
3519 
3520     llvm::Value *result;
3521     if (g_options.IsCompilerAotLoadStringPlt() &&
3522         !ctor->GetGraph()->GetRuntime()->IsMethodStaticConstructor(ctor->GetGraph()->GetMethod())) {
3523         auto aotData = ctor->GetGraph()->GetAotData();
3524         ASSERT(aotData != nullptr);
3525 
3526         auto typeId = inst->CastToLoadString()->GetTypeId();
3527         auto typeVal = ctor->builder_.getInt32(typeId);
3528         auto slotVal = ctor->builder_.getInt32(ctor->arkInterface_->GetStringSlotId(aotData, typeId));
3529         ctor->arkInterface_->GetOrCreateRuntimeFunctionType(
3530             ctor->func_->getContext(), ctor->func_->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
3531             static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_STRING_AOT));
3532 
3533         auto builtin = LoadString(ctor->func_->getParent());
3534         auto call = ctor->builder_.CreateCall(builtin, {typeVal, slotVal}, ctor->CreateSaveStateBundle(inst));
3535         ctor->WrapArkCall(inst, call);
3536         result = call;
3537     } else {
3538         auto stringType = ctor->builder_.getInt32(inst->CastToLoadString()->GetTypeId());
3539         auto entrypointId = RuntimeInterface::EntrypointId::RESOLVE_STRING;
3540         result = ctor->CreateEntrypointCall(entrypointId, inst, {ctor->GetMethodArgument(), stringType});
3541     }
3542     ctor->ValueMapAdd(inst, result);
3543 }
3544 
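// LenArray lowering: if the array comes directly from a NewArray, its size input is reused;
// otherwise the length is read through the LenArray builtin at the array or string length offset.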
3545 void LLVMIrConstructor::VisitLenArray(GraphVisitor *v, Inst *inst)
3546 {
3547     auto ctor = static_cast<LLVMIrConstructor *>(v);
3548     auto array = ctor->GetInputValue(inst, 0);
3549     auto runtime = ctor->GetGraph()->GetRuntime();
3550     bool isString = !inst->CastToLenArray()->IsArray();
3551     auto &builder = ctor->builder_;
3552 
3553     auto arrayInput = inst->GetDataFlowInput(0);
3554     // Try to take the array length directly from the NewArray that constructed it
3555     if (arrayInput->GetOpcode() == Opcode::NewArray) {
3556         auto size = ctor->GetInputValue(arrayInput, NewArrayInst::INDEX_SIZE);
3557         ctor->ValueMapAdd(inst, size);
3558         return;
3559     }
3560     auto builtin = LenArray(ctor->func_->getParent());
3561     auto arch = ctor->GetGraph()->GetArch();
3562     auto offset = isString ? runtime->GetStringLengthOffset(arch) : runtime->GetArrayLengthOffset(arch);
3563     auto len = ctor->builder_.CreateCall(builtin, {array, builder.getInt32(offset)});
3564 
3565     ctor->ValueMapAdd(inst, len);
3566 }
3567 
3568 void LLVMIrConstructor::VisitLoadArray(GraphVisitor *v, Inst *inst)
3569 {
3570     auto ctor = static_cast<LLVMIrConstructor *>(v);
3571     auto loadArray = inst->CastToLoadArray();
3572 
3573     auto array = ctor->GetInputValue(inst, 0);
3574     ASSERT_TYPE(array, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3575 
3576     auto dtype = inst->GetType();
3577     auto ltype = ctor->GetExactType(dtype);
3578     auto arch = ctor->GetGraph()->GetArch();
3579     uint32_t dataOffset = ctor->GetGraph()->GetRuntime()->GetArrayDataOffset(arch);
3580     if (!loadArray->IsArray()) {
3581         dataOffset = ctor->GetGraph()->GetRuntime()->GetStringDataOffset(arch);
3582     }
3583     auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), array, dataOffset);
3584 
3585     llvm::Value *ptrElem = ctor->builder_.CreateInBoundsGEP(ltype, ptrData, ctor->GetInputValue(inst, 1));
3586 
3587     llvm::Value *n = ctor->builder_.CreateLoad(ltype, ptrElem);
3588     ctor->ValueMapAdd(inst, n);
3589 }
3590 
3591 void LLVMIrConstructor::VisitLoadCompressedStringChar(GraphVisitor *v, Inst *inst)
3592 {
3593     auto ctor = static_cast<LLVMIrConstructor *>(v);
3594     auto loadString = inst->CastToLoadCompressedStringChar();
3595 
3596     ASSERT(inst->GetType() == DataType::UINT16);
3597 
3598     auto array = ctor->GetInputValue(loadString, 0);
3599     auto index = ctor->GetInputValue(loadString, 1);
3600     auto length = ctor->GetInputValue(loadString, 2);
3601 
3602     ASSERT(ctor->GetGraph()->GetRuntime()->GetStringCompressionMask() == 1U);
3603     auto compressionMask = ctor->builder_.getInt32(ctor->GetGraph()->GetRuntime()->GetStringCompressionMask());
3604     auto dataOff = ctor->GetGraph()->GetRuntime()->GetStringDataOffset(ctor->GetGraph()->GetArch());
3605     auto chars = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), array, dataOff);
3606     auto isCompressed = ctor->builder_.CreateIsNull(ctor->builder_.CreateAnd(length, compressionMask));
3607 
3608     /**
3609      * int32_t CompressedCharAt(uint8_t *string, int32_t index) {
3610      *     int32_t length = LenArray(string, LENGTH_OFFSET, SHIFT);
3611      *     bool isCompressed = (length & COMPRESSION_MASK) == 0;
3612      *     uint8_t *chars = string + DATA_OFFSET;
3613      *
3614      *     uint16_t c;
3615      *     if (isCompressed) {
3616      *         // compressedBb
3617      *         c = static_cast<uint16_t>(chars[index]);
3618      *     } else {
3619      *         // uncompressedBb
3620      *         c = reinterpret_cast<uint16_t *>(chars)[index];
3621      *     }
3622      *     // Coercing
3623      *     return static_cast<int32_t>(c);
3624      * }
3625      */
3626     auto compressedBb =
3627         llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "compressed_bb"), ctor->func_);
3628     auto uncompressedBb =
3629         llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "uncompressed_bb"), ctor->func_);
3630     auto continuation =
3631         llvm::BasicBlock::Create(ctor->func_->getContext(), CreateBasicBlockName(inst, "char_at_cont"), ctor->func_);
3632     ctor->builder_.CreateCondBr(isCompressed, compressedBb, uncompressedBb);
3633     llvm::Value *compressedChar;
3634     {
3635         ctor->SetCurrentBasicBlock(compressedBb);
3636         ASSERT_TYPE(chars, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3637         auto charAt = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), chars, index);
3638         auto character = ctor->builder_.CreateLoad(ctor->builder_.getInt8Ty(), charAt);
3639         compressedChar = ctor->builder_.CreateSExt(character, ctor->builder_.getInt16Ty());
3640         ctor->builder_.CreateBr(continuation);
3641     }
3642 
3643     llvm::Value *uncompressedChar;
3644     {
3645         ctor->SetCurrentBasicBlock(uncompressedBb);
3646         auto u16CharAt = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt16Ty(), chars, index);
3647         uncompressedChar = ctor->builder_.CreateLoad(ctor->builder_.getInt16Ty(), u16CharAt);
3648         ctor->builder_.CreateBr(continuation);
3649     }
3650     ctor->SetCurrentBasicBlock(continuation);
3651 
3652     auto charAt = ctor->builder_.CreatePHI(ctor->builder_.getInt16Ty(), 2U);
3653     charAt->addIncoming(compressedChar, compressedBb);
3654     charAt->addIncoming(uncompressedChar, uncompressedBb);
3655     ctor->ValueMapAdd(inst, charAt);
3656 }
3657 
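// StoreArray lowering: the element address is the data offset plus the scaled index; pre- and
// post- write barriers are emitted around the store when the GC requires them.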
3658 void LLVMIrConstructor::VisitStoreArray(GraphVisitor *v, Inst *inst)
3659 {
3660     auto ctor = static_cast<LLVMIrConstructor *>(v);
3661     auto array = ctor->GetInputValue(inst, 0);
3662     ASSERT_TYPE(array, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3663     auto value = ctor->GetInputValue(inst, 2U);
3664 
3665     auto dtype = inst->GetType();
3666     auto arch = ctor->GetGraph()->GetArch();
3667     auto ltype = ctor->GetExactType(dtype);
3668     auto dataOff = ctor->GetGraph()->GetRuntime()->GetArrayDataOffset(arch);
3669     auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), array, dataOff);
3670     auto index = ctor->GetInputValue(inst, 1);
3671     auto ptrElem = ctor->builder_.CreateInBoundsGEP(ltype, ptrData, index);
3672 
3673     // Pre
3674     if (inst->CastToStoreArray()->GetNeedBarrier()) {
3675         ctor->CreatePreWRB(inst, ptrElem);
3676     }
3677     // Write
3678     ctor->builder_.CreateStore(value, ptrElem);
3679     // Post
3680     if (inst->CastToStoreArray()->GetNeedBarrier()) {
3681         auto indexOffset = ctor->builder_.CreateBinOp(llvm::Instruction::Shl, index,
3682                                                       ctor->builder_.getInt32(DataType::ShiftByType(dtype, arch)));
3683         auto offset = ctor->builder_.CreateBinOp(llvm::Instruction::Add, indexOffset, ctor->builder_.getInt32(dataOff));
3684         ctor->CreatePostWRB(inst, array, offset, value);
3685     }
3686 }
3687 
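// Load lowering: the address is formed as an i8-typed GEP of the base pointer by the offset
// input, and the load uses the atomic ordering derived from the instruction's volatile flag.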
3688 void LLVMIrConstructor::VisitLoad(GraphVisitor *v, Inst *inst)
3689 {
3690     auto ctor = static_cast<LLVMIrConstructor *>(v);
3691     auto srcPtr = ctor->GetInputValue(inst, 0);
3692     ASSERT(srcPtr->getType()->isPointerTy());
3693 
3694     llvm::Value *offset;
3695     auto offsetInput = inst->GetInput(1).GetInst();
3696     auto offsetItype = offsetInput->GetType();
3697     if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
3698         ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
3699         offset = ctor->GetInputValue(inst, 1, true);
3700     } else {
3701         offset = ctor->GetInputValue(inst, 1);
3702     }
3703 
3704     ASSERT(srcPtr->getType()->isPointerTy());
3705     auto ptr = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);
3706 
3707     auto n = ctor->CreateLoadWithOrdering(inst, ptr, ToAtomicOrdering(inst->CastToLoad()->GetVolatile()));
3708     ctor->ValueMapAdd(inst, n);
3709 }
3710 
3711 void LLVMIrConstructor::VisitLoadNative(GraphVisitor *v, Inst *inst)
3712 {
3713     inst->SetOpcode(Opcode::Load);
3714     VisitLoad(v, inst);
3715 }
3716 
3717 void LLVMIrConstructor::VisitStore(GraphVisitor *v, Inst *inst)
3718 {
3719     auto ctor = static_cast<LLVMIrConstructor *>(v);
3720     auto srcPtr = ctor->GetInputValue(inst, 0);
3721     auto value = ctor->GetInputValue(inst, 2U);
3722 
3723     llvm::Value *offset;
3724     auto offsetInput = inst->GetInput(1).GetInst();
3725     auto offsetItype = offsetInput->GetType();
3726     if (offsetItype == DataType::UINT64 || offsetItype == DataType::INT64) {
3727         ASSERT(offsetInput->GetOpcode() != Opcode::Load && offsetInput->GetOpcode() != Opcode::LoadI);
3728         offset = ctor->GetInputValue(inst, 1, true);
3729     } else {
3730         offset = ctor->GetInputValue(inst, 1);
3731     }
3732 
3733     auto ptrPlus = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), srcPtr, offset);
3734 
3735     // Pre
3736     if (inst->CastToStore()->GetNeedBarrier()) {
3737         ctor->CreatePreWRB(inst, ptrPlus);
3738     }
3739     // Write
3740     ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStore()->GetVolatile()));
3741     // Post
3742     if (inst->CastToStore()->GetNeedBarrier()) {
3743         ctor->CreatePostWRB(inst, srcPtr, offset, value);
3744     }
3745 }
3746 
3747 void LLVMIrConstructor::VisitStoreNative(GraphVisitor *v, Inst *inst)
3748 {
3749     inst->SetOpcode(Opcode::Store);
3750     VisitStore(v, inst);
3751 }
3752 
3753 void LLVMIrConstructor::VisitLoadI(GraphVisitor *v, Inst *inst)
3754 {
3755     auto ctor = static_cast<LLVMIrConstructor *>(v);
3756     auto srcPtr = ctor->GetInputValue(inst, 0);
3757     auto index = inst->CastToLoadI()->GetImm();
3758 
3759     ASSERT(srcPtr->getType()->isPointerTy());
3760     auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);
3761 
3762     auto n = ctor->CreateLoadWithOrdering(inst, ptrPlus, ToAtomicOrdering(inst->CastToLoadI()->GetVolatile()));
3763     ctor->ValueMapAdd(inst, n);
3764 }
3765 
3766 void LLVMIrConstructor::VisitStoreI(GraphVisitor *v, Inst *inst)
3767 {
3768     auto ctor = static_cast<LLVMIrConstructor *>(v);
3769     auto srcPtr = ctor->GetInputValue(inst, 0);
3770     auto value = ctor->GetInputValue(inst, 1);
3771 
3772     auto index = inst->CastToStoreI()->GetImm();
3773     ASSERT(srcPtr->getType()->isPointerTy());
3774     auto ptrPlus = ctor->builder_.CreateConstInBoundsGEP1_64(ctor->builder_.getInt8Ty(), srcPtr, index);
3775 
3776     // Pre
3777     if (inst->CastToStoreI()->GetNeedBarrier()) {
3778         ctor->CreatePreWRB(inst, ptrPlus);
3779     }
3780     // Write
3781     ctor->CreateStoreWithOrdering(value, ptrPlus, ToAtomicOrdering(inst->CastToStoreI()->GetVolatile()));
3782     // Post
3783     if (inst->CastToStoreI()->GetNeedBarrier()) {
3784         ctor->CreatePostWRB(inst, srcPtr, ctor->builder_.getInt32(index), value);
3785     }
3786 }
3787 
3788 void LLVMIrConstructor::VisitLoadObject(GraphVisitor *v, Inst *inst)
3789 {
3790     auto ctor = static_cast<LLVMIrConstructor *>(v);
3791     auto obj = ctor->GetInputValue(inst, 0);
3792     ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3793 
3794     auto field = inst->CastToLoadObject()->GetObjField();
3795     auto dataOff = ctor->GetGraph()->GetRuntime()->GetFieldOffset(field);
3796     auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), obj, dataOff);
3797 
3798     auto n = ctor->CreateLoadWithOrdering(inst, ptrData, ToAtomicOrdering(inst->CastToLoadObject()->GetVolatile()));
3799     ctor->ValueMapAdd(inst, n);
3800 }
3801 
3802 void LLVMIrConstructor::VisitStoreObject(GraphVisitor *v, Inst *inst)
3803 {
3804     auto ctor = static_cast<LLVMIrConstructor *>(v);
3805     auto obj = ctor->GetInputValue(inst, 0);
3806     ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3807     auto value = ctor->GetInputValue(inst, 1);
3808 
3809     auto field = inst->CastToStoreObject()->GetObjField();
3810     auto dataOff = ctor->GetGraph()->GetRuntime()->GetFieldOffset(field);
3811 
3812     auto ptrData = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), obj, dataOff);
3813 
3814     // Pre
3815     if (inst->CastToStoreObject()->GetNeedBarrier()) {
3816         ctor->CreatePreWRB(inst, ptrData);
3817     }
3818     // Write
3819     ctor->CreateStoreWithOrdering(value, ptrData, ToAtomicOrdering(inst->CastToStoreObject()->GetVolatile()));
3820     // Post
3821     if (inst->CastToStoreObject()->GetNeedBarrier()) {
3822         ctor->CreatePostWRB(inst, obj, ctor->builder_.getInt32(dataOff), value);
3823     }
3824 }
3825 
3826 void LLVMIrConstructor::VisitResolveObjectField(GraphVisitor *v, Inst *inst)
3827 {
3828     auto ctor = static_cast<LLVMIrConstructor *>(v);
3829 
3830     auto typeId = ctor->builder_.getInt32(inst->CastToResolveObjectField()->GetTypeId());
3831 
3832     auto entrypointId = RuntimeInterface::EntrypointId::GET_FIELD_OFFSET;
3833     auto offset = ctor->CreateEntrypointCall(entrypointId, inst, {ctor->GetMethodArgument(), typeId});
3834 
3835     ctor->ValueMapAdd(inst, offset);
3836 }
3837 
3838 void LLVMIrConstructor::VisitLoadResolvedObjectField(GraphVisitor *v, Inst *inst)
3839 {
3840     auto ctor = static_cast<LLVMIrConstructor *>(v);
3841     auto obj = ctor->GetInputValue(inst, 0);
3842     ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3843 
3844     auto offset = ctor->GetInputValue(inst, 1);
3845     auto ptrData = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), obj, offset);
3846 
3847     auto n = ctor->CreateLoadWithOrdering(inst, ptrData, LLVMArkInterface::VOLATILE_ORDER);
3848     ctor->ValueMapAdd(inst, n);
3849 }
3850 
3851 void LLVMIrConstructor::VisitStoreResolvedObjectField(GraphVisitor *v, Inst *inst)
3852 {
3853     auto ctor = static_cast<LLVMIrConstructor *>(v);
3854     auto obj = ctor->GetInputValue(inst, 0);
3855     ASSERT_TYPE(obj, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3856     auto value = ctor->GetInputValue(inst, 1);
3857 
3858     auto offset = ctor->GetInputValue(inst, 2);
3859     auto ptrData = ctor->builder_.CreateInBoundsGEP(ctor->builder_.getInt8Ty(), obj, offset);
3860 
3861     // Pre
3862     if (inst->CastToStoreResolvedObjectField()->GetNeedBarrier()) {
3863         ctor->CreatePreWRB(inst, ptrData);
3864     }
3865     // Write
3866     ctor->CreateStoreWithOrdering(value, ptrData, LLVMArkInterface::VOLATILE_ORDER);
3867     // Post
3868     if (inst->CastToStoreResolvedObjectField()->GetNeedBarrier()) {
3869         ctor->CreatePostWRB(inst, obj, offset, value);
3870     }
3871 }
3872 
3873 void LLVMIrConstructor::VisitResolveObjectFieldStatic(GraphVisitor *v, Inst *inst)
3874 {
3875     auto ctor = static_cast<LLVMIrConstructor *>(v);
3876     auto resolverInst = inst->CastToResolveObjectFieldStatic();
3877 
3878     auto entrypoint = RuntimeInterface::EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS;
3879 
3880     auto typeId = ctor->builder_.getInt32(resolverInst->GetTypeId());
3881     auto slotPtr = llvm::Constant::getNullValue(ctor->builder_.getPtrTy());
3882 
3883     auto ptrInt = ctor->CreateEntrypointCall(entrypoint, inst, {ctor->GetMethodArgument(), typeId, slotPtr});
3884     auto n = ctor->builder_.CreateIntToPtr(ptrInt, ctor->builder_.getPtrTy());
3885     ctor->ValueMapAdd(inst, n);
3886 }
3887 
3888 void LLVMIrConstructor::VisitLoadResolvedObjectFieldStatic(GraphVisitor *v, Inst *inst)
3889 {
3890     auto ctor = static_cast<LLVMIrConstructor *>(v);
3891     auto offset = ctor->GetInputValue(inst, 0);
3892 
3893     auto casted = ctor->builder_.CreateIntToPtr(offset, ctor->builder_.getPtrTy());
3894     auto n = ctor->CreateLoadWithOrdering(inst, casted, LLVMArkInterface::VOLATILE_ORDER);
3895     ctor->ValueMapAdd(inst, n);
3896 }
3897 
3898 void LLVMIrConstructor::VisitStoreResolvedObjectFieldStatic(GraphVisitor *v, Inst *inst)
3899 {
3900     auto ctor = static_cast<LLVMIrConstructor *>(v);
3901     [[maybe_unused]] auto storeInst = inst->CastToStoreResolvedObjectFieldStatic();
3902 
3903     ASSERT(!DataType::IsReference(inst->GetType()));
3904     ASSERT(!storeInst->GetNeedBarrier());
3905 
3906     auto value = ctor->GetInputValue(inst, 1);
3907     auto destPtr = ctor->GetInputValue(inst, 0);
3908 
3909     [[maybe_unused]] auto dtype = inst->GetType();
3910     ASSERT(value->getType()->getScalarSizeInBits() == DataType::GetTypeSize(dtype, ctor->GetGraph()->GetArch()));
3911     ctor->CreateStoreWithOrdering(value, destPtr, LLVMArkInterface::VOLATILE_ORDER);
3912 }
3913 
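// Bitcast lowering: pointer sources become integers via ptrtoint; integer sources targeting
// REFERENCE or POINTER become pointers (GC address space for references); other cases are a
// plain bitcast.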
3914 void LLVMIrConstructor::VisitBitcast(GraphVisitor *v, Inst *inst)
3915 {
3916     auto ctor = static_cast<LLVMIrConstructor *>(v);
3917     auto type = inst->GetType();
3918     auto llvmTargetType = ctor->GetExactType(type);
3919     auto input = ctor->GetInputValue(inst, 0);
3920     auto itype = inst->GetInputType(0);
3921 
3922     llvm::Value *n;
3923     if (itype == DataType::POINTER) {
3924         ASSERT(!llvmTargetType->isPointerTy());
3925         n = ctor->builder_.CreatePtrToInt(input, llvmTargetType);
3926     } else {
3927         if (type == DataType::REFERENCE) {
3928             n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
3929         } else if (type == DataType::POINTER) {
3930             n = ctor->builder_.CreateIntToPtr(input, ctor->builder_.getPtrTy());
3931         } else {
3932             n = ctor->builder_.CreateBitCast(input, llvmTargetType);
3933         }
3934     }
3935     ctor->ValueMapAdd(inst, n);
3936 }
3937 
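// Cast lowering: same-type casts pass the value through, float-to-int goes through
// CreateCastToInt, casts to BOOL are normalized to 0/1 in an i8, and the remaining cases use
// the LLVM cast opcode chosen by GetCastOp.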
3938 void LLVMIrConstructor::VisitCast(GraphVisitor *v, Inst *inst)
3939 {
3940     auto ctor = static_cast<LLVMIrConstructor *>(v);
3941     auto x = ctor->GetInputValue(inst, 0);
3942 
3943     auto type = inst->GetInputType(0);
3944     auto targetType = inst->GetType();
3945     auto llvmTargetType = ctor->GetExactType(targetType);
3946     // Do not cast if the Ark types or the LLVM types are already the same
3947     if (type == targetType || x->getType() == llvmTargetType) {
3948         ctor->ValueMapAdd(inst, x, false);
3949         return;
3950     }
3951 
3952     if (DataType::IsFloatType(type) && IsInteger(targetType)) {
3953         // float to int, e.g. F64TOI32, F32TOI64, F64TOU32, F32TOU64
3954         auto n = ctor->CreateCastToInt(inst);
3955         ctor->ValueMapAdd(inst, n);
3956         return;
3957     }
3958     auto op = ctor->GetCastOp(type, targetType);
3959     if (targetType == DataType::BOOL) {
3960         ASSERT(op == llvm::Instruction::Trunc);
3961         auto u1 = ctor->builder_.CreateIsNotNull(x, CreateNameForInst(inst));
3962         auto n = ctor->builder_.CreateZExt(u1, ctor->builder_.getInt8Ty());
3963         ctor->ValueMapAdd(inst, n, false);
3964         return;
3965     }
3966     auto n = ctor->builder_.CreateCast(op, x, llvmTargetType);
3967     ctor->ValueMapAdd(inst, n);
3968 }
3969 
3970 void LLVMIrConstructor::VisitAnd(GraphVisitor *v, Inst *inst)
3971 {
3972     auto ctor = static_cast<LLVMIrConstructor *>(v);
3973     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::And);
3974     ctor->ValueMapAdd(inst, n);
3975 }
3976 
3977 void LLVMIrConstructor::VisitAndI(GraphVisitor *v, Inst *inst)
3978 {
3979     auto ctor = static_cast<LLVMIrConstructor *>(v);
3980     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::And, inst->CastToAndI()->GetImm());
3981     ctor->ValueMapAdd(inst, n);
3982 }
3983 
3984 void LLVMIrConstructor::VisitOr(GraphVisitor *v, Inst *inst)
3985 {
3986     auto ctor = static_cast<LLVMIrConstructor *>(v);
3987     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Or);
3988     ctor->ValueMapAdd(inst, n);
3989 }
3990 
3991 void LLVMIrConstructor::VisitOrI(GraphVisitor *v, Inst *inst)
3992 {
3993     auto ctor = static_cast<LLVMIrConstructor *>(v);
3994     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Or, inst->CastToOrI()->GetImm());
3995     ctor->ValueMapAdd(inst, n);
3996 }
3997 
3998 void LLVMIrConstructor::VisitXor(GraphVisitor *v, Inst *inst)
3999 {
4000     auto ctor = static_cast<LLVMIrConstructor *>(v);
4001     auto n = ctor->CreateBinaryOp(inst, llvm::Instruction::Xor);
4002     ctor->ValueMapAdd(inst, n);
4003 }
4004 
4005 void LLVMIrConstructor::VisitXorI(GraphVisitor *v, Inst *inst)
4006 {
4007     auto ctor = static_cast<LLVMIrConstructor *>(v);
4008     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Xor, inst->CastToXorI()->GetImm());
4009     ctor->ValueMapAdd(inst, n);
4010 }
4011 
4012 void LLVMIrConstructor::VisitShl(GraphVisitor *v, Inst *inst)
4013 {
4014     auto ctor = static_cast<LLVMIrConstructor *>(v);
4015     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::Shl);
4016     ctor->ValueMapAdd(inst, n);
4017 }
4018 
4019 void LLVMIrConstructor::VisitShlI(GraphVisitor *v, Inst *inst)
4020 {
4021     auto ctor = static_cast<LLVMIrConstructor *>(v);
4022     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Shl, inst->CastToShlI()->GetImm());
4023     ctor->ValueMapAdd(inst, n);
4024 }
4025 
4026 void LLVMIrConstructor::VisitShr(GraphVisitor *v, Inst *inst)
4027 {
4028     auto ctor = static_cast<LLVMIrConstructor *>(v);
4029     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::LShr);
4030     ctor->ValueMapAdd(inst, n);
4031 }
4032 
4033 void LLVMIrConstructor::VisitShrI(GraphVisitor *v, Inst *inst)
4034 {
4035     auto ctor = static_cast<LLVMIrConstructor *>(v);
4036     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::LShr, inst->CastToShrI()->GetImm());
4037     ctor->ValueMapAdd(inst, n);
4038 }
4039 
4040 void LLVMIrConstructor::VisitAShr(GraphVisitor *v, Inst *inst)
4041 {
4042     auto ctor = static_cast<LLVMIrConstructor *>(v);
4043     auto n = ctor->CreateShiftOp(inst, llvm::Instruction::AShr);
4044     ctor->ValueMapAdd(inst, n);
4045 }
4046 
4047 void LLVMIrConstructor::VisitAShrI(GraphVisitor *v, Inst *inst)
4048 {
4049     auto ctor = static_cast<LLVMIrConstructor *>(v);
4050     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::AShr, inst->CastToAShrI()->GetImm());
4051     ctor->ValueMapAdd(inst, n);
4052 }
4053 
4054 void LLVMIrConstructor::VisitAdd(GraphVisitor *v, Inst *inst)
4055 {
4056     auto ctor = static_cast<LLVMIrConstructor *>(v);
4057     llvm::Value *n;
4058     if (IsFloatType(inst->GetType())) {
4059         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FAdd);
4060     } else if (IsTypeNumeric(inst->GetType())) {
4061         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Add);
4062     } else {
4063         UNREACHABLE();
4064     }
4065     ctor->ValueMapAdd(inst, n);
4066 }
4067 
4068 void LLVMIrConstructor::VisitAddI(GraphVisitor *v, Inst *inst)
4069 {
4070     auto ctor = static_cast<LLVMIrConstructor *>(v);
4071     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Add, inst->CastToAddI()->GetImm());
4072     ctor->ValueMapAdd(inst, n);
4073 }
4074 
4075 void LLVMIrConstructor::VisitSub(GraphVisitor *v, Inst *inst)
4076 {
4077     auto ctor = static_cast<LLVMIrConstructor *>(v);
4078     llvm::Value *n;
4079     if (IsFloatType(inst->GetType())) {
4080         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FSub);
4081     } else if (IsTypeNumeric(inst->GetType())) {
4082         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Sub);
4083     } else {
4084         UNREACHABLE();
4085     }
4086     ctor->ValueMapAdd(inst, n);
4087 }
4088 
4089 void LLVMIrConstructor::VisitSubI(GraphVisitor *v, Inst *inst)
4090 {
4091     auto ctor = static_cast<LLVMIrConstructor *>(v);
4092     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Sub, inst->CastToSubI()->GetImm());
4093     ctor->ValueMapAdd(inst, n);
4094 }
4095 
4096 void LLVMIrConstructor::VisitMul(GraphVisitor *v, Inst *inst)
4097 {
4098     auto ctor = static_cast<LLVMIrConstructor *>(v);
4099     llvm::Value *n;
4100     if (IsFloatType(inst->GetType())) {
4101         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FMul);
4102     } else if (IsTypeNumeric(inst->GetType())) {
4103         n = ctor->CreateBinaryOp(inst, llvm::Instruction::Mul);
4104     } else {
4105         UNREACHABLE();
4106     }
4107     ctor->ValueMapAdd(inst, n);
4108 }
4109 
4110 void LLVMIrConstructor::VisitMulI(GraphVisitor *v, Inst *inst)
4111 {
4112     auto ctor = static_cast<LLVMIrConstructor *>(v);
4113     auto n = ctor->CreateBinaryImmOp(inst, llvm::Instruction::Mul, inst->CastToMulI()->GetImm());
4114     ctor->ValueMapAdd(inst, n);
4115 }
4116 
4117 void LLVMIrConstructor::VisitDiv(GraphVisitor *v, Inst *inst)
4118 {
4119     auto ctor = static_cast<LLVMIrConstructor *>(v);
4120     auto type = inst->GetType();
4121     llvm::Value *n;
4122     if (IsFloatType(type)) {
4123         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FDiv);
4124     } else if (IsInteger(type)) {
4125         if (IsSignedInteger(type)) {
4126             n = ctor->CreateSignDivMod(inst, llvm::Instruction::SDiv);
4127         } else {
4128             n = ctor->CreateBinaryOp(inst, llvm::Instruction::UDiv);
4129         }
4130     } else {
4131         UNREACHABLE();
4132     }
4133     ctor->ValueMapAdd(inst, n);
4134 }
4135 
4136 void LLVMIrConstructor::VisitMod(GraphVisitor *v, Inst *inst)
4137 {
4138     auto ctor = static_cast<LLVMIrConstructor *>(v);
4139     auto type = inst->GetType();
4140     llvm::Value *n;
4141     if (IsFloatType(type)) {
4142         n = ctor->CreateBinaryOp(inst, llvm::Instruction::FRem);
4143     } else if (IsInteger(type)) {
4144         if (IsSignedInteger(type)) {
4145             n = ctor->CreateSignDivMod(inst, llvm::Instruction::SRem);
4146         } else {
4147             n = ctor->CreateBinaryOp(inst, llvm::Instruction::URem);
4148         }
4149     } else {
4150         UNREACHABLE();
4151     }
4152     ctor->ValueMapAdd(inst, n);
4153 }
4154 
4155 void LLVMIrConstructor::VisitMin(GraphVisitor *v, Inst *inst)
4156 {
4157     ASSERT(g_options.IsCompilerEncodeIntrinsics());
4158     auto ctor = static_cast<LLVMIrConstructor *>(v);
4159     auto operType = inst->CastToMin()->GetType();
4160     llvm::Value *x = ctor->GetInputValue(inst, 0);
4161     llvm::Value *y = ctor->GetInputValue(inst, 1);
4162     llvm::Intrinsic::ID llvmId = 0;
4163 
4164     if (DataType::IsFloatType(operType)) {
4165         llvmId = llvm::Intrinsic::minimum;
4166     } else if (IsInteger(operType)) {
4167         llvmId = DataType::IsTypeSigned(operType) ? llvm::Intrinsic::smin : llvm::Intrinsic::umin;
4168     } else {
4169         ASSERT_DO(false, (std::cerr << "Min is not supported for type " << DataType::ToString(operType) << std::endl));
4170         UNREACHABLE();
4171     }
4172     auto min = ctor->builder_.CreateBinaryIntrinsic(llvmId, x, y);
4173     ctor->ValueMapAdd(inst, min);
4174 }
4175 
4176 void LLVMIrConstructor::VisitMax(GraphVisitor *v, Inst *inst)
4177 {
4178     ASSERT(g_options.IsCompilerEncodeIntrinsics());
4179     auto ctor = static_cast<LLVMIrConstructor *>(v);
4180     auto operType = inst->CastToMax()->GetType();
4181     llvm::Value *x = ctor->GetInputValue(inst, 0);
4182     llvm::Value *y = ctor->GetInputValue(inst, 1);
4183     llvm::Intrinsic::ID llvmId = 0;
4184 
4185     if (DataType::IsFloatType(operType)) {
4186         llvmId = llvm::Intrinsic::maximum;
4187     } else if (IsInteger(operType)) {
4188         llvmId = DataType::IsTypeSigned(operType) ? llvm::Intrinsic::smax : llvm::Intrinsic::umax;
4189     } else {
4190         ASSERT_DO(false, (std::cerr << "Max is not supported for type " << DataType::ToString(operType) << std::endl));
4191         UNREACHABLE();
4192     }
4193     auto max = ctor->builder_.CreateBinaryIntrinsic(llvmId, x, y);
4194     ctor->ValueMapAdd(inst, max);
4195 }
4196 
4197 void LLVMIrConstructor::VisitCompare(GraphVisitor *v, Inst *inst)
4198 {
4199     auto ctor = static_cast<LLVMIrConstructor *>(v);
4200     auto compareInst = inst->CastToCompare();
4201     auto operandsType = compareInst->GetOperandsType();
4202 
4203     llvm::Value *x = ctor->GetInputValue(inst, 0);
4204     llvm::Value *y = ctor->GetInputValue(inst, 1);
4205 
4206     llvm::Value *n = nullptr;
4207     if (IsInteger(operandsType) || DataType::IsReference(operandsType)) {
4208         n = ctor->CreateCondition(compareInst->GetCc(), x, y);
4209     } else {
4210         n = ctor->builder_.CreateFCmp(FCmpCodeConvert(compareInst->GetCc()), x, y);
4211     }
4212     ctor->ValueMapAdd(inst, n);
4213 }
4214 
4215 void LLVMIrConstructor::VisitCmp(GraphVisitor *v, Inst *inst)
4216 {
4217     auto ctor = static_cast<LLVMIrConstructor *>(v);
4218     CmpInst *cmpInst = inst->CastToCmp();
4219     DataType::Type operandsType = cmpInst->GetOperandsType();
4220 
4221     auto x = ctor->GetInputValue(inst, 0);
4222     auto y = ctor->GetInputValue(inst, 1);
4223     llvm::Value *n;
4224     if (DataType::IsFloatType(operandsType)) {
4225         n = ctor->CreateFloatComparison(cmpInst, x, y);
4226     } else if (IsInteger(operandsType)) {
4227         n = ctor->CreateIntegerComparison(cmpInst, x, y);
4228     } else {
4229         ASSERT_DO(false, (std::cerr << "Unsupported comparison for operands of type = "
4230                                     << DataType::ToString(operandsType) << std::endl));
4231         UNREACHABLE();
4232     }
4233     ctor->ValueMapAdd(inst, n);
4234 }
4235 
4236 void LLVMIrConstructor::VisitNeg(GraphVisitor *v, Inst *inst)
4237 {
4238     auto ctor = static_cast<LLVMIrConstructor *>(v);
4239     auto inputType = inst->GetInputType(0);
4240     auto toNegate = ctor->GetInputValue(inst, 0);
4241     llvm::Value *n;
4242     if (inputType == DataType::Type::FLOAT64 || inputType == DataType::Type::FLOAT32) {
4243         n = ctor->builder_.CreateFNeg(toNegate);
4244     } else if (IsInteger(inputType)) {
4245         n = ctor->builder_.CreateNeg(toNegate);
4246     } else {
4247         ASSERT_DO(false, (std::cerr << "Negation is not supported for " << DataType::ToString(inputType) << std::endl));
4248         UNREACHABLE();
4249     }
4250     ctor->ValueMapAdd(inst, n);
4251 }
4252 
4253 void LLVMIrConstructor::VisitNot(GraphVisitor *v, Inst *inst)
4254 {
4255     ASSERT(IsInteger(inst->GetInputType(0)));
4256 
4257     auto ctor = static_cast<LLVMIrConstructor *>(v);
4258     auto input = ctor->GetInputValue(inst, 0);
4259 
4260     auto notOperator = ctor->builder_.CreateNot(input);
4261     ctor->ValueMapAdd(inst, notOperator);
4262 }
4263 
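// IfImm lowering: the frequent "bool != 0" pattern becomes a truncation to i1; other immediates
// are materialized as integer or null-pointer constants and compared with the requested
// condition code before branching.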
4264 void LLVMIrConstructor::VisitIfImm(GraphVisitor *v, Inst *inst)
4265 {
4266     auto ctor = static_cast<LLVMIrConstructor *>(v);
4267     auto x = ctor->GetInputValue(inst, 0);
4268     auto ifimm = inst->CastToIfImm();
4269 
4270     llvm::Value *cond = nullptr;
4271     if (ifimm->GetCc() == ConditionCode::CC_NE && ifimm->GetImm() == 0 && x->getType()->isIntegerTy()) {
4272         ASSERT(ifimm->GetOperandsType() == DataType::BOOL);
4273         cond = ctor->builder_.CreateTrunc(x, ctor->builder_.getInt1Ty());
4274     } else {
4275         ASSERT(x->getType()->isIntOrPtrTy());
4276         llvm::Constant *immCst;
4277         if (x->getType()->isPointerTy()) {
4278             if (ifimm->GetImm() == 0) {
4279                 immCst = llvm::ConstantPointerNull::get(llvm::cast<llvm::PointerType>(x->getType()));
4280             } else {
4281                 immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
4282                 immCst = llvm::ConstantExpr::getPointerCast(immCst, x->getType());
4283             }
4284         } else {
4285             immCst = llvm::ConstantInt::getSigned(x->getType(), ifimm->GetImm());
4286         }
4287         cond = ctor->CreateCondition(ifimm->GetCc(), x, immCst);
4288     }
4289     ctor->CreateIf(inst, cond, ifimm->IsLikely(), ifimm->IsUnlikely());
4290 }
4291 
4292 void LLVMIrConstructor::VisitIf(GraphVisitor *v, Inst *inst)
4293 {
4294     auto ctor = static_cast<LLVMIrConstructor *>(v);
4295     auto x = ctor->GetInputValue(inst, 0);
4296     auto y = ctor->GetInputValue(inst, 1);
4297     ASSERT(x->getType()->isIntOrPtrTy());
4298     ASSERT(y->getType()->isIntOrPtrTy());
4299     auto ifi = inst->CastToIf();
4300     auto cond = ctor->CreateCondition(ifi->GetCc(), x, y);
4301     ctor->CreateIf(inst, cond, ifi->IsLikely(), ifi->IsUnlikely());
4302 }
4303 
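// CallIndirect lowering: input 0 is the callee address; the remaining inputs supply both the
// argument values and the parameter types of the synthesized FunctionType.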
4304 void LLVMIrConstructor::VisitCallIndirect(GraphVisitor *v, Inst *inst)
4305 {
4306     auto ctor = static_cast<LLVMIrConstructor *>(v);
4307     auto ptr = ctor->GetInputValue(inst, 0);
4308     ASSERT_TYPE(ptr, ctor->builder_.getPtrTy());
4309     // Build FunctionType
4310     ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
4311     ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
4312     for (size_t i = 1; i < inst->GetInputs().Size(); ++i) {
4313         argTypes.push_back(ctor->GetType(inst->GetInput(i).GetInst()->GetType()));
4314         args.push_back(ctor->GetInputValue(inst, i));
4315     }
4316     auto retType = ctor->GetType(inst->GetType());
4317     auto funcType = llvm::FunctionType::get(retType, argTypes, false);
4318     auto call = ctor->builder_.CreateCall(funcType, ptr, args);
4319     if (!retType->isVoidTy()) {
4320         ctor->ValueMapAdd(inst, call);
4321     }
4322 }
4323 
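// Call lowering (unmanaged code only): an external declaration is created on first use, the
// arguments are coerced to its parameter types, and no-alias Irtoc functions get a NoAlias
// return attribute.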
4324 void LLVMIrConstructor::VisitCall(GraphVisitor *v, Inst *inst)
4325 {
4326     auto ctor = static_cast<LLVMIrConstructor *>(v);
4327     ASSERT(!ctor->GetGraph()->SupportManagedCode());
4328 
4329     // Prepare external call if needed
4330     auto externalId = inst->CastToCall()->GetCallMethodId();
4331     auto runtime = ctor->GetGraph()->GetRuntime();
4332     auto externalName = runtime->GetExternalMethodName(ctor->GetGraph()->GetMethod(), externalId);
4333     auto function = ctor->func_->getParent()->getFunction(externalName);
4334     if (function == nullptr) {
4335         ArenaVector<llvm::Type *> argTypes(ctor->GetGraph()->GetLocalAllocator()->Adapter());
4336         for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
4337             argTypes.push_back(ctor->GetType(inst->GetInputType(i)));
4338         }
4339         auto ftype = llvm::FunctionType::get(ctor->GetType(inst->GetType()), argTypes, false);
4340         function =
4341             llvm::Function::Create(ftype, llvm::Function::ExternalLinkage, externalName, ctor->func_->getParent());
4342     }
4343     // Arguments
4344     ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
4345     for (size_t i = 0; i < inst->GetInputs().Size(); ++i) {
4346         args.push_back(ctor->CoerceValue(ctor->GetInputValue(inst, i), function->getArg(i)->getType()));
4347     }
4348     // Call
4349     auto call = ctor->builder_.CreateCall(function->getFunctionType(), function, args);
4350 
4351     if (IsNoAliasIrtocFunction(externalName)) {
4352         ASSERT(call->getType()->isPointerTy());
4353         call->addRetAttr(llvm::Attribute::NoAlias);
4354     } else {
4355         ASSERT(call->getType()->isPointerTy() ^ !IsPtrIgnIrtocFunction(externalName));
4356     }
4357 
4358     // Check if function has debug info
4359     if (function->getSubprogram() != nullptr) {
4360         ctor->debugData_->SetLocation(call, inst->GetPc());
4361     }
4362 
4363     if (inst->GetType() != DataType::VOID) {
4364         ctor->ValueMapAdd(inst, ctor->CoerceValue(call, ctor->GetType(inst->GetType())));
4365     }
4366 }
4367 
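// Phi lowering: the insert point is moved to the first non-PHI instruction so that the new PHI
// is grouped with the existing PHIs rather than emitted after coercion code from ValueMapAdd.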
4368 void LLVMIrConstructor::VisitPhi(GraphVisitor *v, Inst *inst)
4369 {
4370     auto ctor = static_cast<LLVMIrConstructor *>(v);
4371     auto ltype = ctor->GetExactType(inst->GetType());
4372     auto block = ctor->GetCurrentBasicBlock();
4373 
4374     // PHIs need an adjusted insert point if ValueMapAdd has already created coerced values for other PHIs
4375     auto nonPhi = block->getFirstNonPHI();
4376     if (nonPhi != nullptr) {
4377         ctor->builder_.SetInsertPoint(nonPhi);
4378     }
4379 
4380     auto phi = ctor->builder_.CreatePHI(ltype, inst->GetInputsCount());
4381     ctor->SetCurrentBasicBlock(block);
4382     ctor->ValueMapAdd(inst, phi);
4383 }
4384 
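// MultiArray lowering: dimension sizes are widened to INT64 and stored into a stack buffer, and
// the buffer is passed with the class and dimension count to the CREATE_MULTI_ARRAY entrypoint.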
4385 void LLVMIrConstructor::VisitMultiArray(GraphVisitor *v, Inst *inst)
4386 {
4387     auto ctor = static_cast<LLVMIrConstructor *>(v);
4388 
4389     ArenaVector<llvm::Value *> args(ctor->GetGraph()->GetLocalAllocator()->Adapter());
4390     args.push_back(ctor->GetInputValue(inst, 0));
4391 
4392     auto sizesCount = inst->GetInputsCount() - 2U;
4393     args.push_back(ctor->builder_.getInt32(sizesCount));
4394     auto sizes = ctor->CreateAllocaForArgs(ctor->builder_.getInt64Ty(), sizesCount);
4395 
4396     // Store multi-array sizes
4397     for (size_t i = 1; i <= sizesCount; i++) {
4398         auto size = ctor->GetInputValue(inst, i);
4399 
4400         auto type = inst->GetInputType(i);
4401         if (type != DataType::INT64) {
4402             size = ctor->CoerceValue(size, type, DataType::INT64);
4403         }
4404 
4405         auto gep = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt64Ty(), sizes, i - 1);
4406         ctor->builder_.CreateStore(size, gep);
4407     }
4408     args.push_back(sizes);
4409 
4410     auto entrypointId = RuntimeInterface::EntrypointId::CREATE_MULTI_ARRAY;
4411     auto result = ctor->CreateEntrypointCall(entrypointId, inst, args);
4412     ctor->MarkAsAllocation(result);
4413     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
4414         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
4415     }
4416     ctor->ValueMapAdd(inst, result);
4417 }
4418 
4419 void LLVMIrConstructor::VisitInitEmptyString(GraphVisitor *v, Inst *inst)
4420 {
4421     auto ctor = static_cast<LLVMIrConstructor *>(v);
4422     auto eid = RuntimeInterface::EntrypointId::CREATE_EMPTY_STRING;
4423     auto result = ctor->CreateEntrypointCall(eid, inst);
4424     ctor->MarkAsAllocation(result);
4425     ctor->ValueMapAdd(inst, result);
4426 }
4427 
4428 void LLVMIrConstructor::VisitInitString(GraphVisitor *v, Inst *inst)
4429 {
4430     auto ctor = static_cast<LLVMIrConstructor *>(v);
4431     auto strInit = inst->CastToInitString();
4432     auto arg = ctor->GetInputValue(inst, 0);
4433     if (strInit->IsFromString()) {
4434         auto result = ctor->CreateNewStringFromStringTlab(inst, arg);
4435         ctor->ValueMapAdd(inst, result);
4436     } else {
4437         auto lengthOffset = ctor->GetGraph()->GetRuntime()->GetArrayLengthOffset(ctor->GetGraph()->GetArch());
4438         auto lengthPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), arg, lengthOffset);
4439         auto length = ctor->builder_.CreateLoad(ctor->builder_.getInt32Ty(), lengthPtr);
4440         auto result = ctor->CreateNewStringFromCharsTlab(
4441             inst, llvm::Constant::getNullValue(ctor->builder_.getInt32Ty()), length, arg);
4442         ctor->ValueMapAdd(inst, result);
4443     }
4444 }
4445 
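// NewArray lowering: allocation normally goes through the TLAB fast path; it falls back to the
// runtime when the TLAB is disabled or a constant length is already known not to fit.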
4446 void LLVMIrConstructor::VisitNewArray(GraphVisitor *v, Inst *inst)
4447 {
4448     auto ctor = static_cast<LLVMIrConstructor *>(v);
4449     auto method = inst->CastToNewArray()->GetMethod();
4450 
4451     auto type = ctor->GetInputValue(inst, 0);
4452     auto size = ctor->ToSizeT(ctor->GetInputValue(inst, 1));
4453     auto arrayType = inst->CastToNewArray()->GetTypeId();
4454     auto runtime = ctor->GetGraph()->GetRuntime();
4455     auto maxTlabSize = runtime->GetTLABMaxSize();
4456     if (maxTlabSize == 0) {
4457         auto result = ctor->CreateNewArrayWithRuntime(inst);
4458         ctor->ValueMapAdd(inst, result);
4459         return;
4460     }
4461 
4462     auto lenInst = inst->GetDataFlowInput(0);
4463     auto classArraySize = runtime->GetClassArraySize(ctor->GetGraph()->GetArch());
4464     uint64_t arraySize = 0;
4465     uint64_t elementSize = runtime->GetArrayElementSize(method, arrayType);
4466     uint64_t alignment = runtime->GetTLABAlignment();
4467     ASSERT(alignment != 0);
4468 
4469     if (lenInst->GetOpcode() == Opcode::Constant) {
4470         ASSERT(lenInst->GetType() == DataType::INT64);
4471         arraySize = lenInst->CastToConstant()->GetIntValue() * elementSize + classArraySize;
4472         arraySize = (arraySize & ~(alignment - 1U)) + ((arraySize % alignment) != 0U ? alignment : 0U);
4473         if (arraySize > maxTlabSize) {
4474             auto result = ctor->CreateNewArrayWithRuntime(inst);
4475             ctor->ValueMapAdd(inst, result);
4476             return;
4477         }
4478     }
4479     auto eid = GetAllocateArrayTlabEntrypoint(elementSize);
4480     auto result = ctor->CreateFastPathCall(inst, eid, {type, size});
4481     ctor->MarkAsAllocation(result);
4482     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
4483         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
4484     }
4485     ctor->ValueMapAdd(inst, result);
4486 }
4487 
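// NewObject lowering: the TLAB fast path is used only when the class comes from a known
// LoadAndInitClass, may be TLAB-allocated, and its aligned size fits into the TLAB; otherwise
// the object is allocated through the runtime.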
4488 void LLVMIrConstructor::VisitNewObject(GraphVisitor *v, Inst *inst)
4489 {
4490     auto ctor = static_cast<LLVMIrConstructor *>(v);
4491 
4492     auto newObjInst = inst->CastToNewObject();
4493     auto srcInst = newObjInst->GetInput(0).GetInst();
4494 
4495     auto runtime = ctor->GetGraph()->GetRuntime();
4496     auto maxTlabSize = runtime->GetTLABMaxSize();
4497     if (maxTlabSize == 0 || srcInst->GetOpcode() != Opcode::LoadAndInitClass) {
4498         auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
4499         ctor->ValueMapAdd(inst, runtimeCall);
4500         return;
4501     }
4502 
4503     auto klass = srcInst->CastToLoadAndInitClass()->GetClass();
4504     if (klass == nullptr || !runtime->CanUseTlabForClass(klass)) {
4505         auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
4506         ctor->ValueMapAdd(inst, runtimeCall);
4507         return;
4508     }
4509     auto classSize = runtime->GetClassSize(klass);
4510     auto alignment = runtime->GetTLABAlignment();
4511     ASSERT(alignment != 0);
4512 
4513     classSize = (classSize & ~(alignment - 1U)) + ((classSize % alignment) != 0U ? alignment : 0U);
4514     if (classSize > maxTlabSize) {
4515         auto runtimeCall = ctor->CreateNewObjectWithRuntime(inst);
4516         ctor->ValueMapAdd(inst, runtimeCall);
4517         return;
4518     }
4519 
4520     auto initClass = ctor->GetInputValue(inst, 0);
4521     auto klassSize = ctor->ToSizeT(ctor->builder_.getInt32(classSize));
4522     auto eid = RuntimeInterface::EntrypointId::ALLOCATE_OBJECT_TLAB;
4523     auto result = ctor->CreateFastPathCall(inst, eid, {initClass, klassSize});
4524     ctor->MarkAsAllocation(result);
4525     if (inst->GetFlag(inst_flags::MEM_BARRIER)) {
4526         result->addFnAttr(llvm::Attribute::get(result->getContext(), "needs-mem-barrier"));
4527     }
4528     ctor->ValueMapAdd(inst, result);
4529 }
4530 
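// CallStatic lowering: inlined calls emit nothing; otherwise the callee declaration is created or
// reused and called with a save-state bundle, and external or always-throwing call sites are
// marked noinline.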
4531 void LLVMIrConstructor::VisitCallStatic(GraphVisitor *v, Inst *inst)
4532 {
4533     auto call = inst->CastToCallStatic();
4534     if (call->IsInlined()) {
4535         return;
4536     }
4537 
4538     auto ctor = static_cast<LLVMIrConstructor *>(v);
4539     auto methodPtr = ctor->GetGraph()->GetMethod();
4540     auto methodId = call->GetCallMethodId();
4541     auto callee = ctor->GetGraph()->GetRuntime()->GetMethodById(methodPtr, methodId);
4542     ASSERT(callee != nullptr);
4543     // Create a declaration if we haven't seen this function yet
4544     auto function = ctor->GetOrCreateFunctionForCall(call, callee);
4545     ctor->arkInterface_->RememberFunctionCall(ctor->func_, function, methodId);
4546 
4547     // Replaced with the real callee in PandaRuntimeLowering
4548     auto args = ctor->GetArgumentsForCall(ctor->GetMethodArgument(), call);
4549     auto result = ctor->builder_.CreateCall(function, args, ctor->CreateSaveStateBundle(inst));
4550     ctor->WrapArkCall(inst, result);
4551 
4552     if (inst->GetType() != DataType::VOID) {
4553         ctor->ValueMapAdd(inst, result);
4554     }
4555 
4556     if (ctor->GetGraph()->GetRuntime()->IsMethodExternal(methodPtr, callee)) {
4557         result->addAttributeAtIndex(llvm::AttributeList::FunctionIndex, llvm::Attribute::NoInline);
4558     }
4559     if (IsAlwaysThrowBasicBlock(inst)) {
4560         result->addAttributeAtIndex(llvm::AttributeList::FunctionIndex, llvm::Attribute::NoInline);
4561         result->addFnAttr(llvm::Attribute::get(ctor->func_->getContext(), "keep-noinline"));
4562     }
4563 }
4564 
4565 void LLVMIrConstructor::VisitResolveStatic(GraphVisitor *v, Inst *inst)
4566 {
4567     auto ctor = static_cast<LLVMIrConstructor *>(v);
4568     auto call = inst->CastToResolveStatic();
4569 
4570     auto slotPtr = llvm::Constant::getNullValue(ctor->builder_.getPtrTy());
4571     auto methodPtr = ctor->CreateEntrypointCall(
4572         RuntimeInterface::EntrypointId::GET_UNKNOWN_CALLEE_METHOD, inst,
4573         {ctor->GetMethodArgument(), ctor->ToSizeT(ctor->builder_.getInt32(call->GetCallMethodId())), slotPtr});
4574     auto method = ctor->builder_.CreateIntToPtr(methodPtr, ctor->builder_.getPtrTy());
4575 
4576     ctor->ValueMapAdd(inst, method);
4577 }
4578 
4579 void LLVMIrConstructor::VisitCallResolvedStatic(GraphVisitor *v, Inst *inst)
4580 {
4581     auto ctor = static_cast<LLVMIrConstructor *>(v);
4582     auto call = inst->CastToCallResolvedStatic();
4583     if (call->IsInlined()) {
4584         return;
4585     }
4586 
4587     auto method = ctor->GetInputValue(inst, 0);
4588 
4589     llvm::FunctionType *fType = ctor->GetFunctionTypeForCall(call);
4590     auto args = ctor->GetArgumentsForCall(method, call, true);  // skip first input
4591 
4592     auto offset = ctor->GetGraph()->GetRuntime()->GetCompiledEntryPointOffset(ctor->GetGraph()->GetArch());
4593     auto entrypointPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), method, offset);
4594     auto entrypoint = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), entrypointPtr);
4595 
4596     auto result = ctor->builder_.CreateCall(fType, entrypoint, args, ctor->CreateSaveStateBundle(inst));
4597     if (inst->GetType() != DataType::VOID) {
4598         ctor->ValueMapAdd(inst, result);
4599     }
4600     ctor->WrapArkCall(inst, result);
4601 }
4602 
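// Creates a unique per-call-site function declaration for a virtual or interface call and
// registers it via PutVirtualFunction; the runtime function types needed for resolution are
// created up front.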
4603 template <typename T>
4604 llvm::Function *CreateDeclForVirtualCall(T *inst, LLVMIrConstructor *ctor, LLVMArkInterface *arkInterface)
4605 {
4606     arkInterface->GetOrCreateRuntimeFunctionType(
4607         ctor->GetFunc()->getContext(), ctor->GetFunc()->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
4608         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::RESOLVE_VIRTUAL_CALL_AOT));
4609     arkInterface->GetOrCreateRuntimeFunctionType(
4610         ctor->GetFunc()->getContext(), ctor->GetFunc()->getParent(), LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
4611         static_cast<LLVMArkInterface::EntrypointId>(RuntimeInterface::EntrypointId::INTF_INLINE_CACHE));
4612 
4613     auto methodPtr = ctor->GetGraph()->GetMethod();
4614     auto methodId = inst->GetCallMethodId();
4615     auto callee = ctor->GetGraph()->GetRuntime()->GetMethodById(methodPtr, methodId);
4616     ASSERT(callee != nullptr);
4617 
4618     std::stringstream ssUniqName;
4619     ssUniqName << "f_" << std::hex << inst;
4620     auto uniqName = ssUniqName.str();
4621     auto methodName = arkInterface->GetUniqMethodName(callee) + "_" + uniqName;
4622     auto functionProto = ctor->GetFunctionTypeForCall(inst);
4623     auto func = CreateFunctionDeclaration(functionProto, methodName, ctor->GetFunc()->getParent());
4624 
4625     func->addFnAttr("frame-pointer", "all");
4626     arkInterface->PutVirtualFunction(inst->GetCallMethod(), func);
4627     return func;
4628 }
4629 
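// Lowers a non-interface virtual call (relies on CHA): calls the per-call-site declaration and
// tags the call with the original method id.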
4630 void LLVMIrConstructor::VisitCallVirtual(GraphVisitor *v, Inst *inst)
4631 {
4632     auto ctor = static_cast<LLVMIrConstructor *>(v);
4633     auto call = inst->CastToCallVirtual();
4634     if (call->IsInlined()) {
4635         return;
4636     }
4637     ASSERT_PRINT(ctor->GetGraph()->GetAotData()->GetUseCha(),
4638                  std::string("GetUseCha must be 'true' but was 'false' for method = '") +
4639                      ctor->GetGraph()->GetRuntime()->GetMethodFullName(ctor->GetGraph()->GetMethod()) + "'");
4640 
4641     ASSERT(!ctor->GetGraph()->GetRuntime()->IsInterfaceMethod(call->GetCallMethod()));
4642     auto methodId = call->GetCallMethodId();
4643 
4644     auto func = CreateDeclForVirtualCall(call, ctor, ctor->arkInterface_);
4645     auto args = ctor->GetArgumentsForCall(ctor->GetMethodArgument(), call);
4646     auto result = ctor->builder_.CreateCall(func, args, ctor->CreateSaveStateBundle(inst));
4647     result->addFnAttr(llvm::Attribute::get(result->getContext(), "original-method-id", std::to_string(methodId)));
4648     if (inst->GetType() != DataType::VOID) {
4649         ctor->ValueMapAdd(inst, result);
4650     }
4651     ctor->WrapArkCall(inst, result);
4652 }
4653 
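// Resolves the target of a virtual call: uses the ResolveVirtual builtin when the callee method
// is unknown, or a per-call-site declaration for interface methods.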
4654 void LLVMIrConstructor::VisitResolveVirtual(GraphVisitor *v, Inst *inst)
4655 {
4656     auto ctor = static_cast<LLVMIrConstructor *>(v);
4657     auto resolver = inst->CastToResolveVirtual();
4658 
4659     llvm::Value *method = nullptr;
4660     if (resolver->GetCallMethod() == nullptr) {
4661         llvm::Value *thiz = ctor->GetInputValue(inst, 0);
4662         method = ctor->CreateResolveVirtualCallBuiltin(inst, thiz, resolver->GetCallMethodId());
4663         ASSERT(method->getType()->isPointerTy());
4664     } else {
4665         ASSERT(ctor->GetGraph()->GetRuntime()->IsInterfaceMethod(resolver->GetCallMethod()));
4666         method = CreateDeclForVirtualCall(resolver, ctor, ctor->arkInterface_);
4667     }
4668     ctor->ValueMapAdd(inst, method, false);
4669 }
4670 
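// Lowers a call through a resolved virtual method: either an indirect call via the compiled
// entrypoint or, for interface methods, a call to the per-call-site declaration.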
4671 void LLVMIrConstructor::VisitCallResolvedVirtual(GraphVisitor *v, Inst *inst)
4672 {
4673     auto ctor = static_cast<LLVMIrConstructor *>(v);
4674     auto call = inst->CastToCallResolvedVirtual();
4675     if (call->IsInlined()) {
4676         return;
4677     }
4678     auto runtime = ctor->GetGraph()->GetRuntime();
4679     auto method = ctor->GetInputValue(inst, 0);
4680     auto args = ctor->GetArgumentsForCall(method, call, true);
4681 
4682     llvm::CallInst *result = nullptr;
4683     if (call->GetCallMethod() == nullptr) {
4684         llvm::FunctionType *fType = ctor->GetFunctionTypeForCall(call);
4685 
4686         auto offset = runtime->GetCompiledEntryPointOffset(ctor->GetGraph()->GetArch());
4687         auto entrypointPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), method, offset);
4688         auto entrypoint = ctor->builder_.CreateLoad(ctor->builder_.getPtrTy(), entrypointPtr);
4689         result = ctor->builder_.CreateCall(fType, entrypoint, args, ctor->CreateSaveStateBundle(inst));
4690     } else {
4691         ASSERT(runtime->IsInterfaceMethod(call->GetCallMethod()));
4692         auto *func = llvm::cast<llvm::Function>(method);
4693         result = ctor->builder_.CreateCall(func, args, ctor->CreateSaveStateBundle(inst));
4694         auto methodId = call->GetCallMethodId();
4695         result->addFnAttr(llvm::Attribute::get(result->getContext(), "original-method-id", std::to_string(methodId)));
4696     }
4697     if (inst->GetType() != DataType::VOID) {
4698         ctor->ValueMapAdd(inst, result);
4699     }
4700     ctor->WrapArkCall(inst, result);
4701 }
4702 
4703 void LLVMIrConstructor::VisitAbs(GraphVisitor *v, Inst *inst)
4704 {
4705     ASSERT(g_options.IsCompilerEncodeIntrinsics());
4706     auto ctor = static_cast<LLVMIrConstructor *>(v);
4707 
4708     DataType::Type pandaType = inst->GetInputType(0);
4709     auto argument = ctor->GetInputValue(inst, 0);
4710     llvm::Value *result = nullptr;
4711     if (DataType::IsFloatType(pandaType)) {
4712         result = ctor->builder_.CreateUnaryIntrinsic(llvm::Intrinsic::fabs, argument, nullptr);
4713     } else if (IsInteger(pandaType)) {
4714         result = ctor->builder_.CreateBinaryIntrinsic(llvm::Intrinsic::abs, argument, ctor->builder_.getFalse());
4715     } else {
4716         ASSERT_DO(false, (std::cerr << "Abs is not supported for type " << DataType::ToString(pandaType) << std::endl));
4717         UNREACHABLE();
4718     }
4719     ASSERT(result != nullptr);
4720     ctor->ValueMapAdd(inst, result);
4721 }
4722 
4723 void LLVMIrConstructor::VisitIntrinsic(GraphVisitor *v, Inst *inst)
4724 {
4725     auto ctor = static_cast<LLVMIrConstructor *>(v);
4726     auto entryId = inst->CastToIntrinsic()->GetIntrinsicId();
4727 
4728     // Some Ark intrinsics are lowered into inline code or LLVM intrinsics; the IntrinsicsLowering pass
4729     // makes the final decision whether to lower them into code or to call the Ark entrypoint.
4730     if (g_options.IsCompilerEncodeIntrinsics()) {
4731         bool lowered = ctor->TryEmitIntrinsic(inst, entryId);
4732         if (lowered) {
4733             return;
4734         }
4735     }
4736     // Create call otherwise
4737     auto result = ctor->CreateIntrinsicCall(inst);
4738     if (inst->GetType() != DataType::VOID) {
4739         ctor->ValueMapAdd(inst, result);
4740     }
4741 }
4742 
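// Lowers monitor enter/exit to the corresponding fast-path entrypoint call using the ArkFast1
// calling convention.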
4743 void LLVMIrConstructor::VisitMonitor(GraphVisitor *v, Inst *inst)
4744 {
4745     auto ctor = static_cast<LLVMIrConstructor *>(v);
4746     MonitorInst *monitor = inst->CastToMonitor();
4747     auto object = ctor->GetInputValue(inst, 0);
4748     auto eid = monitor->IsEntry() ? RuntimeInterface::EntrypointId::MONITOR_ENTER_FAST_PATH
4749                                   : RuntimeInterface::EntrypointId::MONITOR_EXIT_FAST_PATH;
4750     auto call = ctor->CreateEntrypointCall(eid, inst, {object});
4751     ASSERT(call->getCallingConv() == llvm::CallingConv::C);
4752     call->setCallingConv(llvm::CallingConv::ArkFast1);
4753 }
4754 
4755 void LLVMIrConstructor::VisitSqrt(GraphVisitor *v, Inst *inst)
4756 {
4757     ASSERT(g_options.IsCompilerEncodeIntrinsics());
4758     auto ctor = static_cast<LLVMIrConstructor *>(v);
4759     auto argument = ctor->GetInputValue(inst, 0);
4760     auto result = ctor->builder_.CreateUnaryIntrinsic(llvm::Intrinsic::sqrt, argument, nullptr);
4761     ctor->ValueMapAdd(inst, result);
4762 }
4763 
4764 void LLVMIrConstructor::VisitInitClass(GraphVisitor *v, Inst *inst)
4765 {
4766     auto ctor = static_cast<LLVMIrConstructor *>(v);
4767     auto classId = inst->CastToInitClass()->GetTypeId();
4768 
4769     auto constexpr INITIALIZED = true;
4770     ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4771 }
4772 
4773 void LLVMIrConstructor::VisitLoadClass(GraphVisitor *v, Inst *inst)
4774 {
4775     auto ctor = static_cast<LLVMIrConstructor *>(v);
4776     auto classId = inst->CastToLoadClass()->GetTypeId();
4777 
4778     auto constexpr INITIALIZED = true;
4779     auto clsPtr = ctor->CreateLoadClassById(inst, classId, !INITIALIZED);
4780     ctor->ValueMapAdd(inst, clsPtr);
4781 }
4782 
4783 void LLVMIrConstructor::VisitLoadAndInitClass(GraphVisitor *v, Inst *inst)
4784 {
4785     auto ctor = static_cast<LLVMIrConstructor *>(v);
4786     auto classId = inst->CastToLoadAndInitClass()->GetTypeId();
4787 
4788     auto constexpr INITIALIZED = true;
4789     auto clsPtr = ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4790     ctor->ValueMapAdd(inst, clsPtr);
4791 }
4792 
4793 void LLVMIrConstructor::VisitUnresolvedLoadAndInitClass(GraphVisitor *v, Inst *inst)
4794 {
4795     auto ctor = static_cast<LLVMIrConstructor *>(v);
4796     auto classId = inst->CastToUnresolvedLoadAndInitClass()->GetTypeId();
4797 
4798     auto constexpr INITIALIZED = true;
4799     auto clsPtr = ctor->CreateLoadClassById(inst, classId, INITIALIZED);
4800     ctor->ValueMapAdd(inst, clsPtr);
4801 }
4802 
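// Loads a static field: computes the field address from the class pointer and the field offset,
// honoring the volatile ordering of the instruction.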
4803 void LLVMIrConstructor::VisitLoadStatic(GraphVisitor *v, Inst *inst)
4804 {
4805     auto ctor = static_cast<LLVMIrConstructor *>(v);
4806     auto klass = ctor->GetInputValue(inst, 0);
4807     ASSERT_TYPE(klass, ctor->builder_.getPtrTy());
4808 
4809     auto offset = ctor->GetGraph()->GetRuntime()->GetFieldOffset(inst->CastToLoadStatic()->GetObjField());
4810     auto fieldPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), klass, offset);
4811 
4812     auto n = ctor->CreateLoadWithOrdering(inst, fieldPtr, ToAtomicOrdering(inst->CastToLoadStatic()->GetVolatile()));
4813     ctor->ValueMapAdd(inst, n);
4814 }
4815 
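// Stores to a static field: emits the pre/post GC write barriers when required around a store
// that honors the volatile ordering of the instruction.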
4816 void LLVMIrConstructor::VisitStoreStatic(GraphVisitor *v, Inst *inst)
4817 {
4818     auto ctor = static_cast<LLVMIrConstructor *>(v);
4819     auto klass = ctor->GetInputValue(inst, 0);
4820     ASSERT_TYPE(klass, ctor->builder_.getPtrTy());
4821     auto value = ctor->GetInputValue(inst, 1);
4822 
4823     auto runtime = ctor->GetGraph()->GetRuntime();
4824     auto offset = runtime->GetFieldOffset(inst->CastToStoreStatic()->GetObjField());
4825     auto fieldPtr = ctor->builder_.CreateConstInBoundsGEP1_32(ctor->builder_.getInt8Ty(), klass, offset);
4826 
4827     // Pre
4828     if (inst->CastToStoreStatic()->GetNeedBarrier()) {
4829         ctor->CreatePreWRB(inst, fieldPtr);
4830     }
4831     // Write
4832     ctor->CreateStoreWithOrdering(value, fieldPtr, ToAtomicOrdering(inst->CastToStoreStatic()->GetVolatile()));
4833     // Post
4834     if (inst->CastToStoreStatic()->GetNeedBarrier()) {
4835         auto barrierType = runtime->GetPostType();
4836         if (barrierType == mem::BarrierType::POST_INTERREGION_BARRIER) {
4837             ctor->CreatePostWRB(inst, klass, ctor->builder_.getInt32(offset), value);
4838         } else {
4839             auto managed = ctor->CreateLoadManagedClassFromClass(klass);
4840             ctor->CreatePostWRB(inst, managed, ctor->builder_.getInt32(0), value);
4841         }
4842     }
4843 }
4844 
4845 void LLVMIrConstructor::VisitUnresolvedStoreStatic(GraphVisitor *v, Inst *inst)
4846 {
4847     auto ctor = static_cast<LLVMIrConstructor *>(v);
4848     auto unresolvedStore = inst->CastToUnresolvedStoreStatic();
4849 
4850     ASSERT(unresolvedStore->GetNeedBarrier());
4851     ASSERT(DataType::IsReference(inst->GetType()));
4852 
4853     auto typeId = ctor->builder_.getInt32(unresolvedStore->GetTypeId());
4854     auto value = ctor->GetInputValue(inst, 0);
4855 
4856     auto entrypoint = RuntimeInterface::EntrypointId::UNRESOLVED_STORE_STATIC_BARRIERED;
4857     ctor->CreateEntrypointCall(entrypoint, inst, {ctor->GetMethodArgument(), typeId, value});
4858 }
4859 
4860 void LLVMIrConstructor::VisitLoadConstArray(GraphVisitor *v, Inst *inst)
4861 {
4862     auto ctor = static_cast<LLVMIrConstructor *>(v);
4863     auto arrayType = inst->CastToLoadConstArray()->GetTypeId();
4864 
4865     llvm::Value *result = ctor->CreateEntrypointCall(RuntimeInterface::EntrypointId::RESOLVE_LITERAL_ARRAY, inst,
4866                                                      {ctor->GetMethodArgument(), ctor->builder_.getInt32(arrayType)});
4867     ctor->ValueMapAdd(inst, result);
4868 }
4869 
4870 void LLVMIrConstructor::VisitFillConstArray(GraphVisitor *v, Inst *inst)
4871 {
4872     ASSERT(!DataType::IsReference(inst->GetType()));
4873     auto ctor = static_cast<LLVMIrConstructor *>(v);
4874     auto &builder = ctor->builder_;
4875 
4876     auto runtime = ctor->GetGraph()->GetRuntime();
4877     auto arrayType = inst->CastToFillConstArray()->GetTypeId();
4878     auto arch = ctor->GetGraph()->GetArch();
4879     auto src = ctor->GetInputValue(inst, 0);
4880     auto offset = runtime->GetArrayDataOffset(arch);
4881     auto arraySize = inst->CastToFillConstArray()->GetImm() << DataType::ShiftByType(inst->GetType(), arch);
4882     auto arrayPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), src, offset);
4883 
4884     ASSERT(arraySize != 0);
4885 
4886     auto arrOffset = runtime->GetOffsetToConstArrayData(inst->CastToFillConstArray()->GetMethod(), arrayType);
4887     auto pfOffset = runtime->GetPandaFileOffset(arch);
4888     auto fileOffset = runtime->GetBinaryFileBaseOffset(arch);
4889 
4890     auto pfPtrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), ctor->GetMethodArgument(), pfOffset);
4891     auto pfPtr = builder.CreateLoad(builder.getPtrTy(), pfPtrPtr);
4892     auto filePtrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), pfPtr, fileOffset);
4893     auto filePtr = builder.CreateLoad(builder.getPtrTy(), filePtrPtr);
4894     auto constArrPtr = builder.CreateConstInBoundsGEP1_64(builder.getInt8Ty(), filePtr, arrOffset);
4895 
4896     auto align = llvm::MaybeAlign(0);
4897     /**
4898      * LLVM AOT may replace `@llvm.memcpy.inline` with a call to Ark's `LIB_CALL_MEM_COPY`, see `MustLowerMemCpy` in
4899      * libllvmbackend/llvm_ark_interface.cpp.
4900      */
4901     builder.CreateMemCpyInline(arrayPtr, align, constArrPtr, align, builder.getInt64(arraySize));
4902 }
4903 
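// Lowers IsInstance: unresolved classes go through an entrypoint call; otherwise an optional
// null check and an inline class comparison are emitted, with a slower inner block for
// non-final classes and a PHI merging the possible results.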
4904 void LLVMIrConstructor::VisitIsInstance(GraphVisitor *v, Inst *inst)
4905 {
4906     auto ctor = static_cast<LLVMIrConstructor *>(v);
4907     auto isInstance = inst->CastToIsInstance();
4908     auto klassType = isInstance->GetClassType();
4909     auto object = ctor->GetInputValue(inst, 0);
4910     llvm::Value *result;
4911     if (klassType == ClassType::UNRESOLVED_CLASS) {
4912         result = ctor->CreateIsInstanceEntrypointCall(inst);
4913     } else {
4914         auto &ctx = ctor->func_->getContext();
4915         auto preBb = ctor->GetCurrentBasicBlock();
4916         auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_cont"), ctor->func_);
4917 
4918         if (!inst->CastToIsInstance()->GetOmitNullCheck()) {
4919             auto notnullBb =
4920                 llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_notnull"), ctor->func_);
4921             auto isNullObj = ctor->builder_.CreateIsNull(object);
4922             ctor->builder_.CreateCondBr(isNullObj, contBb, notnullBb);
4923             ctor->SetCurrentBasicBlock(notnullBb);
4924         }
4925 
4926         llvm::Value *innerResult = nullptr;
4927         auto klassId = ctor->GetInputValue(inst, 1);
4928         auto klassObj = CreateLoadClassFromObject(object, &ctor->builder_, ctor->arkInterface_);
4929         auto notnullPostBb = ctor->GetCurrentBasicBlock();
4930         auto cmp = ctor->builder_.CreateICmpEQ(klassId, klassObj);
4931         if (klassType == ClassType::FINAL_CLASS) {
4932             innerResult = ctor->builder_.CreateZExt(cmp, ctor->builder_.getInt8Ty());
4933         } else {
4934             auto innerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "isinstance_inner"), ctor->func_);
4935             ctor->builder_.CreateCondBr(cmp, contBb, innerBb);
4936             ctor->SetCurrentBasicBlock(innerBb);
4937             innerResult = ctor->CreateIsInstanceInnerBlock(inst, klassObj, klassId);
4938         }
4939         auto incomingBlock = ctor->GetCurrentBasicBlock();
4940         ctor->builder_.CreateBr(contBb);
4941 
4942         ctor->SetCurrentBasicBlock(contBb);
4943         unsigned amount = 1 + (preBb == notnullPostBb ? 0 : 1) + (notnullPostBb == incomingBlock ? 0 : 1);
4944         auto resultPhi = ctor->builder_.CreatePHI(ctor->builder_.getInt8Ty(), amount);
4945         if (preBb != notnullPostBb) {
4946             resultPhi->addIncoming(ctor->builder_.getInt8(0), preBb);
4947         }
4948         if (notnullPostBb != incomingBlock) {
4949             resultPhi->addIncoming(ctor->builder_.getInt8(1), notnullPostBb);
4950         }
4951         resultPhi->addIncoming(innerResult, incomingBlock);
4952         result = resultPhi;
4953     }
4954 
4955     ctor->ValueMapAdd(inst, result);
4956 }
4957 
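// Lowers CheckCast: emits an optional null check, then either a runtime/fast-path call or an
// inline class comparison followed by a deoptimization branch or a slow inner check on mismatch.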
4958 void LLVMIrConstructor::VisitCheckCast(GraphVisitor *v, Inst *inst)
4959 {
4960     auto ctor = static_cast<LLVMIrConstructor *>(v);
4961     auto checkCast = inst->CastToCheckCast();
4962     auto klassType = checkCast->GetClassType();
4963     auto src = ctor->GetInputValue(inst, 0);
4964 
4965     auto &ctx = ctor->func_->getContext();
4966     auto outBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_out"), ctor->func_);
4967 
4968     // The nullptr check can sometimes be omitted
4969     if (!inst->CastToCheckCast()->GetOmitNullCheck()) {
4970         auto contBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_cont"), ctor->func_);
4971         auto isNullptr = ctor->builder_.CreateIsNull(src);
4972         ctor->builder_.CreateCondBr(isNullptr, outBb, contBb);
4973         ctor->SetCurrentBasicBlock(contBb);
4974     }
4975 
4976     if (klassType == ClassType::UNRESOLVED_CLASS ||
4977         (klassType == ClassType::INTERFACE_CLASS && inst->CanDeoptimize())) {
4978         ctor->CreateCheckCastEntrypointCall(inst);
4979     } else if (klassType == ClassType::INTERFACE_CLASS) {
4980         ASSERT(!inst->CanDeoptimize());
4981         auto entrypoint = RuntimeInterface::EntrypointId::CHECK_CAST_INTERFACE;
4982         ctor->CreateFastPathCall(inst, entrypoint, {src, ctor->GetInputValue(inst, 1)});
4983     } else {
4984         auto klassId = ctor->GetInputValue(inst, 1);
4985         auto klassObj = CreateLoadClassFromObject(src, &ctor->builder_, ctor->arkInterface_);
4986         if (klassType == ClassType::FINAL_CLASS) {
4987             auto cmpNe = ctor->builder_.CreateICmpNE(klassId, klassObj);
4988             auto exception = RuntimeInterface::EntrypointId::CLASS_CAST_EXCEPTION;
4989             ctor->CreateDeoptimizationBranch(inst, cmpNe, exception, {klassId, src});
4990         } else {
4991             auto cmpEq = ctor->builder_.CreateICmpEQ(klassId, klassObj);
4992             auto innerBb = llvm::BasicBlock::Create(ctx, CreateBasicBlockName(inst, "checkcast_inner"), ctor->func_);
4993             ctor->builder_.CreateCondBr(cmpEq, outBb, innerBb);
4994             ctor->SetCurrentBasicBlock(innerBb);
4995             ctor->CreateCheckCastInner(inst, klassObj, klassId);
4996         }
4997     }
4998     ctor->builder_.CreateBr(outBb);
4999     ctor->SetCurrentBasicBlock(outBb);
5000 }
5001 
5002 void LLVMIrConstructor::VisitLoadType(GraphVisitor *v, Inst *inst)
5003 {
5004     auto ctor = static_cast<LLVMIrConstructor *>(v);
5005 
5006     auto constexpr INITIALIZED = true;
5007     auto klass = ctor->CreateLoadClassById(inst, inst->CastToLoadType()->GetTypeId(), !INITIALIZED);
5008     auto result = ctor->CreateLoadManagedClassFromClass(klass);
5009     ctor->ValueMapAdd(inst, result);
5010 }
5011 
5012 void LLVMIrConstructor::VisitUnresolvedLoadType(GraphVisitor *v, Inst *inst)
5013 {
5014     auto ctor = static_cast<LLVMIrConstructor *>(v);
5015 
5016     auto constexpr INITIALIZED = true;
5017     auto klass = ctor->CreateLoadClassById(inst, inst->CastToUnresolvedLoadType()->GetTypeId(), !INITIALIZED);
5018     auto result = ctor->CreateLoadManagedClassFromClass(klass);
5019     ctor->ValueMapAdd(inst, result);
5020 }
5021 
5022 void LLVMIrConstructor::VisitGetInstanceClass(GraphVisitor *v, Inst *inst)
5023 {
5024     auto ctor = static_cast<LLVMIrConstructor *>(v);
5025 
5026     auto object = ctor->GetInputValue(inst, 0);
5027     auto klass = CreateLoadClassFromObject(object, &ctor->builder_, ctor->arkInterface_);
5028     ctor->ValueMapAdd(inst, klass);
5029 }
5030 
5031 void LLVMIrConstructor::VisitThrow(GraphVisitor *v, Inst *inst)
5032 {
5033     auto ctor = static_cast<LLVMIrConstructor *>(v);
5034     auto obj = ctor->GetInputValue(inst, 0);
5035 
5036     auto exception = RuntimeInterface::EntrypointId::THROW_EXCEPTION;
5037     ctor->CreateEntrypointCall(exception, inst, {obj});
5038     ctor->builder_.CreateUnreachable();
5039 }
5040 
5041 void LLVMIrConstructor::VisitCatchPhi([[maybe_unused]] GraphVisitor *v, Inst *inst)
5042 {
5043     UnexpectedLowering(inst);
5044 }
5045 
5046 void LLVMIrConstructor::VisitLoadRuntimeClass(GraphVisitor *v, Inst *inst)
5047 {
5048     auto ctor = static_cast<LLVMIrConstructor *>(v);
5049 
5050     auto offset = ctor->GetGraph()->GetRuntime()->GetTlsPromiseClassPointerOffset(ctor->GetGraph()->GetArch());
5051     auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_, offset,
5052                                                            ctor->builder_.getPtrTy());
5053     ctor->ValueMapAdd(inst, result);
5054 }
5055 
5056 void LLVMIrConstructor::VisitLoadUniqueObject(GraphVisitor *v, Inst *inst)
5057 {
5058     auto ctor = static_cast<LLVMIrConstructor *>(v);
5059 
5060     auto offset = ctor->GetGraph()->GetRuntime()->GetTlsUniqueObjectOffset(ctor->GetGraph()->GetArch());
5061     auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_, offset,
5062                                                            ctor->builder_.getPtrTy(LLVMArkInterface::GC_ADDR_SPACE));
5063     ctor->ValueMapAdd(inst, result);
5064 }
5065 
5066 void LLVMIrConstructor::VisitLoadImmediate(GraphVisitor *v, Inst *inst)
5067 {
5068     auto ctor = static_cast<LLVMIrConstructor *>(v);
5069     auto loadImm = inst->CastToLoadImmediate();
5070     ASSERT_DO(loadImm->IsTlsOffset(), (std::cerr << "Unsupported llvm lowering for \n", inst->Dump(&std::cerr, true)));
5071     ASSERT(inst->GetType() == DataType::POINTER);
5072     auto result = llvmbackend::runtime_calls::LoadTLSValue(&ctor->builder_, ctor->arkInterface_,
5073                                                            loadImm->GetTlsOffset(), ctor->builder_.getPtrTy());
5074     ctor->ValueMapAdd(inst, result);
5075 }
5076 
5077 void LLVMIrConstructor::VisitDefault([[maybe_unused]] Inst *inst)
5078 {
5079     ASSERT_DO(false, (std::cerr << "Unsupported llvm lowering for \n", inst->Dump(&std::cerr, true)));
5080     UNREACHABLE();
5081 }
5082 
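// Constructor: assigns the register map and calling convention for interpreter/FastPath modes
// and creates the function declaration with its GC strategy and class-id metadata.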
5083 LLVMIrConstructor::LLVMIrConstructor(Graph *graph, llvm::Module *module, llvm::LLVMContext *context,
5084                                      LLVMArkInterface *arkInterface, const std::unique_ptr<DebugDataBuilder> &debugData)
5085     : graph_(graph),
5086       builder_(llvm::IRBuilder<>(*context)),
5087       inputMap_(graph->GetLocalAllocator()->Adapter()),
5088       blockTailMap_(graph->GetLocalAllocator()->Adapter()),
5089       blockHeadMap_(graph->GetLocalAllocator()->Adapter()),
5090       arkInterface_(arkInterface),
5091       debugData_(debugData),
5092       cc_(graph->GetLocalAllocator()->Adapter()),
5093       ccValues_(graph->GetLocalAllocator()->Adapter())
5094 {
5095     llvm::CallingConv::ID callingConv = llvm::CallingConv::C;
5096     // Assign regmaps
5097     if (graph->GetMode().IsInterpreter()) {
5098         if (graph->GetArch() == Arch::AARCH64) {
5099             cc_.assign({AARCH64_PC, AARCH64_ACC, AARCH64_ACC_TAG, AARCH64_FP, AARCH64_DISPATCH, AARCH64_MOFFSET,
5100                         AARCH64_METHOD_PTR, GetThreadReg(Arch::AARCH64)});
5101         } else if (graph->GetArch() == Arch::X86_64) {
5102             cc_.assign({X86_64_PC, X86_64_ACC, X86_64_ACC_TAG, X86_64_FP, X86_64_DISPATCH, GetThreadReg(Arch::X86_64),
5103                         X86_64_REAL_FP});
5104         } else {
5105             LLVM_LOG(FATAL, IR) << "Unsupported architecture for arkintcc";
5106         }
5107         callingConv = llvm::CallingConv::ArkInt;
5108     } else if (graph->GetMode().IsFastPath()) {
5109         ASSERT(graph->GetArch() == Arch::AARCH64);
5110         for (size_t i = 0; i < graph->GetRuntime()->GetMethodTotalArgumentsCount(graph->GetMethod()); i++) {
5111             cc_.push_back(i);
5112         }
5113         // Get calling convention excluding thread and frame registers
5114         callingConv = GetFastPathCallingConv(cc_.size());
5115         cc_.push_back(GetThreadReg(Arch::AARCH64));
5116         cc_.push_back(AARCH64_REAL_FP);
5117     }
5118     ccValues_.assign(cc_.size(), nullptr);
5119 
5120     // Create function
5121     auto funcProto = GetEntryFunctionType();
5122     auto methodName = arkInterface_->GetUniqMethodName(graph_->GetMethod());
5123     func_ = CreateFunctionDeclaration(funcProto, methodName, module);
5124     ASSERT(func_->getCallingConv() == llvm::CallingConv::C);
5125     func_->setCallingConv(callingConv);
5126 
5127     // The code generation scenario for a FastPath with zero arguments and a return value is not tested
5128     ASSERT(callingConv != llvm::CallingConv::ArkFast0 || func_->getReturnType()->isVoidTy());
5129 
5130     if (graph->SupportManagedCode()) {
5131         func_->setGC(std::string {llvmbackend::LLVMArkInterface::GC_STRATEGY});
5132     }
5133 
5134     auto klassId = graph_->GetRuntime()->GetClassIdForMethod(graph_->GetMethod());
5135     auto klassIdMd = llvm::ConstantAsMetadata::get(builder_.getInt32(klassId));
5136     func_->addMetadata(llvmbackend::LLVMArkInterface::FUNCTION_MD_CLASS_ID, *llvm::MDNode::get(*context, {klassIdMd}));
5137 
5138     if (!arkInterface_->IsIrtocMode()) {
5139         func_->addMetadata("use-ark-frame", *llvm::MDNode::get(*context, {}));
5140     }
5141 }
5142 
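// Builds LLVM IR for the whole graph: names the arguments, creates basic blocks, visits all
// instructions, fills PHI inputs, and finally verifies the resulting function.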
5143 bool LLVMIrConstructor::BuildIr(bool preventInlining)
5144 {
5145     LLVM_LOG(DEBUG, IR) << "Building IR for LLVM";
5146 
5147     // Set Argument Names
5148     // Special arguments
5149     auto it = func_->arg_begin();
5150     if (graph_->SupportManagedCode()) {
5151         (it++)->setName("method");
5152     }
5153     // Actual arguments
5154     auto idx = 0;
5155     while (it != func_->arg_end()) {
5156         std::stringstream name;
5157         name << "a" << idx++;
5158         (it++)->setName(name.str());
5159     }
5160 
5161     auto method = graph_->GetMethod();
5162     auto runtime = graph_->GetRuntime();
5163     arkInterface_->RememberFunctionOrigin(func_, method);
5164     func_->addFnAttr(ark::llvmbackend::LLVMArkInterface::SOURCE_LANG_ATTR,
5165                      std::to_string(static_cast<uint8_t>(runtime->GetMethodSourceLanguage(method))));
5166 
5167     if (!graph_->GetMode().IsFastPath()) {
5168         debugData_->BeginSubprogram(func_, runtime->GetFullFileName(method), runtime->GetMethodId(method));
5169     } else {
5170         func_->addFnAttr(llvm::Attribute::NoInline);
5171     }
5172 
5173     auto normalMarkerHolder = MarkerHolder(graph_);
5174     auto normal = normalMarkerHolder.GetMarker();
5175 
5176     graph_->GetStartBlock()->SetMarker(normal);
5177     MarkNormalBlocksRecursive(graph_->GetStartBlock(), normal);
5178 
5179     // First step - create blocks, leaving LLVM EntryBlock untouched
5180     BuildBasicBlocks(normal);
5181     InitializeEntryBlock(preventInlining);
5182 
5183     // Second step - visit all instructions, including StartBlock, but not filling PHI inputs
5184     BuildInstructions(normal);
5185 
5186     // Third step - fill the PHIs inputs
5187     for (auto block : graph_->GetBlocksRPO()) {
5188         FillPhiInputs(block, normal);
5189     }
5190 
5191     if (!graph_->GetMode().IsFastPath()) {
5192         debugData_->EndSubprogram(func_);
5193     }
5194     if (!arkInterface_->IsIrtocMode()) {
5195         func_->addFnAttr("frame-pointer", "all");
5196     }
5197 #ifndef NDEBUG
5198     // Only for tests
5199     BreakIrIfNecessary();
5200 #endif
5201     // verifyFunction returns false if there are no errors, but we return true when everything is ok.
5202     auto verified = !verifyFunction(*func_, &llvm::errs());
5203     if (!verified) {
5204         func_->print(llvm::errs());
5205     }
5206     return verified;
5207 }
5208 
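// Emits the "ark.frame.info" module metadata: the fp-to-LLVM-frame delta, the offsets of
// caller-saved parameter registers within the Ark frame, and the actual frame size.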
5209 void LLVMIrConstructor::InsertArkFrameInfo(llvm::Module *module, Arch arch)
5210 {
5211     constexpr std::string_view ARK_CALLER_SLOTS_MD = "ark.frame.info";
5212     ASSERT(module->getNamedMetadata(ARK_CALLER_SLOTS_MD) == nullptr);
5213     auto arkFrameInfoMd = module->getOrInsertNamedMetadata(ARK_CALLER_SLOTS_MD);
5214     auto builder = llvm::IRBuilder<>(module->getContext());
5215 
5216     // The first param is the difference between Ark's fp and the start of the LLVM frame.
5217     auto md = llvm::ConstantAsMetadata::get(builder.getInt32(0U));
5218     arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), {md}));
5219 
5220     // The second param contains the offsets of caller-saved registers inside the Ark frame
5221     std::vector<size_t> callParamsRegs;
5222     switch (arch) {
5223         case Arch::AARCH64: {
5224             auto src = ArchCallingConvention<Arch::AARCH64>::Target::CALL_PARAMS_REGS;
5225             callParamsRegs = std::vector<size_t>(src.begin(), src.end());
5226             break;
5227         }
5228         case Arch::X86_64: {
5229             auto src = ArchCallingConvention<Arch::X86_64>::Target::CALL_PARAMS_REGS;
5230             callParamsRegs = std::vector<size_t>(src.begin(), src.end());
5231             break;
5232         }
5233         default:
5234             UNREACHABLE();
5235     }
5236 
5237     CFrameLayout frameLayout(arch, 0);
5238     const auto callerRegsSlotStart = frameLayout.GetCallerFirstSlot(false);
5239     const auto callerRegsCount = frameLayout.GetCallerRegistersCount(false);
5240     std::vector<llvm::Metadata *> argOffsets;
5241     for (auto paramRegId : callParamsRegs) {
5242         int slot = callerRegsSlotStart + (callerRegsCount - 1 - paramRegId);
5243         slot += frameLayout.GetStackStartSlot();
5244         constexpr auto FP_ORIGIN = CFrameLayout::OffsetOrigin::FP;
5245         constexpr auto OFFSET_IN_BYTES = CFrameLayout::OffsetUnit::BYTES;
5246         auto offset = -frameLayout.GetOffset<FP_ORIGIN, OFFSET_IN_BYTES>(slot);
5247         ASSERT(std::numeric_limits<int32_t>::min() <= offset);
5248         ASSERT(offset <= std::numeric_limits<int32_t>::max());
5249         if (arch == Arch::AARCH64) {
5250             offset -= frameLayout.GetSlotSize() * 2U;
5251         }
5252         argOffsets.push_back(llvm::ConstantAsMetadata::get(builder.getInt32(offset)));
5253     }
5254     arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), argOffsets));
5255 
5256     // The third param is the actual frame size
5257     auto val = frameLayout.GetFrameSize<CFrameLayout::OffsetUnit::BYTES>();
5258     // LLVM will store LR & FP
5259     if (arch == Arch::AARCH64) {
5260         val -= frameLayout.GetSlotSize() * 2U;
5261     }
5262     auto vmd = llvm::ConstantAsMetadata::get(builder.getInt32(val));
5263     arkFrameInfoMd->addOperand(llvm::MDNode::get(module->getContext(), {vmd}));
5264 }
5265 
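// Builds the gc.safepoint_poll function: it loads the safepoint flag from TLS and, on the
// unlikely path, calls the SAFEPOINT entrypoint.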
5266 void LLVMIrConstructor::ProvideSafepointPoll(llvm::Module *module, LLVMArkInterface *arkInterface, Arch arch)
5267 {
5268     // Must not have been provided already
5269     ASSERT(module->getFunction(LLVMArkInterface::GC_SAFEPOINT_POLL_NAME) == nullptr);
5270     auto &ctx = module->getContext();
5271     auto builder = llvm::IRBuilder<>(ctx);
5272 
5273     // Create a gc.safepoint_poll itself
5274     auto pollFtype = llvm::FunctionType::get(builder.getVoidTy(), false);
5275     auto poll = llvm::Function::Create(pollFtype, llvm::Function::ExternalLinkage,
5276                                        LLVMArkInterface::GC_SAFEPOINT_POLL_NAME, module);
5277     poll->setDoesNotThrow();
5278 
5279     // Creating a body
5280     auto entry = llvm::BasicBlock::Create(ctx, "bb", poll);
5281     builder.SetInsertPoint(entry);
5282 
5283     int64_t flagAddrOffset = arkInterface->GetRuntime()->GetFlagAddrOffset(arch);
5284     auto trigger =
5285         llvmbackend::runtime_calls::LoadTLSValue(&builder, arkInterface, flagAddrOffset, builder.getInt16Ty());
5286     auto needSafepoint = builder.CreateICmpNE(trigger, builder.getInt16(0), "need_safepoint");
5287     // Create a ret instruction immediately to split the basic block right before it
5288     auto ret = builder.CreateRetVoid();
5289 
5290     // Split into IF-THEN before RET and insert a safepoint call into THEN block
5291     auto weights =
5292         llvm::MDBuilder(ctx).createBranchWeights(llvmbackend::Metadata::BranchWeights::UNLIKELY_BRANCH_WEIGHT,
5293                                                  llvmbackend::Metadata::BranchWeights::LIKELY_BRANCH_WEIGHT);
5294 
5295     builder.SetInsertPoint(llvm::SplitBlockAndInsertIfThen(needSafepoint, ret, false, weights));
5296     builder.GetInsertBlock()->setName("safepoint");
5297     auto eid = RuntimeInterface::EntrypointId::SAFEPOINT;
5298     arkInterface->GetOrCreateRuntimeFunctionType(ctx, module, LLVMArkInterface::RuntimeCallType::ENTRYPOINT,
5299                                                  static_cast<LLVMArkInterface::EntrypointId>(eid));
5300     auto threadReg = llvmbackend::runtime_calls::GetThreadRegValue(&builder, arkInterface);
5301     auto spCall = llvmbackend::runtime_calls::CreateEntrypointCallCommon(
5302         &builder, threadReg, arkInterface, static_cast<llvmbackend::runtime_calls::EntrypointId>(eid));
5303 
5304     spCall->addFnAttr(llvm::Attribute::get(ctx, "safepoint"));
5305 }
5306 
5307 std::string LLVMIrConstructor::CheckGraph(Graph *graph)
5308 {
5309     ASSERT(!graph->IsDynamicMethod());
5310     for (auto basicBlock : graph->GetBlocksRPO()) {
5311         for (auto inst : basicBlock->AllInsts()) {
5312             bool canCompile = LLVMIrConstructor::CanCompile(inst);
5313             if (!canCompile) {
5314                 // It means we have one of the following cases:
5315                 // * a brand-new opcode in Ark Compiler IR
5316                 // * a dynamic intrinsic call (in a non-dynamic method!)
5317                 // * a SLOW_PATH_ENTRY call in Irtoc code that has not been patched yet
5318                 std::stringstream sstream;
5319                 sstream << GetOpcodeString(inst->GetOpcode()) << " unexpected in LLVM lowering. Method = "
5320                         << graph->GetRuntime()->GetMethodFullName(graph->GetMethod());
5321                 std::string error = sstream.str();
5322                 LLVM_LOG(ERROR, IR) << error;
5323                 return error;
5324             }
5325         }
5326     }
5327     return "";
5328 }
5329 
5330 bool LLVMIrConstructor::CanCompile(Inst *inst)
5331 {
5332     if (inst->IsIntrinsic()) {
5333         auto iid = inst->CastToIntrinsic()->GetIntrinsicId();
5334         // We support only slowpaths where the second immediate is an external function
5335         if (iid == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY) {
5336             return inst->CastToIntrinsic()->GetImms().size() > 1;
5337         }
5338         return CanCompileIntrinsic(iid);
5339     }
5340     // Check if we have a visitor method that can handle it
5341     // CC-OFFNXT(C_RULE_SWITCH_BRANCH_CHECKER) autogenerated code
5342     switch (inst->GetOpcode()) {
5343         default:
5344             UNREACHABLE_CONSTEXPR();
5345             // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
5346 #define INST_DEF(OPCODE, ...)                                                     \
5347     case Opcode::OPCODE: {                                                        \
5348         /* CC-OFFNXT(G.PRE.05) function gen */                                    \
5349         return &LLVMIrConstructor::Visit##OPCODE != &GraphVisitor::Visit##OPCODE; \
5350     }
5351             OPCODE_LIST(INST_DEF)
5352     }
5353 #undef INST_DEF
5354 }
5355 
5356 #ifndef NDEBUG
5357 void LLVMIrConstructor::BreakIrIfNecessary()
5358 {
5359     if (llvmbackend::g_options.GetLlvmBreakIrRegex().empty()) {
5360         return;
5361     }
5362 
5363     std::regex regex {llvmbackend::g_options.GetLlvmBreakIrRegex()};
5364 
5365     if (!std::regex_match(func_->getName().str(), regex)) {
5366         return;
5367     }
5368 
5369     LLVM_LOG(DEBUG, IR) << "Breaking IR for '" << func_->getName().str() << "' because it matches regex = '"
5370                         << llvmbackend::g_options.GetLlvmBreakIrRegex() << "'";
5371 
5372     for (auto &basicBlock : *func_) {
5373         basicBlock.getTerminator()->eraseFromParent();
5374     }
5375 }
5376 #endif
5377 
5378 #include "llvm_ir_constructor_gen.inl"
5379 
5380 }  // namespace ark::compiler
5381