/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
#define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H

/*
Codegen interface for the compiler.
! Do not use this file in runtime code.
*/

#include "code_info/code_info_builder.h"
#include "compiler_logger.h"
#include "disassembly.h"
#include "frame_info.h"
#include "optimizer/analysis/live_registers.h"
#include "optimizer/code_generator/callconv.h"
#include "optimizer/code_generator/encode.h"
#include "optimizer/code_generator/registers_description.h"
#include "optimizer/code_generator/slow_path.h"
#include "optimizer/code_generator/spill_fill_encoder.h"
#include "optimizer/code_generator/target_info.h"
#include "optimizer/ir/analysis.h"
#include "optimizer/ir/graph.h"
#include "optimizer/ir/graph_visitor.h"
#include "optimizer/optimizations/regalloc/spill_fills_resolver.h"
#include "optimizer/pass_manager.h"
#include "utils/cframe_layout.h"

namespace panda::compiler {
// Maximum size of a slow path in bytes
constexpr size_t INST_IN_SLOW_PATH = 64;

class Encoder;
class CodeBuilder;
class OsrEntryStub;

inline VRegInfo::Type IrTypeToMetainfoType(DataType::Type type)
{
    switch (type) {
        case DataType::UINT64:
        case DataType::INT64:
            return VRegInfo::Type::INT64;
        case DataType::ANY:
            return VRegInfo::Type::ANY;
        case DataType::UINT32:
        case DataType::UINT16:
        case DataType::UINT8:
        case DataType::INT32:
        case DataType::INT16:
        case DataType::INT8:
            return VRegInfo::Type::INT32;
        case DataType::FLOAT64:
            return VRegInfo::Type::FLOAT64;
        case DataType::FLOAT32:
            return VRegInfo::Type::FLOAT32;
        case DataType::BOOL:
            return VRegInfo::Type::BOOL;
        case DataType::REFERENCE:
            return VRegInfo::Type::OBJECT;
        default:
            UNREACHABLE();
    }
}
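// Note: every integer IR type narrower than 64 bits maps to a single 32-bit slot type,
// e.g. IrTypeToMetainfoType(DataType::UINT16) == VRegInfo::Type::INT32.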

class Codegen : public Optimization {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit Codegen(Graph *graph);
    NO_MOVE_SEMANTIC(Codegen);
    NO_COPY_SEMANTIC(Codegen);

    ~Codegen() override = default;

    bool RunImpl() override;
    const char *GetPassName() const override;
    bool AbortIfFailed() const override;

    static bool Run(Graph *graph);

    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    ArenaAllocator *GetLocalAllocator() const
    {
        return localAllocator_;
    }
    FrameInfo *GetFrameInfo() const
    {
        return frameInfo_;
    }
    void SetFrameInfo(FrameInfo *frameInfo)
    {
        frameInfo_ = frameInfo;
    }
    virtual void CreateFrameInfo();

    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    RegistersDescription *GetRegfile() const
    {
        return regfile_;
    }
    Encoder *GetEncoder() const
    {
        return enc_;
    }
    CallingConvention *GetCallingConvention() const
    {
        return callconv_;
    }

    GraphVisitor *GetGraphVisitor() const
    {
        return visitor_;
    }

    LabelHolder::LabelId GetLabelEntry() const
    {
        return labelEntry_;
    }

    LabelHolder::LabelId GetLabelExit() const
    {
        return labelExit_;
    }

    RuntimeInterface::MethodId GetMethodId()
    {
        return methodId_;
    }

    void SetStartCodeOffset(size_t offset)
    {
        startCodeOffset_ = offset;
    }

    size_t GetStartCodeOffset() const
    {
        return startCodeOffset_;
    }

    void Convert(ArenaVector<Reg> *regsUsage, const ArenaVector<bool> *mask, TypeInfo typeInfo);

    Reg ConvertRegister(Register ref, DataType::Type type = DataType::Type::INT64);

    Imm ConvertImmWithExtend(uint64_t imm, DataType::Type type);

    Condition ConvertCc(ConditionCode cc);
    Condition ConvertCcOverflow(ConditionCode cc);

    static inline TypeInfo ConvertDataType(DataType::Type type, Arch arch)
    {
        return TypeInfo::FromDataType(type, arch);
    }

    Arch GetArch() const
    {
        return GetTarget().GetArch();
    }

    Target GetTarget() const
    {
        return target_;
    }

    TypeInfo GetPtrRegType() const
    {
        return target_.GetPtrRegType();
    }

    CodeInfoBuilder *GetCodeBuilder() const
    {
        return codeBuilder_;
    }

    void CreateStackMap(Inst *inst, Inst *user = nullptr);

    void CreateStackMapRec(SaveStateInst *saveState, bool requireVregMap, Inst *targetSite);
    void CreateVRegMap(SaveStateInst *saveState, size_t vregsCount, Inst *targetSite);
    void CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg);
    void FillVregIndices(SaveStateInst *saveState);

    void CreateOsrEntry(SaveStateInst *saveState);

    void CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg);

    /// 'LIVE_INPUTS' indicates that the inst's source registers should also be added to the mask
    template <bool LIVE_INPUTS = false>
    std::pair<RegMask, VRegMask> GetLiveRegisters(Inst *inst)
    {
        RegMask liveRegs;
        VRegMask liveFpRegs;
        if (!g_options.IsCompilerSaveOnlyLiveRegisters() || inst == nullptr) {
            liveRegs.set();
            liveFpRegs.set();
            return {liveRegs, liveFpRegs};
        }
        // Run LiveRegisters pass only if it is actually required
        if (!GetGraph()->IsAnalysisValid<LiveRegisters>()) {
            GetGraph()->RunPass<LiveRegisters>();
        }

        // Add registers from intervals that are live at inst's definition
        auto &lr = GetGraph()->GetAnalysis<LiveRegisters>();
        lr.VisitIntervalsWithLiveRegisters<LIVE_INPUTS>(inst, [&liveRegs, &liveFpRegs, this](const auto &li) {
            auto reg = ConvertRegister(li->GetReg(), li->GetType());
            GetEncoder()->SetRegister(&liveRegs, &liveFpRegs, reg);
        });

        // Add live temp registers
        liveRegs |= GetEncoder()->GetLiveTmpRegMask();
        liveFpRegs |= GetEncoder()->GetLiveTmpFpRegMask();

        return {liveRegs, liveFpRegs};
    }
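    // Illustrative use (a sketch, not part of this header): callers typically spill exactly
    // the returned masks around a call site, e.g.
    //   auto [liveRegs, liveFpRegs] = GetLiveRegisters(inst);
    //   SaveCallerRegisters(liveRegs, liveFpRegs, true);
    //   ... emit the call ...
    //   LoadCallerRegisters(liveRegs, liveFpRegs, true);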

    // Limits the live register set to the registers used to pass parameters to the runtime or fastpath call:
    // 1) these are saved/restored by the caller;
    // 2) the remaining ones are saved/restored by the bridge function (aarch only) or by the fastpath
    //    prologue/epilogue.
    void FillOnlyParameters(RegMask *liveRegs, uint32_t numParams, bool isFastpath) const;

    template <typename T, typename... Args>
    T *CreateSlowPath(Inst *inst, Args &&...args)
    {
        static_assert(std::is_base_of_v<SlowPathBase, T>);
        auto label = GetEncoder()->CreateLabel();
        auto slowPath = GetLocalAllocator()->New<T>(label, inst, std::forward<Args>(args)...);
        slowPaths_.push_back(slowPath);
        return slowPath;
    }
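    // Illustrative use (a sketch; the concrete slow-path type and arguments vary per check,
    // see CreateUnaryCheck below for a real caller):
    //   auto *slowPath = CreateSlowPath<SlowPathDeoptimize>(inst, deoptType);
    //   GetEncoder()->EncodeJump(slowPath->GetLabel(), src, Condition::EQ);
    // All slow paths registered here are emitted at the end of the method by EmitSlowPaths().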

    void EmitSlowPaths();

    /**
     * Inserts tracing code into the generated code. See the `Trace` method in `runtime/entrypoints.cpp`.
     * NOTE(compiler): we should rework the parameter-assignment algorithm, which is duplicated here.
     * @param params parameters to be passed to the TRACE entrypoint; the first parameter must be a TraceId value.
     */
    template <typename... Args>
    void InsertTrace(Args &&...params)
    {
        SCOPED_DISASM_STR(this, "Trace");
        [[maybe_unused]] constexpr size_t MAX_PARAM_NUM = 8;
        static_assert(sizeof...(Args) <= MAX_PARAM_NUM);
        auto regfile = GetRegfile();
        auto saveRegs = regfile->GetCallerSavedRegMask();
        saveRegs.set(GetTarget().GetReturnRegId());
        auto saveVregs = regfile->GetCallerSavedVRegMask();
        saveVregs.set(GetTarget().GetReturnFpRegId());

        SaveCallerRegisters(saveRegs, saveVregs, false);
        FillCallParams(std::forward<Args>(params)...);
        EmitCallRuntimeCode(nullptr, EntrypointId::TRACE);
        LoadCallerRegisters(saveRegs, saveVregs, false);
    }
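    // Illustrative use (a sketch; the TraceId constant is an assumption, see runtime/entrypoints.cpp):
    //   InsertTrace(TypedImm(static_cast<size_t>(TraceId::METHOD_ENTER)), methodReg);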

    void CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id);

    template <bool IS_FASTPATH, typename... Args>
    void CallEntrypoint(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
    {
        ASSERT(inst != nullptr);
        CHECK_EQ(sizeof...(Args), GetRuntime()->GetEntrypointArgsNum(id));
        if (GetArch() == Arch::AARCH32) {
            // There is a problem with 64-bit parameters:
            // the parameter count passed from entrypoints_gen.S.erb would be inconsistent with the AArch32 ABI,
            // so the runtime bridges would get a wrong parameter count (the \paramsnum macro argument).
            ASSERT(EnsureParamsFitIn32Bit({params...}));
            ASSERT(!dstReg.IsValid() || dstReg.GetSize() <= WORD_SIZE);
        }

        SCOPED_DISASM_STR(this, std::string("CallEntrypoint: ") + GetRuntime()->GetEntrypointName(id));
        RegMask liveRegs {preservedRegs | GetLiveRegisters(inst).first};
        RegMask paramsMask;
        if (inst->HasImplicitRuntimeCall() && !GetRuntime()->IsEntrypointNoreturn(id)) {
            SaveRegistersForImplicitRuntime(inst, &paramsMask, &liveRegs);
        }

        ASSERT(IS_FASTPATH == GetRuntime()->IsEntrypointFastPath(id));
        bool retRegAlive {liveRegs.Test(GetTarget().GetReturnRegId())};
        // Parameter registers: their initial values must be saved by the caller.
        // The other caller-saved registers are saved in the bridges.
        FillOnlyParameters(&liveRegs, sizeof...(Args), IS_FASTPATH);

        if (IS_FASTPATH && retRegAlive && dstReg.IsValid()) {
            Reg retReg = GetTarget().GetReturnReg(dstReg.GetType());
            if (dstReg.GetId() != retReg.GetId()) {
                GetEncoder()->SetRegister(&liveRegs, nullptr, retReg, true);
            }
        }

        GetEncoder()->SetRegister(&liveRegs, nullptr, dstReg, false);
        SaveCallerRegisters(liveRegs, VRegMask(), true);

        if (sizeof...(Args) != 0) {
            FillCallParams(std::forward<Args>(params)...);
        }

        // Call code
        if (!EmitCallRuntimeCode(inst, id)) {
            return;
        }
        if (dstReg.IsValid()) {
            ASSERT(dstReg.IsScalar());
            Reg retReg = GetTarget().GetReturnReg(dstReg.GetType());
            if (!IS_FASTPATH && retRegAlive && dstReg.GetId() != retReg.GetId() &&
                (!GetTarget().FirstParamIsReturnReg(retReg.GetType()) || sizeof...(Args) == 0U)) {
                GetEncoder()->SetRegister(&liveRegs, nullptr, retReg, true);
            }

            // We must:
            //  sign-extend INT8 and INT16 to INT32;
            //  zero-extend UINT8 and UINT16 to UINT32.
            if (dstReg.GetSize() < WORD_SIZE) {
                bool isSigned = DataType::IsTypeSigned(inst->GetType());
                GetEncoder()->EncodeCast(dstReg.As(INT32_TYPE), isSigned, retReg, isSigned);
            } else {
                GetEncoder()->EncodeMov(dstReg, retReg);
            }
        }
        CallEntrypointFinalize(liveRegs, paramsMask, inst);
    }

    void CallEntrypointFinalize(RegMask &liveRegs, RegMask &paramsMask, Inst *inst)
    {
        LoadCallerRegisters(liveRegs, VRegMask(), true);

        if (!inst->HasImplicitRuntimeCall()) {
            return;
        }
        for (auto i = 0U; i < paramsMask.size(); i++) {
            if (paramsMask.test(i)) {
                inst->GetSaveState()->GetRootsRegsMask().reset(i);
            }
        }
    }

    // This function is used for calling runtime functions through special bridges.
    // !NOTE Do not use it to call the runtime without bridges (it saves only the parameters on the stack).
    template <typename... Args>
    void CallRuntime(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
    {
        CallEntrypoint<false>(inst, id, dstReg, preservedRegs, std::forward<Args>(params)...);
    }

    template <typename... Args>
    void CallFastPath(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
    {
        CallEntrypoint<true>(inst, id, dstReg, preservedRegs, std::forward<Args>(params)...);
    }
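    // Illustrative use (a sketch; the entrypoint id and argument register are hypothetical):
    //   CallRuntime(inst, EntrypointId::CREATE_OBJECT_BY_CLASS, dstReg, RegMask::GetZeroMask(), classReg);
    // CallFastPath is identical except that the callee must be a fastpath entrypoint
    // (see the IS_FASTPATH assert in CallEntrypoint), which preserves registers more cheaply.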

    template <typename... Args>
    void CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dstReg, Args &&...params)
    {
        if (GetGraph()->IsAotMode()) {
            ScopedTmpReg methodReg(GetEncoder());
            LoadMethod(methodReg);
            CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), methodReg, std::forward<Args>(params)...);
        } else {
            if (Is64BitsArch(GetArch())) {
                CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), TypedImm(reinterpret_cast<uint64_t>(method)),
                            std::forward<Args>(params)...);
            } else {
                // uintptr_t causes problems on host cross-jit compilation
                CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), TypedImm(down_cast<uint32_t>(method)),
                            std::forward<Args>(params)...);
            }
        }
    }
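    // In AOT mode the method pointer is unknown at compile time, so it is loaded via
    // LoadMethod(); in JIT mode the pointer is baked into the code as an immediate.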

    void SaveRegistersForImplicitRuntime(Inst *inst, RegMask *paramsMask, RegMask *mask);

    void VisitNewArray(Inst *inst);

    void LoadClassFromObject(Reg classReg, Reg objReg);
    void VisitCallIndirect(CallIndirectInst *inst);
    void VisitCall(CallInst *inst);
    void CreateCallIntrinsic(IntrinsicInst *inst);
    void CreateMultiArrayCall(CallInst *callInst);
    void CreateNewObjCall(NewObjectInst *newObj);
    void CreateNewObjCallOld(NewObjectInst *newObj);
    void CreateMonitorCall(MonitorInst *inst);
    void CreateMonitorCallOld(MonitorInst *inst);
    void CreateCheckCastInterfaceCall(Inst *inst);
    void CreateNonDefaultInitClass(ClassInst *initInst);
    void CheckObject(Reg reg, LabelHolder::LabelId label);
    template <bool IS_CLASS = false>
    void CreatePreWRB(Inst *inst, MemRef mem, RegMask preserved = {}, bool storePair = false);
    void CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2 = INVALID_REGISTER);
    void CreatePostWRBForDynamic(Inst *inst, MemRef mem, Reg reg1, Reg reg2);
    void EncodePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool checkObject = true);
    void CreatePostInterRegionBarrier(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool checkObject);
    void CreatePostInterGenerationalBarrier(Reg base);
    // Creates a call to the IRtoC PostWrb entrypoint. Offline means AOT or IRtoC compilation, where the GC type
    // is not known, so the managed thread keeps a pointer to the actual IRtoC GC-barrier implementation at run time.
    void CreateOfflineIrtocPostWrb(Inst *inst, MemRef mem, Reg reg1, Reg reg2);
    // Creates a call to the IRtoC PostWrb entrypoint. Online means JIT compilation, where the GC type is known.
    void CreateOnlineIrtocPostWrbRegionTwoRegs(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool checkObject);
    void CreateOnlineIrtocPostWrbRegionOneReg(Inst *inst, MemRef mem, Reg reg1, bool checkObject);
    void CreateOnlineIrtocPostWrb(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool checkObject);
    template <typename... Args>
    void CallBarrier(RegMask liveRegs, VRegMask liveVregs, std::variant<EntrypointId, Reg> entrypoint, Args &&...params)
    {
        SaveCallerRegisters(liveRegs, liveVregs, true);
        FillCallParams(std::forward<Args>(params)...);
        EmitCallRuntimeCode(nullptr, entrypoint);
        LoadCallerRegisters(liveRegs, liveVregs, true);
    }

    void CreateLoadClassFromPLT(Inst *inst, Reg tmpReg, Reg dst, size_t classId);
    void CreateJumpToClassResolverPltShared(Inst *inst, Reg tmpReg, RuntimeInterface::EntrypointId id);
    void CreateLoadTLABInformation(Reg regTlabStart, Reg regTlabSize);
    void CreateCheckForTLABWithConstSize(Inst *inst, Reg regTlabStart, Reg regTlabSize, size_t size,
                                         LabelHolder::LabelId label);
    void CreateDebugRuntimeCallsForNewObject(Inst *inst, Reg regTlabStart, size_t allocSize, RegMask preserved);
    void CreateDebugRuntimeCallsForObjectClone(Inst *inst, Reg dst);
    void CallFastCreateStringFromCharArrayTlab(Inst *inst, Reg dst, Reg offset, Reg count, Reg array,
                                               std::variant<Reg, TypedImm> klass);
    void CreateReturn(const Inst *inst);
    template <typename T>
    void CreateUnaryCheck(Inst *inst, RuntimeInterface::EntrypointId id, DeoptimizeType type, Condition cc)
    {
        [[maybe_unused]] auto ss = inst->GetSaveState();
        ASSERT(ss != nullptr &&
               (ss->GetOpcode() == Opcode::SaveState || ss->GetOpcode() == Opcode::SaveStateDeoptimize));

        LabelHolder::LabelId slowPath;
        if (inst->CanDeoptimize()) {
            slowPath = CreateSlowPath<SlowPathDeoptimize>(inst, type)->GetLabel();
        } else {
            slowPath = CreateSlowPath<T>(inst, id)->GetLabel();
        }
        auto srcType = inst->GetInputType(0);
        auto src = ConvertRegister(inst->GetSrcReg(0), srcType);
        GetEncoder()->EncodeJump(slowPath, src, cc);
    }

    // Aligns up the value in alignmentReg, using tmpReg as scratch.
    void CreateAlignmentValue(Reg alignmentReg, Reg tmpReg, size_t alignment);
    void TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset);

    const CFrameLayout &GetFrameLayout() const
    {
        return frameLayout_;
    }

    bool RegisterKeepCallArgument(CallInst *callInst, Reg reg);

    void LoadMethod(Reg dst);
    void LoadFreeSlot(Reg dst);
    void StoreFreeSlot(Reg src);

    ssize_t GetStackOffset(Location location)
    {
        if (location.GetKind() == LocationType::STACK_ARGUMENT) {
            return location.GetValue() * GetFrameLayout().GetSlotSize();
        }

        if (location.GetKind() == LocationType::STACK_PARAMETER) {
            return GetFrameLayout().GetFrameSize<CFrameLayout::OffsetUnit::BYTES>() +
                   (location.GetValue() * GetFrameLayout().GetSlotSize());
        }

        ASSERT(location.GetKind() == LocationType::STACK);
        return GetFrameLayout().GetSpillOffsetFromSpInBytes(location.GetValue());
    }
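    // Worked example (assuming an 8-byte slot size): a STACK_ARGUMENT in slot 3 is at
    // 3 * 8 = 24 bytes from SP, while a STACK_PARAMETER in the same slot additionally
    // skips the current frame: GetFrameSize<BYTES>() + 24.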

    MemRef GetMemRefForSlot(Location location)
    {
        ASSERT(location.IsAnyStack());
        return MemRef(SpReg(), GetStackOffset(location));
    }

    Reg SpReg() const
    {
        return GetTarget().GetStackReg();
    }

    Reg FpReg() const
    {
        return GetTarget().GetFrameReg();
    }

    bool HasLiveCallerSavedRegs(Inst *inst);
    void SaveCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);
    void LoadCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);

    void IssueDisasm();

    // Initialize internal variables
    void Initialize();
    bool Finalize();

    const Disassembly *GetDisasm() const
    {
        return &disasm_;
    }

    Disassembly *GetDisasm()
    {
        return &disasm_;
    }

    void AddLiveOut(const BasicBlock *bb, const Register reg)
    {
        liveOuts_[bb].Set(reg);
    }

    RegMask GetLiveOut(const BasicBlock *bb) const
    {
        auto it = liveOuts_.find(bb);
        return it != liveOuts_.end() ? it->second : RegMask();
    }

    Reg ThreadReg() const
    {
        return Reg(GetThreadReg(GetArch()), GetTarget().GetPtrRegType());
    }

    static bool InstEncodedWithLibCall(const Inst *inst, Arch arch);

    void EncodeDynamicCast(Inst *inst, Reg dst, bool dstSigned, Reg src);

    Reg ConvertInstTmpReg(const Inst *inst, DataType::Type type) const;
    Reg ConvertInstTmpReg(const Inst *inst) const;

    bool OffsetFitReferenceTypeSize(uint64_t offset) const
    {
        // -1 because some architectures use signed offsets
        // NOLINTNEXTLINE(hicpp-signed-bitwise)
        uint64_t maxOffset = 1ULL << (DataType::GetTypeSize(DataType::REFERENCE, GetArch()) - 1);
        return offset < maxOffset;
    }
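    // Worked example: with 32-bit references maxOffset = 1ULL << 31, so offsets below
    // 2 GiB fit; the shift uses (size - 1) because of the signed-offset reservation above.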

protected:
    virtual void GeneratePrologue();
    virtual void GenerateEpilogue();

    // Main logic steps
    bool BeginMethod();
    bool VisitGraph();
    void EndMethod();
    bool CopyToCodeCache();
    void DumpCode();

    RegMask GetUsedRegs() const
    {
        return usedRegs_;
    }
    RegMask GetUsedVRegs() const
    {
        return usedVregs_;
    }

    template <typename... Args>
    void FillCallParams(Args &&...params);

    template <size_t IMM_ARRAY_SIZE, typename Arg, typename... Args>
    ALWAYS_INLINE inline void FillCallParamsHandleOperands(
        ParameterInfo *paramInfo, SpillFillInst *regMoves, ArenaVector<Reg> *spMoves,
        [[maybe_unused]] typename std::array<std::pair<Reg, Imm>, IMM_ARRAY_SIZE>::iterator immsIter, Arg &&arg,
        Args &&...params);

    void EmitJump(const BasicBlock *bb);

    bool EmitCallRuntimeCode(Inst *inst, std::variant<EntrypointId, Reg> entrypoint);

    void IntfInlineCachePass(ResolveVirtualInst *resolver, Reg methodReg, Reg tmpReg, Reg objReg);

    template <typename T>
    RuntimeInterface::MethodPtr GetCallerOfUnresolvedMethod(T *resolver);

    void EmitResolveVirtual(ResolveVirtualInst *resolver);
    void EmitResolveUnknownVirtual(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitResolveVirtualAot(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitCallVirtual(CallInst *call);
    void EmitCallResolvedVirtual(CallInst *call);
    void EmitCallStatic(CallInst *call);
    void EmitResolveStatic(ResolveStaticInst *resolver);
    void EmitCallResolvedStatic(CallInst *call);
    void EmitCallDynamic(CallInst *call);
    void FinalizeCall(CallInst *call);

    uint32_t GetVtableShift()
    {
        // The size of a VTable element equals the architecture's pointer size
        // (not the size of a pointer to an object)
        constexpr uint32_t SHIFT_64_BITS = 3;
        constexpr uint32_t SHIFT_32_BITS = 2;
        return Is64BitsArch(GetGraph()->GetArch()) ? SHIFT_64_BITS : SHIFT_32_BITS;
    }
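    // Worked example: on a 64-bit target a vtable entry is located at
    // vtableBase + (methodIndex << 3), i.e. methodIndex * 8 bytes.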

    void CalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void CreateBuiltinIntrinsic(IntrinsicInst *inst);
    static constexpr int32_t NUM_OF_SRC_BUILTIN = 6;
    static constexpr uint8_t FIRST_OPERAND = 0;
    static constexpr uint8_t SECOND_OPERAND = 1;
    static constexpr uint8_t THIRD_OPERAND = 2;
    static constexpr uint8_t FOURTH_OPERAND = 3;
    static constexpr uint8_t FIFTH_OPERAND = 4;
    using SRCREGS = std::array<Reg, NUM_OF_SRC_BUILTIN>;
    // implementation is generated with compiler/optimizer/templates/intrinsics/intrinsics_codegen.inl.erb
    void FillBuiltin(IntrinsicInst *inst, SRCREGS src, Reg dst);

    template <typename Arg, typename... Args>
    ALWAYS_INLINE inline void AddParamRegsInLiveMasksHandleArgs(ParameterInfo *paramInfo, RegMask *liveRegs,
                                                                VRegMask *liveVregs, Arg param, Args &&...params)
    {
        auto currDst = paramInfo->GetNativeParam(param.GetType());
        if (std::holds_alternative<Reg>(currDst)) {
            auto reg = std::get<Reg>(currDst);
            if (reg.IsScalar()) {
                liveRegs->set(reg.GetId());
            } else {
                liveVregs->set(reg.GetId());
            }
        } else {
            GetEncoder()->SetFalseResult();
            UNREACHABLE();
        }
        if constexpr (sizeof...(Args) != 0) {
            AddParamRegsInLiveMasksHandleArgs(paramInfo, liveRegs, liveVregs, std::forward<Args>(params)...);
        }
    }

    template <typename... Args>
    void AddParamRegsInLiveMasks(RegMask *liveRegs, VRegMask *liveVregs, Args &&...params)
    {
        auto callconv = GetCallingConvention();
        auto paramInfo = callconv->GetParameterInfo(0);
        AddParamRegsInLiveMasksHandleArgs(paramInfo, liveRegs, liveVregs, std::forward<Args>(params)...);
    }

    template <typename... Args>
    void CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst, Args &&...params)
    {
        VRegMask liveVregs;
        RegMask liveRegs;
        AddParamRegsInLiveMasks(&liveRegs, &liveVregs, params...);
        auto enc = GetEncoder();
        {
            SCOPED_DISASM_STR(this, "Save caller saved regs");
            SaveCallerRegisters(liveRegs, liveVregs, true);
        }

        FillCallParams(std::forward<Args>(params)...);
        CallIntrinsic(inst, intrinsicId);

        if (inst->GetSaveState() != nullptr) {
            CreateStackMap(inst);
        }

        if (dst.IsValid()) {
            Reg retVal = GetTarget().GetReturnReg(dst.GetType());
            if (dst.GetId() != retVal.GetId()) {
                enc->SetRegister(&liveRegs, &liveVregs, retVal, true);
            }
            ASSERT(dst.IsScalar());
            enc->EncodeMov(dst, retVal);
        }

        {
            SCOPED_DISASM_STR(this, "Restore caller saved regs");
            enc->SetRegister(&liveRegs, &liveVregs, dst, false);
            LoadCallerRegisters(liveRegs, liveVregs, true);
        }
    }

    ScopedTmpReg CalculatePreviousTLABAllocSize(Reg reg, LabelHolder::LabelId label);
    friend class IntrinsicCodegenTest;

    virtual void IntrinsicSlowPathEntry([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicCallRuntimeSaveAll([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicSaveRegisters([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicRestoreRegisters([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicTailCall([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }

#include "codegen_language_extensions.h"
#include "intrinsics_codegen.inl.h"

private:
    template <typename T>
    void EncodeImms(const T &imms, bool skipFirstLocation)
    {
        auto paramInfo = GetCallingConvention()->GetParameterInfo(0);
        auto immType = DataType::INT32;
        if (skipFirstLocation) {
            paramInfo->GetNextLocation(immType);
        }
        for (auto imm : imms) {
            auto location = paramInfo->GetNextLocation(immType);
            ASSERT(location.IsFixedRegister());
            auto dstReg = ConvertRegister(location.GetValue(), immType);
            GetEncoder()->EncodeMov(dstReg, Imm(imm));
        }
    }
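    // Moves each immediate into the next fixed parameter register of the calling convention;
    // skipFirstLocation skips the first slot when it is already occupied by another argument.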

    [[maybe_unused]] bool EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg, TypedImm>> params);

    template <typename... Args>
    void FillPostWrbCallParams(MemRef mem, Args &&...params);

private:
    ArenaAllocator *allocator_;
    ArenaAllocator *localAllocator_;
    // Register description
    RegistersDescription *regfile_;
    // Encoder implementation
    Encoder *enc_;
    // Target architecture calling convention model
    CallingConvention *callconv_;
    // Visitor for instructions
    GraphVisitor *visitor_ {};

    CodeInfoBuilder *codeBuilder_ {nullptr};

    ArenaVector<SlowPathBase *> slowPaths_;
    ArenaUnorderedMap<RuntimeInterface::EntrypointId, SlowPathShared *> slowPathsMap_;

    const CFrameLayout frameLayout_;  // NOLINT(readability-identifier-naming)

    ArenaVector<OsrEntryStub *> osrEntries_;

    RuntimeInterface::MethodId methodId_ {INVALID_ID};

    size_t startCodeOffset_ {0};

    ArenaVector<std::pair<int16_t, int16_t>> vregIndices_;

    RuntimeInterface *runtime_ {nullptr};

    LabelHolder::LabelId labelEntry_ {};
    LabelHolder::LabelId labelExit_ {};

    FrameInfo *frameInfo_ {nullptr};

    const Target target_;

    /* Registers that have been allocated by regalloc */
    RegMask usedRegs_ {0};
    RegMask usedVregs_ {0};

    /* Map of BasicBlock to live-out registers mask. It is needed during epilogue encoding to avoid
     * overwriting the live-out registers */
    ArenaUnorderedMap<const BasicBlock *, RegMask> liveOuts_;

    Disassembly disasm_;

    SpillFillsResolver spillFillsResolver_;

    friend class EncodeVisitor;
    friend class BaselineCodegen;
    friend class SlowPathJsCastDoubleToInt32;
};  // Codegen

class EncodeVisitor : public GraphVisitor {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit EncodeVisitor(Codegen *cg) : cg_(cg), arch_(cg->GetArch()) {}

    EncodeVisitor() = delete;

    const ArenaVector<BasicBlock *> &GetBlocksToVisit() const override
    {
        return cg_->GetGraph()->GetBlocksRPO();
    }
    Codegen *GetCodegen() const
    {
        return cg_;
    }
    Encoder *GetEncoder()
    {
        return cg_->GetEncoder();
    }
    Arch GetArch() const
    {
        return arch_;
    }
    CallingConvention *GetCallingConvention()
    {
        return cg_->GetCallingConvention();
    }

    RegistersDescription *GetRegfile()
    {
        return cg_->GetRegfile();
    }

    bool GetResult()
    {
        return success_ && cg_->GetEncoder()->GetResult();
    }

    // For each group of SpillFillData elements that represent spill or fill operations and share the same
    // source and destination types, orders them by stack slot number in descending order.
    static void SortSpillFillData(ArenaVector<SpillFillData> *spillFills);
    // Checks whether two spill-fill operations can be coalesced into a single operation over a pair of arguments.
    static bool CanCombineSpillFills(SpillFillData pred, SpillFillData succ, const CFrameLayout &fl,
                                     const Graph *graph);

protected:
    // UnaryOperation
    static void VisitMov(GraphVisitor *visitor, Inst *inst);
    static void VisitNeg(GraphVisitor *visitor, Inst *inst);
    static void VisitAbs(GraphVisitor *visitor, Inst *inst);
    static void VisitNot(GraphVisitor *visitor, Inst *inst);
    static void VisitSqrt(GraphVisitor *visitor, Inst *inst);

    // BinaryOperation
    static void VisitAdd(GraphVisitor *visitor, Inst *inst);
    static void VisitSub(GraphVisitor *visitor, Inst *inst);
    static void VisitMul(GraphVisitor *visitor, Inst *inst);
    static void VisitShl(GraphVisitor *visitor, Inst *inst);
    static void VisitAShr(GraphVisitor *visitor, Inst *inst);
    static void VisitAnd(GraphVisitor *visitor, Inst *inst);
    static void VisitOr(GraphVisitor *visitor, Inst *inst);
    static void VisitXor(GraphVisitor *visitor, Inst *inst);

    // Binary Overflow Operation
    static void VisitAddOverflow(GraphVisitor *v, Inst *inst);
    static void VisitAddOverflowCheck(GraphVisitor *v, Inst *inst);
    static void VisitSubOverflow(GraphVisitor *v, Inst *inst);
    static void VisitSubOverflowCheck(GraphVisitor *v, Inst *inst);
    static void VisitNegOverflowAndZeroCheck(GraphVisitor *v, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BINARY_IMM_OPERATION(opc) static void Visit##opc##I(GraphVisitor *visitor, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BINARY_IMM_OPS(DEF) DEF(Add) DEF(Sub) DEF(Shl) DEF(AShr) DEF(And) DEF(Or) DEF(Xor)

    BINARY_IMM_OPS(BINARY_IMM_OPERATION)

#undef BINARY_IMM_OPS
#undef BINARY_IMM_OPERATION

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BINARY_SIGN_UNSIGN_OPERATION(opc) static void Visit##opc(GraphVisitor *visitor, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SIGN_UNSIGN_OPS(DEF) DEF(Div) DEF(Mod) DEF(Min) DEF(Max) DEF(Shr)

    SIGN_UNSIGN_OPS(BINARY_SIGN_UNSIGN_OPERATION)

#undef SIGN_UNSIGN_OPS
#undef BINARY_SIGN_UNSIGN_OPERATION

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BINARY_SHIFTED_REGISTER_OPERATION_DEF(opc, ignored) \
    static void Visit##opc##SR(GraphVisitor *visitor, Inst *inst);

    ENCODE_INST_WITH_SHIFTED_OPERAND(BINARY_SHIFTED_REGISTER_OPERATION_DEF)

#undef BINARY_SHIFTED_REGISTER_OPERATION_DEF

    static void VisitShrI(GraphVisitor *visitor, Inst *inst);

    static void VisitCast(GraphVisitor *visitor, Inst *inst);

    static void VisitBitcast(GraphVisitor *visitor, Inst *inst);

    static void VisitPhi([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst);

    static void VisitConstant(GraphVisitor *visitor, Inst *inst);

    static void VisitNullPtr(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadUndefined(GraphVisitor *visitor, Inst *inst);

    static void VisitIf(GraphVisitor *visitor, Inst *inst);

    static void VisitIfImm(GraphVisitor *visitor, Inst *inst);

    static void VisitCompare(GraphVisitor *visitor, Inst *inst);

    static void VisitCmp(GraphVisitor *visitor, Inst *inst);

    // All the following visitors use the execution model for their implementation
    static void VisitReturnVoid(GraphVisitor *visitor, Inst * /* unused */);

    static void VisitReturn(GraphVisitor *visitor, Inst *inst);

    static void VisitReturnI(GraphVisitor *visitor, Inst *inst);

    static void VisitReturnInlined(GraphVisitor *visitor, Inst * /* unused */);

    static void VisitNewArray(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadConstArray(GraphVisitor *visitor, Inst *inst);

    static void VisitFillConstArray(GraphVisitor *visitor, Inst *inst);

    static void VisitParameter(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArray(GraphVisitor *visitor, Inst *inst);

    static void VisitSpillFill(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveState(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveStateDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveStateOsr(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArray(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadCompressedStringChar(GraphVisitor *visitor, Inst *inst);

    static void VisitLenArray(GraphVisitor *visitor, Inst *inst);

    static void VisitNullCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitBoundsCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitZeroCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitRefTypeCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitNegativeCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitNotPositiveCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadString(GraphVisitor *visitor, Inst *inst);

    static void VisitResolveObjectField(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadResolvedObjectField(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadObject(GraphVisitor *visitor, Inst *inst);

    static void VisitLoad(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreObject(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreResolvedObjectField(GraphVisitor *visitor, Inst *inst);

    static void VisitStore(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitResolveObjectFieldStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadResolvedObjectFieldStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedStoreStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreResolvedObjectFieldStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitNewObject(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadRuntimeClass(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadClass(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadAndInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitGetGlobalVarAddress(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadAndInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadType(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadType(GraphVisitor *visitor, Inst *inst);

    static void VisitCheckCast(GraphVisitor *visitor, Inst *inst);

    static void VisitIsInstance(GraphVisitor *visitor, Inst *inst);

    static void VisitMonitor(GraphVisitor *visitor, Inst *inst);

    static void VisitIntrinsic(GraphVisitor *visitor, Inst *inst);

    static void VisitBuiltin(GraphVisitor *visitor, Inst *inst);

    static void VisitBoundsCheckI(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadCompressedStringCharI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadI(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreI(GraphVisitor *visitor, Inst *inst);

    static void VisitMultiArray(GraphVisitor *visitor, Inst *inst);
    static void VisitInitEmptyString(GraphVisitor *visitor, Inst *inst);
    static void VisitInitString(GraphVisitor *visitor, Inst *inst);

    static void VisitCallStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitResolveStatic(GraphVisitor *visitor, Inst *inst);
    static void VisitCallResolvedStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitCallVirtual(GraphVisitor *visitor, Inst *inst);

    static void VisitResolveVirtual(GraphVisitor *visitor, Inst *inst);
    static void VisitCallResolvedVirtual(GraphVisitor *visitor, Inst *inst);

    static void VisitCallLaunchStatic(GraphVisitor *visitor, Inst *inst);
    static void VisitCallResolvedLaunchStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitCallLaunchVirtual(GraphVisitor *visitor, Inst *inst);
    static void VisitCallResolvedLaunchVirtual(GraphVisitor *visitor, Inst *inst);

    static void VisitCallDynamic(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadConstantPool(GraphVisitor *visitor, Inst *inst);
    static void VisitLoadLexicalEnv(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadFromConstantPool(GraphVisitor *visitor, Inst *inst);

    static void VisitSafePoint(GraphVisitor *visitor, Inst *inst);

    static void VisitSelect(GraphVisitor *visitor, Inst *inst);

    static void VisitSelectImm(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayPair(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayPairI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadPairPart(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayPair(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayPairI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadExclusive(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreExclusive(GraphVisitor *visitor, Inst *inst);

    static void VisitNOP(GraphVisitor *visitor, Inst *inst);

    static void VisitThrow(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeIf(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeCompare(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeCompareImm(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitIsMustDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitMAdd(GraphVisitor *visitor, Inst *inst);
    static void VisitMSub(GraphVisitor *visitor, Inst *inst);
    static void VisitMNeg(GraphVisitor *visitor, Inst *inst);
    static void VisitOrNot(GraphVisitor *visitor, Inst *inst);
    static void VisitAndNot(GraphVisitor *visitor, Inst *inst);
    static void VisitXorNot(GraphVisitor *visitor, Inst *inst);
    static void VisitNegSR(GraphVisitor *visitor, Inst *inst);

    static void VisitGetInstanceClass(GraphVisitor *visitor, Inst *inst);
    static void VisitGetManagedClassObject(GraphVisitor *visitor, Inst *inst);
    static void VisitLoadImmediate(GraphVisitor *visitor, Inst *inst);
    static void VisitFunctionImmediate(GraphVisitor *visitor, Inst *inst);
    static void VisitLoadObjFromConst(GraphVisitor *visitor, Inst *inst);
    static void VisitRegDef(GraphVisitor *visitor, Inst *inst);
    static void VisitLiveIn(GraphVisitor *visitor, Inst *inst);
    static void VisitLiveOut(GraphVisitor *visitor, Inst *inst);
    static void VisitCallIndirect(GraphVisitor *visitor, Inst *inst);
    static void VisitCall(GraphVisitor *visitor, Inst *inst);

    // Dynamic instructions
    static void VisitCompareAnyType(GraphVisitor *visitor, Inst *inst);
    static void VisitGetAnyTypeName(GraphVisitor *visitor, Inst *inst);
    static void VisitCastAnyTypeValue(GraphVisitor *visitor, Inst *inst);
    static void VisitCastValueToAnyType(GraphVisitor *visitor, Inst *inst);
    static void VisitAnyTypeCheck(GraphVisitor *visitor, Inst *inst);
    static void VisitHclassCheck(GraphVisitor *visitor, Inst *inst);
    static void VisitObjByIndexCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadObjectDynamic(GraphVisitor *visitor, Inst *inst);
    static void VisitStoreObjectDynamic(GraphVisitor *visitor, Inst *inst);

    void VisitDefault([[maybe_unused]] Inst *inst) override
    {
#ifndef NDEBUG
        COMPILER_LOG(DEBUG, CODEGEN) << "Can't encode instruction " << GetOpcodeString(inst->GetOpcode())
                                     << " with type " << DataType::ToString(inst->GetType());
#endif
        success_ = false;
    }

    // Helper functions
    static void FillUnresolvedClass(GraphVisitor *visitor, Inst *inst);
    static void FillObjectClass(GraphVisitor *visitor, Reg tmpReg, LabelHolder::LabelId throwLabel);
    static void FillOtherClass(GraphVisitor *visitor, Inst *inst, Reg tmpReg, LabelHolder::LabelId throwLabel);
    static void FillArrayObjectClass(GraphVisitor *visitor, Reg tmpReg, LabelHolder::LabelId throwLabel);
    static void FillArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmpReg, LabelHolder::LabelId throwLabel);
    static void FillInterfaceClass(GraphVisitor *visitor, Inst *inst);

    static void FillLoadClassUnresolved(GraphVisitor *visitor, Inst *inst);

    static void FillCheckCast(GraphVisitor *visitor, Inst *inst, Reg src, LabelHolder::LabelId endLabel,
                              compiler::ClassType klassType);

    static void FillIsInstanceUnresolved(GraphVisitor *visitor, Inst *inst);

    static void FillIsInstanceCaseObject(GraphVisitor *visitor, Inst *inst, Reg tmpReg);

    static void FillIsInstanceCaseOther(GraphVisitor *visitor, Inst *inst, Reg tmpReg, LabelHolder::LabelId endLabel);

    static void FillIsInstanceCaseArrayObject(GraphVisitor *visitor, Inst *inst, Reg tmpReg,
                                              LabelHolder::LabelId endLabel);

    static void FillIsInstanceCaseArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmpReg,
                                             LabelHolder::LabelId endLabel);

    static void FillIsInstanceCaseInterface(GraphVisitor *visitor, Inst *inst);

    static void FillIsInstance(GraphVisitor *visitor, Inst *inst, Reg tmpReg, LabelHolder::LabelId endLabel);

#include "optimizer/ir/visitor.inc"

private:
    Codegen *cg_;
    Arch arch_;
    bool success_ {true};
};  // EncodeVisitor

template <size_t IMM_ARRAY_SIZE, typename Arg, typename... Args>
ALWAYS_INLINE inline void Codegen::FillCallParamsHandleOperands(
    ParameterInfo *paramInfo, SpillFillInst *regMoves, ArenaVector<Reg> *spMoves,
    [[maybe_unused]] typename std::array<std::pair<Reg, Imm>, IMM_ARRAY_SIZE>::iterator immsIter, Arg &&arg,
    Args &&...params)
{
    Location dst;
    auto type = arg.GetType().ToDataType();
    dst = paramInfo->GetNextLocation(type);
    if (dst.IsStackArgument()) {
        GetEncoder()->SetFalseResult();
        UNREACHABLE();  // Move to BoundaryFrame
    }

    static_assert(std::is_same_v<std::decay_t<Arg>, TypedImm> || std::is_convertible_v<Arg, Reg>);
    if constexpr (std::is_same_v<std::decay_t<Arg>, TypedImm>) {
        auto reg = ConvertRegister(dst.GetValue(), type);
        *immsIter = {reg, arg.GetImm()};
        immsIter++;
    } else {
        Reg reg(std::forward<Arg>(arg));
        if (reg == SpReg()) {
            // SP should be handled separately, since on the ARM64 target its ID is out of range
            spMoves->emplace_back(ConvertRegister(dst.GetValue(), type));
        } else {
            regMoves->AddSpillFill(Location::MakeRegister(reg.GetId(), type), dst, type);
        }
    }
    if constexpr (sizeof...(Args) != 0) {
        FillCallParamsHandleOperands<IMM_ARRAY_SIZE>(paramInfo, regMoves, spMoves, immsIter,
                                                     std::forward<Args>(params)...);
    }
}

template <typename T, typename... Args>
constexpr std::pair<size_t, size_t> CountParameters()
{
    static_assert(std::is_same_v<std::decay_t<T>, TypedImm> != std::is_convertible_v<T, Reg>);
    if constexpr (sizeof...(Args) != 0) {
        constexpr auto IMM_REG_COUNT = CountParameters<Args...>();

        if constexpr (std::is_same_v<std::decay_t<T>, TypedImm>) {
            return {IMM_REG_COUNT.first + 1, IMM_REG_COUNT.second};
        } else if constexpr (std::is_convertible_v<T, Reg>) {
            return {IMM_REG_COUNT.first, IMM_REG_COUNT.second + 1};
        }
    }
    return {std::is_same_v<std::decay_t<T>, TypedImm>, std::is_convertible_v<T, Reg>};
}
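// Worked example: CountParameters<TypedImm, Reg, Reg>() evaluates to {1, 2} at compile
// time: one immediate and two register parameters.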

template <typename... Args>
void Codegen::FillCallParams(Args &&...params)
{
    SCOPED_DISASM_STR(this, "FillCallParams");
    if constexpr (sizeof...(Args) != 0) {
        constexpr size_t IMMEDIATES_COUNT = CountParameters<Args...>().first;
        constexpr size_t REGS_COUNT = CountParameters<Args...>().second;
        // Native call - do not add reserve parameters
        auto paramInfo = GetCallingConvention()->GetParameterInfo(0);
        std::array<std::pair<Reg, Imm>, IMMEDIATES_COUNT> immediates {};
        ArenaVector<Reg> spMoves(GetLocalAllocator()->Adapter());
        auto regMoves = GetGraph()->CreateInstSpillFill();
        spMoves.reserve(REGS_COUNT);
        regMoves->GetSpillFills().reserve(REGS_COUNT);

        FillCallParamsHandleOperands<IMMEDIATES_COUNT>(paramInfo, regMoves, &spMoves, immediates.begin(),
                                                       std::forward<Args>(params)...);

        // Resolve the register move order and encode
        spillFillsResolver_.ResolveIfRequired(regMoves);
        SpillFillEncoder(this, regMoves).EncodeSpillFill();

        // Encode immediate moves
        for (auto &immValues : immediates) {
            GetEncoder()->EncodeMov(immValues.first, immValues.second);
        }

        // Encode moves from the SP register
        for (auto dst : spMoves) {
            GetEncoder()->EncodeMov(dst, SpReg());
        }
    }
}
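// Design note: register-to-register moves go through the spill-fill resolver so that
// overlapping moves are correctly ordered (and cycles broken) before encoding; immediates
// and SP copies are emitted afterwards, when no pending register move can still read
// their destination registers.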

template <typename... Args>
void Codegen::FillPostWrbCallParams(MemRef mem, Args &&...params)
{
    auto base {mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, GetArch()))};
    if (mem.HasIndex()) {
        ASSERT(mem.GetScale() == 0 && !mem.HasDisp());
        FillCallParams(base, mem.GetIndex(), std::forward<Args>(params)...);
    } else {
        FillCallParams(base, TypedImm(mem.GetDisp()), std::forward<Args>(params)...);
    }
}

}  // namespace panda::compiler

#endif  // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H