/*
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
#define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H

/*
Codegen interface for compiler
! Do not use this file in runtime
*/

#include <tuple>
#include "code_info/code_info_builder.h"
#include "compiler_logger.h"
#include "disassembly.h"
#include "frame_info.h"
#include "optimizer/analysis/live_registers.h"
#include "optimizer/code_generator/callconv.h"
#include "optimizer/code_generator/encode.h"
#include "optimizer/code_generator/scoped_tmp_reg.h"
#include "optimizer/code_generator/registers_description.h"
#include "optimizer/code_generator/slow_path.h"
#include "optimizer/code_generator/spill_fill_encoder.h"
#include "optimizer/code_generator/target_info.h"
#include "optimizer/ir/analysis.h"
#include "optimizer/ir/graph.h"
#include "optimizer/ir/graph_visitor.h"
#include "optimizer/optimizations/regalloc/spill_fills_resolver.h"
#include "optimizer/pass_manager.h"
#include "utils/cframe_layout.h"

namespace ark::compiler {
// Maximum size in bytes reserved for a single slow-path code sequence.
constexpr size_t SLOW_PATH_SIZE = 64;

// Forward declarations to keep this header light.
class Encoder;
class CodeBuilder;
class OsrEntryStub;

52 // CC-OFFNXT(G.FUD.06) big switch-case
IrTypeToMetainfoType(DataType::Type type)53 inline VRegInfo::Type IrTypeToMetainfoType(DataType::Type type)
54 {
55     switch (type) {
56         case DataType::UINT64:
57         case DataType::INT64:
58             return VRegInfo::Type::INT64;
59         case DataType::ANY:
60             return VRegInfo::Type::ANY;
61         case DataType::UINT32:
62         case DataType::UINT16:
63         case DataType::UINT8:
64         case DataType::INT32:
65         case DataType::INT16:
66         case DataType::INT8:
67             return VRegInfo::Type::INT32;
68         case DataType::FLOAT64:
69             return VRegInfo::Type::FLOAT64;
70         case DataType::FLOAT32:
71             return VRegInfo::Type::FLOAT32;
72         case DataType::BOOL:
73             return VRegInfo::Type::BOOL;
74         case DataType::REFERENCE:
75             return VRegInfo::Type::OBJECT;
76         default:
77             UNREACHABLE();
78     }
79 }
80 
/// Code-generation pass: visits the IR graph and emits native code through the
/// target Encoder, building stack maps, slow paths and frame information along
/// the way. Compiler-internal interface — do not use from runtime code.
class Codegen : public Optimization {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit Codegen(Graph *graph);
    NO_MOVE_SEMANTIC(Codegen);
    NO_COPY_SEMANTIC(Codegen);

    ~Codegen() override = default;

    // Optimization pass interface.
    bool RunImpl() override;
    const char *GetPassName() const override;
    bool AbortIfFailed() const override;

    /// Convenience wrapper that runs the codegen pass over 'graph'.
    static bool Run(Graph *graph);

    // Simple accessors for the pass infrastructure objects.
    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    ArenaAllocator *GetLocalAllocator() const
    {
        return localAllocator_;
    }
    FrameInfo *GetFrameInfo() const
    {
        return frameInfo_;
    }
    void SetFrameInfo(FrameInfo *frameInfo)
    {
        frameInfo_ = frameInfo;
    }
    virtual void CreateFrameInfo();

    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    RegistersDescription *GetRegfile() const
    {
        return regfile_;
    }
    Encoder *GetEncoder() const
    {
        return enc_;
    }
    CallingConvention *GetCallingConvention() const
    {
        return callconv_;
    }

    GraphVisitor *GetGraphVisitor() const
    {
        return visitor_;
    }

    // Labels marking the method's entry and common exit points.
    LabelHolder::LabelId GetLabelEntry() const
    {
        return labelEntry_;
    }

    LabelHolder::LabelId GetLabelExit() const
    {
        return labelExit_;
    }

    RuntimeInterface::MethodId GetMethodId()
    {
        return methodId_;
    }

    // Offset of the generated code start; set during emission.
    void SetStartCodeOffset(size_t offset)
    {
        startCodeOffset_ = offset;
    }

    size_t GetStartCodeOffset() const
    {
        return startCodeOffset_;
    }

    void Convert(ArenaVector<Reg> *regsUsage, const ArenaVector<bool> *mask, TypeInfo typeInfo);

    /// Converts an IR register number to a target Reg of the given data type.
    Reg ConvertRegister(Register r, DataType::Type type = DataType::Type::INT64);

    /// Returns a tuple of the first SRC_REGS_COUNT source registers of 'inst',
    /// each converted via ConvertRegister. Recursion terminates at the
    /// explicit specialization for 0 defined below the class.
    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertSrcRegisters(Inst *inst)
    {
        auto lastTuple = std::make_tuple(ConvertRegister(inst->GetSrcReg(SRC_REGS_COUNT - 1), inst->GetType()));
        return std::tuple_cat(ConvertSrcRegisters<SRC_REGS_COUNT - 1>(inst), lastTuple);
    }

    /// Same as ConvertSrcRegisters, but with the destination register prepended.
    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertRegisters(Inst *inst)
    {
        auto dstTuple = std::make_tuple(ConvertRegister(inst->GetDstReg(), inst->GetType()));
        return std::tuple_cat(dstTuple, ConvertSrcRegisters<SRC_REGS_COUNT>(inst));
    }

    Imm ConvertImmWithExtend(uint64_t imm, DataType::Type type);

    // Condition-code conversion from IR to encoder form (plain and overflow-checking).
    Condition ConvertCc(ConditionCode cc);
    Condition ConvertCcOverflow(ConditionCode cc);

    static inline TypeInfo ConvertDataType(DataType::Type type, Arch arch)
    {
        return TypeInfo::FromDataType(type, arch);
    }

    Arch GetArch() const
    {
        return GetTarget().GetArch();
    }

    Target GetTarget() const
    {
        return target_;
    }

    TypeInfo GetPtrRegType() const
    {
        return target_.GetPtrRegType();
    }

    CodeInfoBuilder *GetCodeBuilder() const
    {
        return codeBuilder_;
    }

    bool IsCompressedStringsEnabled() const
    {
        return runtime_->IsCompressedStringsEnabled();
    }

    // Stack-map / vreg-map construction (used for deoptimization and GC roots).
    void CreateStackMap(Inst *inst, Inst *user = nullptr);

    void CreateStackMapRec(SaveStateInst *saveState, bool requireVregMap, Inst *targetSite);
    void CreateVRegMap(SaveStateInst *saveState, size_t vregsCount, Inst *targetSite);
    void CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg);
    void FillVregIndices(SaveStateInst *saveState);

    void CreateOsrEntry(SaveStateInst *saveState);

    void CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg);

    /// LIVE_INPUTS shows that inst's source registers should be added to the mask
    template <bool LIVE_INPUTS = false>
    std::pair<RegMask, VRegMask> GetLiveRegisters(Inst *inst);
    // Limits live register set to a number of registers used to pass parameters to the runtime or fastpath call:
    // 1) these ones are saved/restored by caller
    // 2) the remaining ones are saved/restored by the bridge function (aarch only) or by fastpath prologue/epilogue
    void FillOnlyParameters(RegMask *liveRegs, uint32_t numParams, bool isFastpath) const;

    /// Creates a slow path of type T attached to 'inst'; emitted later by EmitSlowPaths.
    template <typename T, typename... Args>
    T *CreateSlowPath(Inst *inst, Args &&...args);

    void EmitSlowPaths();

    /**
     * Insert tracing code to the generated code. See `Trace` method in the `runtime/entrypoints.cpp`.
     * NOTE(compiler): we should rework parameters assigning algorithm, that is duplicated here.
     * @param params parameters to be passed to the TRACE entrypoint, first parameter must be TraceId value.
     */
    template <typename... Args>
    void InsertTrace(Args &&...params);
#if defined(EVENT_METHOD_ENTER_ENABLED) && EVENT_METHOD_ENTER_ENABLED != 0
    void MakeTrace();
#endif
    void CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id);

    /// Emits a call to a runtime entrypoint; IS_FASTPATH selects the fastpath convention.
    template <bool IS_FASTPATH, typename... Args>
    void CallEntrypoint(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);

    /// Restores caller-saved registers after an entrypoint call; for instructions
    /// with an implicit runtime call, also drops the parameter registers from the
    /// save state's roots mask.
    void CallEntrypointFinalize(RegMask &liveRegs, RegMask &paramsMask, Inst *inst)
    {
        LoadCallerRegisters(liveRegs, VRegMask(), true);

        if (!inst->HasImplicitRuntimeCall()) {
            return;
        }
        for (auto i = 0U; i < paramsMask.size(); i++) {
            if (paramsMask.test(i)) {
                inst->GetSaveState()->GetRootsRegsMask().reset(i);
            }
        }
    }

    // The function is used for calling runtime functions through special bridges.
    // !NOTE Don't use the function for calling runtime without bridges (it saves only parameters on the stack)
    template <typename... Args>
    void CallRuntime(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
    template <typename... Args>
    void CallFastPath(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
    template <typename... Args>
    void CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dstReg, Args &&...params);
    void SaveRegistersForImplicitRuntime(Inst *inst, RegMask *paramsMask, RegMask *mask);

    void VisitNewArray(Inst *inst);

    void LoadClassFromObject(Reg classReg, Reg objReg);
    void VisitCallIndirect(CallIndirectInst *inst);
    void VisitCall(CallInst *inst);
    void CreateCallIntrinsic(IntrinsicInst *inst);
    void CreateMultiArrayCall(CallInst *callInst);
    void CreateNewObjCall(NewObjectInst *newObj);
    void CreateNewObjCallOld(NewObjectInst *newObj);
    void CreateMonitorCall(MonitorInst *inst);
    void CreateMonitorCallOld(MonitorInst *inst);
    void CreateCheckCastInterfaceCall(Inst *inst);
    void CreateNonDefaultInitClass(ClassInst *initInst);
    void CheckObject(Reg reg, LabelHolder::LabelId label);
    // GC write barriers: pre-barrier before the store, post-barrier after it.
    template <bool IS_CLASS = false>
    void CreatePreWRB(Inst *inst, MemRef mem, RegMask preserved = {}, bool storePair = false);
    void CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2 = INVALID_REGISTER, RegMask preserved = {});
    void CreatePostWRBForDynamic(Inst *inst, MemRef mem, Reg reg1, Reg reg2, RegMask preserved = {});
    template <typename... Args>
    void CallBarrier(RegMask liveRegs, VRegMask liveVregs, std::variant<EntrypointId, Reg> entrypoint,
                     Args &&...params);
    void CreateLoadClassFromPLT(Inst *inst, Reg tmpReg, Reg dst, size_t classId);
    void CreateJumpToClassResolverPltShared(Inst *inst, Reg tmpReg, RuntimeInterface::EntrypointId id);
    void CreateLoadTLABInformation(Reg regTlabStart, Reg regTlabSize);
    void CreateCheckForTLABWithConstSize(Inst *inst, Reg regTlabStart, Reg regTlabSize, size_t size,
                                         LabelHolder::LabelId label);
    void CreateDebugRuntimeCallsForNewObject(Inst *inst, Reg regTlabStart, size_t allocSize, RegMask preserved);
    void CreateDebugRuntimeCallsForObjectClone(Inst *inst, Reg dst);
    void CreateReturn(const Inst *inst);
    template <typename T>
    void CreateUnaryCheck(Inst *inst, RuntimeInterface::EntrypointId id, DeoptimizeType type, Condition cc);

    // The function aligns up the value from alignmentReg using tmpReg.
    void CreateAlignmentValue(Reg alignmentReg, Reg tmpReg, size_t alignment);
    void TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset);

    const CFrameLayout &GetFrameLayout() const
    {
        return frameLayout_;
    }

    bool RegisterKeepCallArgument(CallInst *callInst, Reg reg);

    void LoadMethod(Reg dst);
    void LoadFreeSlot(Reg dst);
    void StoreFreeSlot(Reg src);

    // Frame-slot addressing helpers.
    ssize_t GetStackOffset(Location location);
    ssize_t GetBaseOffset(Location location);
    MemRef GetMemRefForSlot(Location location);
    Reg SpReg() const;
    Reg FpReg() const;

    bool HasLiveCallerSavedRegs(Inst *inst);
    void SaveCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);
    void LoadCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);

    // Initialization internal variables
    void Initialize();
    bool Finalize();
    void IssueDisasm();
    const Disassembly *GetDisasm() const;
    Disassembly *GetDisasm();
    void AddLiveOut(const BasicBlock *bb, const Register reg);
    RegMask GetLiveOut(const BasicBlock *bb) const;

    Reg ThreadReg() const;
    static bool InstEncodedWithLibCall(const Inst *inst, Arch arch);

    void EncodeDynamicCast(Inst *inst, Reg dst, bool dstSigned, Reg src);

    PANDA_PUBLIC_API Reg ConvertInstTmpReg(const Inst *inst, DataType::Type type) const;
    Reg ConvertInstTmpReg(const Inst *inst) const;

    bool OffsetFitReferenceTypeSize(uint64_t offset) const;

protected:
    virtual void GeneratePrologue();
    virtual void GenerateEpilogue();

    // Main logic steps
    bool BeginMethod();
    bool VisitGraph();
    void EndMethod();
    bool CopyToCodeCache();
    void DumpCode();

    RegMask GetUsedRegs() const;
    RegMask GetUsedVRegs() const;

    template <typename... Args>
    void FillCallParams(Args &&...params);

    template <size_t IMM_ARRAY_SIZE>
    class FillCallParamsHelper;

    void EmitJump(const BasicBlock *bb);
    bool EmitCallRuntimeCode(Inst *inst, std::variant<EntrypointId, Reg> entrypoint);

    void IntfInlineCachePass(ResolveVirtualInst *resolver, Reg methodReg, Reg tmpReg, Reg objReg);

    template <typename T>
    RuntimeInterface::MethodPtr GetCallerOfUnresolvedMethod(T *resolver);

    // Call-emission helpers for the various call kinds.
    void EmitResolveVirtual(ResolveVirtualInst *resolver);
    void EmitResolveUnknownVirtual(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitResolveVirtualAot(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitCallVirtual(CallInst *call);
    void EmitCallResolvedVirtual(CallInst *call);
    void EmitCallStatic(CallInst *call);
    void EmitResolveStatic(ResolveStaticInst *resolver);
    void EmitCallResolvedStatic(CallInst *call);
    void EmitCallDynamic(CallInst *call);
    void EmitCallNative(CallInst *call);
    void FinalizeCall(CallInst *call);

    uint32_t GetVtableShift();
    void CalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void CreateBuiltinIntrinsic(IntrinsicInst *inst);
    // Operand-slot constants for builtin intrinsics.
    static constexpr int32_t NUM_OF_SRC_BUILTIN = 6;
    static constexpr uint8_t FIRST_OPERAND = 0;
    static constexpr uint8_t SECOND_OPERAND = 1;
    static constexpr uint8_t THIRD_OPERAND = 2;
    static constexpr uint8_t FOURTH_OPERAND = 3;
    static constexpr uint8_t FIFTH_OPERAND = 4;
    using SRCREGS = std::array<Reg, NUM_OF_SRC_BUILTIN>;
    // implementation is generated with compiler/optimizer/templates/intrinsics/intrinsics_codegen.inl.erb
    void FillBuiltin(IntrinsicInst *inst, SRCREGS src, Reg dst);

    template <typename Arg, typename... Args>
    ALWAYS_INLINE inline void AddParamRegsInLiveMasksHandleArgs(ParameterInfo *paramInfo, RegMask *liveRegs,
                                                                VRegMask *liveVregs, Arg param, Args &&...params);

    template <typename... Args>
    void AddParamRegsInLiveMasks(RegMask *liveRegs, VRegMask *liveVregs, Args &&...params);
    template <typename... Args>
    void CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst, Args &&...params);

    ScopedTmpReg CalculatePreviousTLABAllocSize(Reg reg, LabelHolder::LabelId label);
    friend class IntrinsicCodegenTest;

    void CreateStringFromCharArrayTlab(Inst *inst, Reg dst, SRCREGS src);
// In-class includes inject generated per-language extension and intrinsic members.
#include "codegen_language_extensions.h"
#include "intrinsics_codegen.inl.h"

private:
    template <typename T>
    void EncodeImms(const T &imms, bool skipFirstLocation);

    static bool EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg, TypedImm>> params);

    template <typename... Args>
    void FillPostWrbCallParams(MemRef mem, Args &&...params);

    void EmitAtomicByteOr(Reg addr, Reg value);

private:
    ArenaAllocator *allocator_;
    ArenaAllocator *localAllocator_;
    // Register description
    RegistersDescription *regfile_;
    // Encoder implementation
    Encoder *enc_;
    // Target architecture calling convention model
    CallingConvention *callconv_;
    // Current execution model implementation
    // Visitor for instructions
    GraphVisitor *visitor_ {};
    CodeInfoBuilder *codeBuilder_ {nullptr};

    ArenaVector<SlowPathBase *> slowPaths_;
    ArenaUnorderedMap<RuntimeInterface::EntrypointId, SlowPathShared *> slowPathsMap_;

    const CFrameLayout frameLayout_;
    FrameInfo *frameInfo_ {nullptr};
    ArenaVector<OsrEntryStub *> osrEntries_;
    RuntimeInterface::MethodId methodId_ {INVALID_ID};
    size_t startCodeOffset_ {0};
    ArenaVector<std::pair<int16_t, int16_t>> vregIndices_;

    RuntimeInterface *runtime_ {nullptr};

    LabelHolder::LabelId labelEntry_ {};
    LabelHolder::LabelId labelExit_ {};

    const Target target_;

    /* Registers that have been allocated by regalloc */
    RegMask usedRegs_ {0};
    RegMask usedVregs_ {0};
    /* Map of BasicBlock to live-out registers mask. It is needed in epilogue encoding to avoid overwriting of the
     * live-out registers */
    ArenaUnorderedMap<const BasicBlock *, RegMask> liveOuts_;

    Disassembly disasm_;
    SpillFillsResolver spillFillsResolver_;

    friend class EncodeVisitor;
    friend class BaselineCodegen;
    friend class SlowPathJsCastDoubleToInt32;
    friend class PostWriteBarrier;
};  // Codegen

481 template <>
482 constexpr auto Codegen::ConvertSrcRegisters<0>([[maybe_unused]] Inst *inst)
483 {
484     return std::make_tuple();
485 }
486 
487 // PostWriteBarrier
488 class PostWriteBarrier {
489 public:
490     PostWriteBarrier() = delete;
PostWriteBarrier(Codegen * cg,Inst * inst)491     PostWriteBarrier(Codegen *cg, Inst *inst) : cg_(cg), inst_(inst)
492     {
493         ASSERT(cg_ != nullptr);
494         ASSERT(inst_ != nullptr);
495         type_ = cg_->GetRuntime()->GetPostType();
496     }
497     DEFAULT_MOVE_SEMANTIC(PostWriteBarrier);
498     DEFAULT_COPY_SEMANTIC(PostWriteBarrier);
499     ~PostWriteBarrier() = default;
500 
501     void Encode(MemRef mem, Reg reg1, Reg reg2, bool checkObject = true, RegMask preserved = {});
502 
503 private:
504     static constexpr auto BARRIER_POSITION = ark::mem::BarrierPosition::BARRIER_POSITION_POST;
505     Codegen *cg_;
506     Inst *inst_;
507     ark::mem::BarrierType type_;
508 
509     struct Args {
510         MemRef mem;
511         Reg reg1;
512         Reg reg2;
513         RegMask preserved;
514         bool checkObject = true;
515     };
516 
517     void EncodeInterRegionBarrier(Args args);
518     void EncodeInterGenerationalBarrier(Reg base);
519     // Creates call to IRtoC PostWrb Entrypoint. Offline means AOT or IRtoC compilation -> type of GC is not known.
520     // So Managed Thread keeps pointer to actual IRtoC GC barriers implementation at run-time.
521     void EncodeOfflineIrtocBarrier(Args args);
522     // Creates call to IRtoC PostWrb Entrypoint. Online means JIT compilation -> we know GC type.
523     void EncodeOnlineIrtocBarrier(Args args);
524     void EncodeOnlineIrtocRegionTwoRegsBarrier(Args args);
525     void EncodeOnlineIrtocRegionOneRegBarrier(Args args);
526 
527     // Auxillary methods
528     void EncodeCalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
529     void EncodeCheckObject(Reg base, Reg reg1, LabelHolder::LabelId skipLabel, bool checkNull);
530     void EncodeWrapOneArg(Reg param, Reg base, MemRef mem, size_t additionalOffset = 0);
531 
532     template <typename T>
GetBarrierOperandValue(ark::mem::BarrierPosition position,std::string_view name)533     T GetBarrierOperandValue(ark::mem::BarrierPosition position, std::string_view name)
534     {
535         auto operand = cg_->GetRuntime()->GetBarrierOperand(position, name);
536         return std::get<T>(operand.GetValue());
537     }
538 
539     template <typename... Args>
FillCallParams(MemRef mem,Args &&...params)540     void FillCallParams(MemRef mem, Args &&...params)
541     {
542         auto base {mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()))};
543         if (mem.HasIndex()) {
544             ASSERT(mem.GetScale() == 0 && !mem.HasDisp());
545             cg_->FillCallParams(base, mem.GetIndex(), std::forward<Args>(params)...);
546         } else {
547             cg_->FillCallParams(base, TypedImm(mem.GetDisp()), std::forward<Args>(params)...);
548         }
549     }
550 
HasObject2(const Args & args)551     bool HasObject2(const Args &args) const
552     {
553         ASSERT(args.reg1.IsValid());
554         return args.reg2.IsValid() && args.reg1 != args.reg2;
555     }
556 
GetBase(const Args & args)557     Reg GetBase(const Args &args) const
558     {
559         return args.mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()));
560     }
561 
GetParamRegs(const size_t paramsNumber,const Args & args)562     RegMask GetParamRegs(const size_t paramsNumber, const Args &args) const
563     {
564         auto paramRegs {cg_->GetTarget().GetParamRegsMask(paramsNumber) & cg_->GetLiveRegisters(inst_).first};
565         return (paramRegs | args.preserved);
566     }
567 };  // PostWriteBarrier
568 
}  // namespace ark::compiler

#include "codegen-inl.h"

#endif  // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H