/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
#define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H

/*
Codegen interface for compiler
! Do not use this file in runtime
*/

#include <tuple>
#include "code_info/code_info_builder.h"
#include "compiler_logger.h"
#include "disassembly.h"
#include "frame_info.h"
#include "optimizer/analysis/live_registers.h"
#include "optimizer/code_generator/callconv.h"
#include "optimizer/code_generator/encode.h"
#include "optimizer/code_generator/scoped_tmp_reg.h"
#include "optimizer/code_generator/registers_description.h"
#include "optimizer/code_generator/slow_path.h"
#include "optimizer/code_generator/spill_fill_encoder.h"
#include "optimizer/code_generator/target_info.h"
#include "optimizer/ir/analysis.h"
#include "optimizer/ir/graph.h"
#include "optimizer/ir/graph_visitor.h"
#include "optimizer/optimizations/regalloc/spill_fills_resolver.h"
#include "optimizer/pass_manager.h"
#include "utils/cframe_layout.h"

namespace ark::compiler {
// Maximum slow-path code size in bytes
constexpr size_t SLOW_PATH_SIZE = 64;

class Encoder;
class CodeBuilder;
class OsrEntryStub;

// CC-OFFNXT(G.FUD.06) big switch-case
inline VRegInfo::Type IrTypeToMetainfoType(DataType::Type type)
{
    switch (type) {
        case DataType::UINT64:
        case DataType::INT64:
            return VRegInfo::Type::INT64;
        case DataType::ANY:
            return VRegInfo::Type::ANY;
        case DataType::UINT32:
        case DataType::UINT16:
        case DataType::UINT8:
        case DataType::INT32:
        case DataType::INT16:
        case DataType::INT8:
            return VRegInfo::Type::INT32;
        case DataType::FLOAT64:
            return VRegInfo::Type::FLOAT64;
        case DataType::FLOAT32:
            return VRegInfo::Type::FLOAT32;
        case DataType::BOOL:
            return VRegInfo::Type::BOOL;
        case DataType::REFERENCE:
            return VRegInfo::Type::OBJECT;
        default:
            UNREACHABLE();
    }
}
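
// An illustration of the mapping above: all integer IR types of 32 bits or
// narrower collapse to a single INT32 metainfo type, e.g.
//   IrTypeToMetainfoType(DataType::UINT16);    // -> VRegInfo::Type::INT32
//   IrTypeToMetainfoType(DataType::REFERENCE); // -> VRegInfo::Type::OBJECT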

class Codegen : public Optimization {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit Codegen(Graph *graph);
    NO_MOVE_SEMANTIC(Codegen);
    NO_COPY_SEMANTIC(Codegen);

    ~Codegen() override = default;

    bool RunImpl() override;
    const char *GetPassName() const override;
    bool AbortIfFailed() const override;

    static bool Run(Graph *graph);

    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    ArenaAllocator *GetLocalAllocator() const
    {
        return localAllocator_;
    }
    FrameInfo *GetFrameInfo() const
    {
        return frameInfo_;
    }
    void SetFrameInfo(FrameInfo *frameInfo)
    {
        frameInfo_ = frameInfo;
    }
    virtual void CreateFrameInfo();

    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    RegistersDescription *GetRegfile() const
    {
        return regfile_;
    }
    Encoder *GetEncoder() const
    {
        return enc_;
    }
    CallingConvention *GetCallingConvention() const
    {
        return callconv_;
    }

    GraphVisitor *GetGraphVisitor() const
    {
        return visitor_;
    }

    LabelHolder::LabelId GetLabelEntry() const
    {
        return labelEntry_;
    }

    LabelHolder::LabelId GetLabelExit() const
    {
        return labelExit_;
    }

    RuntimeInterface::MethodId GetMethodId()
    {
        return methodId_;
    }

    void SetStartCodeOffset(size_t offset)
    {
        startCodeOffset_ = offset;
    }

    size_t GetStartCodeOffset() const
    {
        return startCodeOffset_;
    }

    void Convert(ArenaVector<Reg> *regsUsage, const ArenaVector<bool> *mask, TypeInfo typeInfo);

    Reg ConvertRegister(Register r, DataType::Type type = DataType::Type::INT64);

    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertSrcRegisters(Inst *inst)
    {
        auto lastTuple = std::make_tuple(ConvertRegister(inst->GetSrcReg(SRC_REGS_COUNT - 1), inst->GetType()));
        return std::tuple_cat(ConvertSrcRegisters<SRC_REGS_COUNT - 1>(inst), lastTuple);
    }

    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertRegisters(Inst *inst)
    {
        auto dstTuple = std::make_tuple(ConvertRegister(inst->GetDstReg(), inst->GetType()));
        return std::tuple_cat(dstTuple, ConvertSrcRegisters<SRC_REGS_COUNT>(inst));
    }
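
    // Usage sketch for ConvertRegisters (illustrative, for a two-source instruction):
    //   auto [dst, src0, src1] = ConvertRegisters<2U>(inst);
    // The recursion terminates at the ConvertSrcRegisters<0> specialization defined
    // after this class.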

    Imm ConvertImmWithExtend(uint64_t imm, DataType::Type type);

    Condition ConvertCc(ConditionCode cc);
    Condition ConvertCcOverflow(ConditionCode cc);

    static inline TypeInfo ConvertDataType(DataType::Type type, Arch arch)
    {
        return TypeInfo::FromDataType(type, arch);
    }

    Arch GetArch() const
    {
        return GetTarget().GetArch();
    }

    Target GetTarget() const
    {
        return target_;
    }

    TypeInfo GetPtrRegType() const
    {
        return target_.GetPtrRegType();
    }

    CodeInfoBuilder *GetCodeBuilder() const
    {
        return codeBuilder_;
    }

    bool IsCompressedStringsEnabled() const
    {
        return runtime_->IsCompressedStringsEnabled();
    }

    void CreateStackMap(Inst *inst, Inst *user = nullptr);

    void CreateStackMapRec(SaveStateInst *saveState, bool requireVregMap, Inst *targetSite);
    void CreateVRegMap(SaveStateInst *saveState, size_t vregsCount, Inst *targetSite);
    void CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg);
    void FillVregIndices(SaveStateInst *saveState);

    void CreateOsrEntry(SaveStateInst *saveState);

    void CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg);

    /// `LIVE_INPUTS` indicates that the inst's source registers should be added to the mask
    template <bool LIVE_INPUTS = false>
    std::pair<RegMask, VRegMask> GetLiveRegisters(Inst *inst);
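
    // Usage sketch for GetLiveRegisters (structured bindings over the returned pair):
    //   auto [liveRegs, liveVregs] = GetLiveRegisters<true>(inst);  // include inst's inputs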
    // Limits the live register set to the number of registers used to pass parameters to the runtime or fastpath
    // call:
    // 1) these ones are saved/restored by the caller
    // 2) the remaining ones are saved/restored by the bridge function (aarch only) or by fastpath prologue/epilogue
    void FillOnlyParameters(RegMask *liveRegs, uint32_t numParams, bool isFastpath) const;
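
    // Usage sketch for FillOnlyParameters: for a fastpath taking two parameters,
    //   FillOnlyParameters(&liveRegs, 2U, /* isFastpath */ true);
    // restricts `liveRegs` to the registers that carry those parameters.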

    template <typename T, typename... Args>
    T *CreateSlowPath(Inst *inst, Args &&...args);

    void EmitSlowPaths();

    /**
     * Insert tracing code into the generated code. See the `Trace` method in `runtime/entrypoints.cpp`.
     * NOTE(compiler): we should rework the parameter-assignment algorithm, which is duplicated here.
     * @param params parameters to be passed to the TRACE entrypoint; the first parameter must be a TraceId value.
     */
    template <typename... Args>
    void InsertTrace(Args &&...params);
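    // Hypothetical InsertTrace call sketch (the trace id and extra arguments are illustrative):
    //   InsertTrace(Imm(static_cast<size_t>(traceId)), param0, param1);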
#if defined(EVENT_METHOD_ENTER_ENABLED) && EVENT_METHOD_ENTER_ENABLED != 0
    void MakeTrace();
#endif
    void CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id);

    template <bool IS_FASTPATH, typename... Args>
    void CallEntrypoint(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);

    void CallEntrypointFinalize(RegMask &liveRegs, RegMask &paramsMask, Inst *inst)
    {
        LoadCallerRegisters(liveRegs, VRegMask(), true);

        if (!inst->HasImplicitRuntimeCall()) {
            return;
        }
        for (auto i = 0U; i < paramsMask.size(); i++) {
            if (paramsMask.test(i)) {
                inst->GetSaveState()->GetRootsRegsMask().reset(i);
            }
        }
    }

    // This function is used to call runtime functions through special bridges.
    // !NOTE Don't use it to call the runtime without bridges (it saves only the parameters on the stack).
    template <typename... Args>
    void CallRuntime(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
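    // Usage sketch for CallRuntime (the entrypoint id and registers are illustrative):
    //   CallRuntime(inst, EntrypointId::CREATE_OBJECT_BY_CLASS, dstReg, RegMask(), srcReg);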
    template <typename... Args>
    void CallFastPath(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
    template <typename... Args>
    void CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dstReg, Args &&...params);
    void SaveRegistersForImplicitRuntime(Inst *inst, RegMask *paramsMask, RegMask *mask);

    void VisitNewArray(Inst *inst);

    void LoadClassFromObject(Reg classReg, Reg objReg);
    void VisitCallIndirect(CallIndirectInst *inst);
    void VisitCall(CallInst *inst);
    void CreateCallIntrinsic(IntrinsicInst *inst);
    void CreateMultiArrayCall(CallInst *callInst);
    void CreateNewObjCall(NewObjectInst *newObj);
    void CreateNewObjCallOld(NewObjectInst *newObj);
    void CreateMonitorCall(MonitorInst *inst);
    void CreateMonitorCallOld(MonitorInst *inst);
    void CreateCheckCastInterfaceCall(Inst *inst);
    void CreateNonDefaultInitClass(ClassInst *initInst);
    void CheckObject(Reg reg, LabelHolder::LabelId label);
    template <bool IS_CLASS = false>
    void CreatePreWRB(Inst *inst, MemRef mem, RegMask preserved = {}, bool storePair = false);
    void CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2 = INVALID_REGISTER, RegMask preserved = {});
    void CreatePostWRBForDynamic(Inst *inst, MemRef mem, Reg reg1, Reg reg2, RegMask preserved = {});
    template <typename... Args>
    void CallBarrier(RegMask liveRegs, VRegMask liveVregs, std::variant<EntrypointId, Reg> entrypoint,
                     Args &&...params);
    void CreateLoadClassFromPLT(Inst *inst, Reg tmpReg, Reg dst, size_t classId);
    void CreateJumpToClassResolverPltShared(Inst *inst, Reg tmpReg, RuntimeInterface::EntrypointId id);
    void CreateLoadTLABInformation(Reg regTlabStart, Reg regTlabSize);
    void CreateCheckForTLABWithConstSize(Inst *inst, Reg regTlabStart, Reg regTlabSize, size_t size,
                                         LabelHolder::LabelId label);
    void CreateDebugRuntimeCallsForNewObject(Inst *inst, Reg regTlabStart, size_t allocSize, RegMask preserved);
    void CreateDebugRuntimeCallsForObjectClone(Inst *inst, Reg dst);
    void CreateReturn(const Inst *inst);
    template <typename T>
    void CreateUnaryCheck(Inst *inst, RuntimeInterface::EntrypointId id, DeoptimizeType type, Condition cc);

    // Aligns up the value in alignmentReg, using tmpReg as a scratch register.
    void CreateAlignmentValue(Reg alignmentReg, Reg tmpReg, size_t alignment);
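    // For CreateAlignmentValue above, a common align-up identity for power-of-two
    // alignments (a sketch of what the encoded code computes, assuming such an alignment):
    //   alignedValue = (value + alignment - 1) & ~(alignment - 1)
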
    void TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset);

    const CFrameLayout &GetFrameLayout() const
    {
        return frameLayout_;
    }

    bool RegisterKeepCallArgument(CallInst *callInst, Reg reg);

    void LoadMethod(Reg dst);
    void LoadFreeSlot(Reg dst);
    void StoreFreeSlot(Reg src);

    ssize_t GetStackOffset(Location location);
    MemRef GetMemRefForSlot(Location location);
    Reg SpReg() const;
    Reg FpReg() const;

    bool HasLiveCallerSavedRegs(Inst *inst);
    void SaveCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);
    void LoadCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);

    // Initialize internal variables
    void Initialize();
    bool Finalize();
    void IssueDisasm();
    const Disassembly *GetDisasm() const;
    Disassembly *GetDisasm();
    void AddLiveOut(const BasicBlock *bb, const Register reg);
    RegMask GetLiveOut(const BasicBlock *bb) const;

    Reg ThreadReg() const;
    static bool InstEncodedWithLibCall(const Inst *inst, Arch arch);

    void EncodeDynamicCast(Inst *inst, Reg dst, bool dstSigned, Reg src);

    PANDA_PUBLIC_API Reg ConvertInstTmpReg(const Inst *inst, DataType::Type type) const;
    Reg ConvertInstTmpReg(const Inst *inst) const;

    bool OffsetFitReferenceTypeSize(uint64_t offset) const;

protected:
    virtual void GeneratePrologue();
    virtual void GenerateEpilogue();

    // Main logic steps
    bool BeginMethod();
    bool VisitGraph();
    void EndMethod();
    bool CopyToCodeCache();
    void DumpCode();

    RegMask GetUsedRegs() const;
    RegMask GetUsedVRegs() const;

    template <typename... Args>
    void FillCallParams(Args &&...params);

    template <size_t IMM_ARRAY_SIZE>
    class FillCallParamsHelper;

    void EmitJump(const BasicBlock *bb);
    bool EmitCallRuntimeCode(Inst *inst, std::variant<EntrypointId, Reg> entrypoint);

    void IntfInlineCachePass(ResolveVirtualInst *resolver, Reg methodReg, Reg tmpReg, Reg objReg);

    template <typename T>
    RuntimeInterface::MethodPtr GetCallerOfUnresolvedMethod(T *resolver);

    void EmitResolveVirtual(ResolveVirtualInst *resolver);
    void EmitResolveUnknownVirtual(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitResolveVirtualAot(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitCallVirtual(CallInst *call);
    void EmitCallResolvedVirtual(CallInst *call);
    void EmitCallStatic(CallInst *call);
    void EmitResolveStatic(ResolveStaticInst *resolver);
    void EmitCallResolvedStatic(CallInst *call);
    void EmitCallDynamic(CallInst *call);
    void FinalizeCall(CallInst *call);

    uint32_t GetVtableShift();
    void CalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void CreateBuiltinIntrinsic(IntrinsicInst *inst);
    static constexpr int32_t NUM_OF_SRC_BUILTIN = 6;
    static constexpr uint8_t FIRST_OPERAND = 0;
    static constexpr uint8_t SECOND_OPERAND = 1;
    static constexpr uint8_t THIRD_OPERAND = 2;
    static constexpr uint8_t FOURTH_OPERAND = 3;
    static constexpr uint8_t FIFTH_OPERAND = 4;
    using SRCREGS = std::array<Reg, NUM_OF_SRC_BUILTIN>;
    // implementation is generated with compiler/optimizer/templates/intrinsics/intrinsics_codegen.inl.erb
    void FillBuiltin(IntrinsicInst *inst, SRCREGS src, Reg dst);

    template <typename Arg, typename... Args>
    ALWAYS_INLINE inline void AddParamRegsInLiveMasksHandleArgs(ParameterInfo *paramInfo, RegMask *liveRegs,
                                                                VRegMask *liveVregs, Arg param, Args &&...params);

    template <typename... Args>
    void AddParamRegsInLiveMasks(RegMask *liveRegs, VRegMask *liveVregs, Args &&...params);
    template <typename... Args>
    void CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst, Args &&...params);

    ScopedTmpReg CalculatePreviousTLABAllocSize(Reg reg, LabelHolder::LabelId label);
    friend class IntrinsicCodegenTest;

    void CreateStringFromCharArrayTlab(Inst *inst, Reg dst, SRCREGS src);
#include "codegen_language_extensions.h"
#include "intrinsics_codegen.inl.h"

private:
    template <typename T>
    void EncodeImms(const T &imms, bool skipFirstLocation);

    static bool EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg, TypedImm>> params);

    template <typename... Args>
    void FillPostWrbCallParams(MemRef mem, Args &&...params);

    void EmitAtomicByteOr(Reg addr, Reg value);

private:
    ArenaAllocator *allocator_;
    ArenaAllocator *localAllocator_;
    // Register description
    RegistersDescription *regfile_;
    // Encoder implementation
    Encoder *enc_;
    // Target architecture calling convention model
    CallingConvention *callconv_;
    // Current execution model implementation
    // Visitor for instructions
    GraphVisitor *visitor_ {};
    CodeInfoBuilder *codeBuilder_ {nullptr};

    ArenaVector<SlowPathBase *> slowPaths_;
    ArenaUnorderedMap<RuntimeInterface::EntrypointId, SlowPathShared *> slowPathsMap_;

    const CFrameLayout frameLayout_;
    FrameInfo *frameInfo_ {nullptr};
    ArenaVector<OsrEntryStub *> osrEntries_;
    RuntimeInterface::MethodId methodId_ {INVALID_ID};
    size_t startCodeOffset_ {0};
    ArenaVector<std::pair<int16_t, int16_t>> vregIndices_;

    RuntimeInterface *runtime_ {nullptr};

    LabelHolder::LabelId labelEntry_ {};
    LabelHolder::LabelId labelExit_ {};

    const Target target_;

    /* Registers that have been allocated by regalloc */
    RegMask usedRegs_ {0};
    RegMask usedVregs_ {0};
    /* Map of BasicBlock to live-out registers mask. It is needed during epilogue encoding to avoid overwriting the
     * live-out registers */
    ArenaUnorderedMap<const BasicBlock *, RegMask> liveOuts_;

    Disassembly disasm_;
    SpillFillsResolver spillFillsResolver_;

    friend class EncodeVisitor;
    friend class BaselineCodegen;
    friend class SlowPathJsCastDoubleToInt32;
    friend class PostWriteBarrier;
}; // Codegen

template <>
constexpr auto Codegen::ConvertSrcRegisters<0>([[maybe_unused]] Inst *inst)
{
    return std::make_tuple();
}

// PostWriteBarrier
class PostWriteBarrier {
public:
    PostWriteBarrier() = delete;
    PostWriteBarrier(Codegen *cg, Inst *inst) : cg_(cg), inst_(inst)
    {
        ASSERT(cg_ != nullptr);
        ASSERT(inst_ != nullptr);
        type_ = cg_->GetRuntime()->GetPostType();
    }
    DEFAULT_MOVE_SEMANTIC(PostWriteBarrier);
    DEFAULT_COPY_SEMANTIC(PostWriteBarrier);
    ~PostWriteBarrier() = default;

    void Encode(MemRef mem, Reg reg1, Reg reg2, bool checkObject = true, RegMask preserved = {});

private:
    static constexpr auto BARRIER_POSITION = ark::mem::BarrierPosition::BARRIER_POSITION_POST;
    Codegen *cg_;
    Inst *inst_;
    ark::mem::BarrierType type_;

    struct Args {
        MemRef mem;
        Reg reg1;
        Reg reg2;
        RegMask preserved;
        bool checkObject = true;
    };

    void EncodeInterRegionBarrier(Args args);
    void EncodeInterGenerationalBarrier(Reg base);
    // Creates a call to the IRtoC PostWrb entrypoint. "Offline" means AOT or IRtoC compilation, where the GC type
    // is not known, so the managed thread keeps a pointer to the actual IRtoC GC barrier implementation at run time.
    void EncodeOfflineIrtocBarrier(Args args);
    // Creates a call to the IRtoC PostWrb entrypoint. "Online" means JIT compilation, where the GC type is known.
    void EncodeOnlineIrtocBarrier(Args args);
    void EncodeOnlineIrtocRegionTwoRegsBarrier(Args args);
    void EncodeOnlineIrtocRegionOneRegBarrier(Args args);

    // Auxiliary methods
    void EncodeCalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void EncodeCheckObject(Reg base, Reg reg1, LabelHolder::LabelId skipLabel, bool checkNull);
    void EncodeWrapOneArg(Reg param, Reg base, MemRef mem, size_t additionalOffset = 0);

    template <typename T>
    T GetBarrierOperandValue(ark::mem::BarrierPosition position, std::string_view name)
    {
        auto operand = cg_->GetRuntime()->GetBarrierOperand(position, name);
        return std::get<T>(operand.GetValue());
    }
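
    // Usage sketch for GetBarrierOperandValue (the operand name and value type are
    // illustrative; the actual operands are defined by the runtime):
    //   auto cardTable = GetBarrierOperandValue<uint32_t *>(BARRIER_POSITION, "CARD_TABLE_ADDR");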

    template <typename... Args>
    void FillCallParams(MemRef mem, Args &&...params)
    {
        auto base {mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()))};
        if (mem.HasIndex()) {
            ASSERT(mem.GetScale() == 0 && !mem.HasDisp());
            cg_->FillCallParams(base, mem.GetIndex(), std::forward<Args>(params)...);
        } else {
            cg_->FillCallParams(base, TypedImm(mem.GetDisp()), std::forward<Args>(params)...);
        }
    }

    bool HasObject2(const Args &args) const
    {
        ASSERT(args.reg1.IsValid());
        return args.reg2.IsValid() && args.reg1 != args.reg2;
    }

    Reg GetBase(const Args &args) const
    {
        return args.mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()));
    }

    RegMask GetParamRegs(const size_t paramsNumber, const Args &args) const
    {
        auto paramRegs {cg_->GetTarget().GetParamRegsMask(paramsNumber) & cg_->GetLiveRegisters(inst_).first};
        return (paramRegs | args.preserved);
    }
}; // PostWriteBarrier

} // namespace ark::compiler

#include "codegen-inl.h"

#endif // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H