1 /*
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
17 #define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
18
19 /*
20 Codegen interface for compiler
21 ! Do not use this file in runtime
22 */
23
24 #include <tuple>
25 #include "code_info/code_info_builder.h"
26 #include "compiler_logger.h"
27 #include "disassembly.h"
28 #include "frame_info.h"
29 #include "optimizer/analysis/live_registers.h"
30 #include "optimizer/code_generator/callconv.h"
31 #include "optimizer/code_generator/encode.h"
32 #include "optimizer/code_generator/scoped_tmp_reg.h"
33 #include "optimizer/code_generator/registers_description.h"
34 #include "optimizer/code_generator/slow_path.h"
35 #include "optimizer/code_generator/spill_fill_encoder.h"
36 #include "optimizer/code_generator/target_info.h"
37 #include "optimizer/ir/analysis.h"
38 #include "optimizer/ir/graph.h"
39 #include "optimizer/ir/graph_visitor.h"
40 #include "optimizer/optimizations/regalloc/spill_fills_resolver.h"
41 #include "optimizer/pass_manager.h"
42 #include "utils/cframe_layout.h"
43
44 namespace ark::compiler {
// Maximum size of a single emitted slow path, in bytes
46 constexpr size_t SLOW_PATH_SIZE = 64;
47
48 class Encoder;
49 class CodeBuilder;
50 class OsrEntryStub;
51
IrTypeToMetainfoType(DataType::Type type)52 inline VRegInfo::Type IrTypeToMetainfoType(DataType::Type type)
53 {
54 switch (type) {
55 case DataType::UINT64:
56 case DataType::INT64:
57 return VRegInfo::Type::INT64;
58 case DataType::ANY:
59 return VRegInfo::Type::ANY;
60 case DataType::UINT32:
61 case DataType::UINT16:
62 case DataType::UINT8:
63 case DataType::INT32:
64 case DataType::INT16:
65 case DataType::INT8:
66 return VRegInfo::Type::INT32;
67 case DataType::FLOAT64:
68 return VRegInfo::Type::FLOAT64;
69 case DataType::FLOAT32:
70 return VRegInfo::Type::FLOAT32;
71 case DataType::BOOL:
72 return VRegInfo::Type::BOOL;
73 case DataType::REFERENCE:
74 return VRegInfo::Type::OBJECT;
75 default:
76 UNREACHABLE();
77 }
78 }
79
/// Code generation pass: lowers the optimized IR graph into native code for
/// the target architecture via the Encoder / CallingConvention abstractions.
/// Also emits stack maps, OSR entries, slow paths and GC write barriers.
class Codegen : public Optimization {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit Codegen(Graph *graph);
    NO_MOVE_SEMANTIC(Codegen);
    NO_COPY_SEMANTIC(Codegen);

    ~Codegen() override = default;

    bool RunImpl() override;
    const char *GetPassName() const override;
    bool AbortIfFailed() const override;

    /// Convenience entry point: runs code generation over `graph`.
    static bool Run(Graph *graph);

    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    ArenaAllocator *GetLocalAllocator() const
    {
        return localAllocator_;
    }
    FrameInfo *GetFrameInfo() const
    {
        return frameInfo_;
    }
    void SetFrameInfo(FrameInfo *frameInfo)
    {
        frameInfo_ = frameInfo;
    }
    virtual void CreateFrameInfo();

    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    RegistersDescription *GetRegfile() const
    {
        return regfile_;
    }
    Encoder *GetEncoder() const
    {
        return enc_;
    }
    CallingConvention *GetCallingConvention() const
    {
        return callconv_;
    }

    GraphVisitor *GetGraphVisitor() const
    {
        return visitor_;
    }

    LabelHolder::LabelId GetLabelEntry() const
    {
        return labelEntry_;
    }

    LabelHolder::LabelId GetLabelExit() const
    {
        return labelExit_;
    }

    RuntimeInterface::MethodId GetMethodId()
    {
        return methodId_;
    }

    void SetStartCodeOffset(size_t offset)
    {
        startCodeOffset_ = offset;
    }

    size_t GetStartCodeOffset() const
    {
        return startCodeOffset_;
    }

    void Convert(ArenaVector<Reg> *regsUsage, const ArenaVector<bool> *mask, TypeInfo typeInfo);

    Reg ConvertRegister(Register r, DataType::Type type = DataType::Type::INT64);

    /// Builds a tuple of the first SRC_REGS_COUNT source registers of `inst`,
    /// each converted to a target register. Recursion terminates at the <0>
    /// specialization declared after this class.
    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertSrcRegisters(Inst *inst)
    {
        auto lastTuple = std::make_tuple(ConvertRegister(inst->GetSrcReg(SRC_REGS_COUNT - 1), inst->GetType()));
        return std::tuple_cat(ConvertSrcRegisters<SRC_REGS_COUNT - 1>(inst), lastTuple);
    }

    /// Same as ConvertSrcRegisters, but with the converted destination
    /// register prepended to the resulting tuple.
    template <size_t SRC_REGS_COUNT>
    constexpr auto ConvertRegisters(Inst *inst)
    {
        auto dstTuple = std::make_tuple(ConvertRegister(inst->GetDstReg(), inst->GetType()));
        return std::tuple_cat(dstTuple, ConvertSrcRegisters<SRC_REGS_COUNT>(inst));
    }

    Imm ConvertImmWithExtend(uint64_t imm, DataType::Type type);

    Condition ConvertCc(ConditionCode cc);
    Condition ConvertCcOverflow(ConditionCode cc);

    static inline TypeInfo ConvertDataType(DataType::Type type, Arch arch)
    {
        return TypeInfo::FromDataType(type, arch);
    }

    Arch GetArch() const
    {
        return GetTarget().GetArch();
    }

    Target GetTarget() const
    {
        return target_;
    }

    TypeInfo GetPtrRegType() const
    {
        return target_.GetPtrRegType();
    }

    CodeInfoBuilder *GetCodeBuilder() const
    {
        return codeBuilder_;
    }

    bool IsCompressedStringsEnabled() const
    {
        return runtime_->IsCompressedStringsEnabled();
    }

    void CreateStackMap(Inst *inst, Inst *user = nullptr);

    void CreateStackMapRec(SaveStateInst *saveState, bool requireVregMap, Inst *targetSite);
    void CreateVRegMap(SaveStateInst *saveState, size_t vregsCount, Inst *targetSite);
    void CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg);
    void FillVregIndices(SaveStateInst *saveState);

    void CreateOsrEntry(SaveStateInst *saveState);

    void CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg);

    /// LIVE_INPUTS indicates that inst's source registers should be added to the mask.
    template <bool LIVE_INPUTS = false>
    std::pair<RegMask, VRegMask> GetLiveRegisters(Inst *inst);
    // Limits live register set to a number of registers used to pass parameters to the runtime or fastpath call:
    // 1) these ones are saved/restored by caller
    // 2) the remaining ones are saved/restored by the bridge function (aarch only) or by fastpath prologue/epilogue
    void FillOnlyParameters(RegMask *liveRegs, uint32_t numParams, bool isFastpath) const;

    template <typename T, typename... Args>
    T *CreateSlowPath(Inst *inst, Args &&...args);

    void EmitSlowPaths();

    /**
     * Insert tracing code to the generated code. See `Trace` method in the `runtime/entrypoints.cpp`.
     * NOTE(compiler): we should rework parameters assigning algorithm, that is duplicated here.
     * @param params parameters to be passed to the TRACE entrypoint, first parameter must be TraceId value.
     */
    template <typename... Args>
    void InsertTrace(Args &&...params);
    void CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id);

    template <bool IS_FASTPATH, typename... Args>
    void CallEntrypoint(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);

    /// Epilogue of an entrypoint call: restores caller-saved registers and,
    /// when the instruction carries an implicit runtime call, clears every
    /// parameter-register bit from the save state's roots register mask.
    void CallEntrypointFinalize(RegMask &liveRegs, RegMask &paramsMask, Inst *inst)
    {
        LoadCallerRegisters(liveRegs, VRegMask(), true);

        if (!inst->HasImplicitRuntimeCall()) {
            return;
        }
        for (auto i = 0U; i < paramsMask.size(); i++) {
            if (paramsMask.test(i)) {
                inst->GetSaveState()->GetRootsRegsMask().reset(i);
            }
        }
    }

    // The function is used for calling runtime functions through special bridges.
    // !NOTE Do not use this function for calling the runtime without bridges (it saves only parameters on the stack).
    template <typename... Args>
    void CallRuntime(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
    template <typename... Args>
    void CallFastPath(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params);
    template <typename... Args>
    void CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dstReg, Args &&...params);
    void SaveRegistersForImplicitRuntime(Inst *inst, RegMask *paramsMask, RegMask *mask);

    void VisitNewArray(Inst *inst);

    void LoadClassFromObject(Reg classReg, Reg objReg);
    void VisitCallIndirect(CallIndirectInst *inst);
    void VisitCall(CallInst *inst);
    void CreateCallIntrinsic(IntrinsicInst *inst);
    void CreateMultiArrayCall(CallInst *callInst);
    void CreateNewObjCall(NewObjectInst *newObj);
    void CreateNewObjCallOld(NewObjectInst *newObj);
    void CreateMonitorCall(MonitorInst *inst);
    void CreateMonitorCallOld(MonitorInst *inst);
    void CreateCheckCastInterfaceCall(Inst *inst);
    void CreateNonDefaultInitClass(ClassInst *initInst);
    void CheckObject(Reg reg, LabelHolder::LabelId label);
    // Pre/post write barriers for GC; see also the PostWriteBarrier helper class below.
    template <bool IS_CLASS = false>
    void CreatePreWRB(Inst *inst, MemRef mem, RegMask preserved = {}, bool storePair = false);
    void CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2 = INVALID_REGISTER, RegMask preserved = {});
    void CreatePostWRBForDynamic(Inst *inst, MemRef mem, Reg reg1, Reg reg2, RegMask preserved = {});
    template <typename... Args>
    void CallBarrier(RegMask liveRegs, VRegMask liveVregs, std::variant<EntrypointId, Reg> entrypoint,
                     Args &&...params);
    void CreateLoadClassFromPLT(Inst *inst, Reg tmpReg, Reg dst, size_t classId);
    void CreateJumpToClassResolverPltShared(Inst *inst, Reg tmpReg, RuntimeInterface::EntrypointId id);
    void CreateLoadTLABInformation(Reg regTlabStart, Reg regTlabSize);
    void CreateCheckForTLABWithConstSize(Inst *inst, Reg regTlabStart, Reg regTlabSize, size_t size,
                                         LabelHolder::LabelId label);
    void CreateDebugRuntimeCallsForNewObject(Inst *inst, Reg regTlabStart, size_t allocSize, RegMask preserved);
    void CreateDebugRuntimeCallsForObjectClone(Inst *inst, Reg dst);
    void CreateReturn(const Inst *inst);
    template <typename T>
    void CreateUnaryCheck(Inst *inst, RuntimeInterface::EntrypointId id, DeoptimizeType type, Condition cc);

    // Aligns the value in alignmentReg up to `alignment`, using tmpReg as scratch.
    void CreateAlignmentValue(Reg alignmentReg, Reg tmpReg, size_t alignment);
    void TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset);

    const CFrameLayout &GetFrameLayout() const
    {
        return frameLayout_;
    }

    bool RegisterKeepCallArgument(CallInst *callInst, Reg reg);

    void LoadMethod(Reg dst);
    void LoadFreeSlot(Reg dst);
    void StoreFreeSlot(Reg src);

    ssize_t GetStackOffset(Location location);
    MemRef GetMemRefForSlot(Location location);
    Reg SpReg() const;
    Reg FpReg() const;

    bool HasLiveCallerSavedRegs(Inst *inst);
    void SaveCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);
    void LoadCallerRegisters(RegMask liveRegs, VRegMask liveVregs, bool adjustRegs);

    // Initialization of internal variables
    void Initialize();
    bool Finalize();
    void IssueDisasm();
    const Disassembly *GetDisasm() const;
    Disassembly *GetDisasm();
    void AddLiveOut(const BasicBlock *bb, const Register reg);
    RegMask GetLiveOut(const BasicBlock *bb) const;

    Reg ThreadReg() const;
    static bool InstEncodedWithLibCall(const Inst *inst, Arch arch);

    void EncodeDynamicCast(Inst *inst, Reg dst, bool dstSigned, Reg src);

    Reg ConvertInstTmpReg(const Inst *inst, DataType::Type type) const;
    Reg ConvertInstTmpReg(const Inst *inst) const;

    bool OffsetFitReferenceTypeSize(uint64_t offset) const;

protected:
    virtual void GeneratePrologue();
    virtual void GenerateEpilogue();

    // Main logic steps
    bool BeginMethod();
    bool VisitGraph();
    void EndMethod();
    bool CopyToCodeCache();
    void DumpCode();

    RegMask GetUsedRegs() const;
    RegMask GetUsedVRegs() const;

    template <typename... Args>
    void FillCallParams(Args &&...params);

    template <size_t IMM_ARRAY_SIZE>
    class FillCallParamsHelper;

    void EmitJump(const BasicBlock *bb);
    bool EmitCallRuntimeCode(Inst *inst, std::variant<EntrypointId, Reg> entrypoint);

    void IntfInlineCachePass(ResolveVirtualInst *resolver, Reg methodReg, Reg tmpReg, Reg objReg);

    template <typename T>
    RuntimeInterface::MethodPtr GetCallerOfUnresolvedMethod(T *resolver);

    // Call emission for the different call/resolve flavors (virtual, static, dynamic).
    void EmitResolveVirtual(ResolveVirtualInst *resolver);
    void EmitResolveUnknownVirtual(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitResolveVirtualAot(ResolveVirtualInst *resolver, Reg methodReg);
    void EmitCallVirtual(CallInst *call);
    void EmitCallResolvedVirtual(CallInst *call);
    void EmitCallStatic(CallInst *call);
    void EmitResolveStatic(ResolveStaticInst *resolver);
    void EmitCallResolvedStatic(CallInst *call);
    void EmitCallDynamic(CallInst *call);
    void FinalizeCall(CallInst *call);

    uint32_t GetVtableShift();
    void CalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void CreateBuiltinIntrinsic(IntrinsicInst *inst);
    static constexpr int32_t NUM_OF_SRC_BUILTIN = 6;
    static constexpr uint8_t FIRST_OPERAND = 0;
    static constexpr uint8_t SECOND_OPERAND = 1;
    static constexpr uint8_t THIRD_OPERAND = 2;
    static constexpr uint8_t FOURTH_OPERAND = 3;
    static constexpr uint8_t FIFTH_OPERAND = 4;
    using SRCREGS = std::array<Reg, NUM_OF_SRC_BUILTIN>;
    // implementation is generated with compiler/optimizer/templates/intrinsics/intrinsics_codegen.inl.erb
    void FillBuiltin(IntrinsicInst *inst, SRCREGS src, Reg dst);

    template <typename Arg, typename... Args>
    ALWAYS_INLINE inline void AddParamRegsInLiveMasksHandleArgs(ParameterInfo *paramInfo, RegMask *liveRegs,
                                                               VRegMask *liveVregs, Arg param, Args &&...params);

    template <typename... Args>
    void AddParamRegsInLiveMasks(RegMask *liveRegs, VRegMask *liveVregs, Args &&...params);
    template <typename... Args>
    void CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst, Args &&...params);

    ScopedTmpReg CalculatePreviousTLABAllocSize(Reg reg, LabelHolder::LabelId label);
    friend class IntrinsicCodegenTest;

    virtual void IntrinsicSlowPathEntry(IntrinsicInst *inst);
    virtual void IntrinsicCallRuntimeSaveAll(IntrinsicInst *inst);
    virtual void IntrinsicSaveRegisters(IntrinsicInst *inst);
    virtual void IntrinsicRestoreRegisters(IntrinsicInst *inst);
    virtual void IntrinsicTailCall(IntrinsicInst *inst);
    virtual void IntrinsicSaveTlabStatsSafe(IntrinsicInst *inst, Reg src1, Reg src2, Reg tmp);

    void CreateStringFromCharArrayTlab(Inst *inst, Reg dst, SRCREGS src);
// Generated members are spliced into this class at protected access level.
#include "codegen_language_extensions.h"
#include "intrinsics_codegen.inl.h"

private:
    template <typename T>
    void EncodeImms(const T &imms, bool skipFirstLocation);

    static bool EnsureParamsFitIn32Bit(std::initializer_list<std::variant<Reg, TypedImm>> params);

    template <typename... Args>
    void FillPostWrbCallParams(MemRef mem, Args &&...params);

    void EmitAtomicByteOr(Reg addr, Reg value);

private:
    ArenaAllocator *allocator_;
    ArenaAllocator *localAllocator_;
    // Register description
    RegistersDescription *regfile_;
    // Encoder implementation
    Encoder *enc_;
    // Target architecture calling convention model
    CallingConvention *callconv_;
    // Visitor for instructions
    GraphVisitor *visitor_ {};
    CodeInfoBuilder *codeBuilder_ {nullptr};

    ArenaVector<SlowPathBase *> slowPaths_;
    ArenaUnorderedMap<RuntimeInterface::EntrypointId, SlowPathShared *> slowPathsMap_;

    const CFrameLayout frameLayout_;
    FrameInfo *frameInfo_ {nullptr};
    ArenaVector<OsrEntryStub *> osrEntries_;
    RuntimeInterface::MethodId methodId_ {INVALID_ID};
    size_t startCodeOffset_ {0};
    ArenaVector<std::pair<int16_t, int16_t>> vregIndices_;

    RuntimeInterface *runtime_ {nullptr};

    LabelHolder::LabelId labelEntry_ {};
    LabelHolder::LabelId labelExit_ {};

    const Target target_;

    /* Registers that have been allocated by regalloc */
    RegMask usedRegs_ {0};
    RegMask usedVregs_ {0};
    /* Map of BasicBlock to live-out registers mask. It is needed in epilogue encoding to avoid overwriting of the
     * live-out registers */
    ArenaUnorderedMap<const BasicBlock *, RegMask> liveOuts_;

    Disassembly disasm_;
    SpillFillsResolver spillFillsResolver_;

    friend class EncodeVisitor;
    friend class BaselineCodegen;
    friend class SlowPathJsCastDoubleToInt32;
    friend class PostWriteBarrier;
};  // Codegen
481
482 template <>
483 constexpr auto Codegen::ConvertSrcRegisters<0>([[maybe_unused]] Inst *inst)
484 {
485 return std::make_tuple();
486 }
487
488 // PostWriteBarrier
// PostWriteBarrier: helper that encodes the GC post-write barrier for a store
// instruction. The concrete barrier flavor is selected from the barrier type
// the runtime reports (see the constructor).
class PostWriteBarrier {
public:
    PostWriteBarrier() = delete;
    PostWriteBarrier(Codegen *cg, Inst *inst) : cg_(cg), inst_(inst)
    {
        ASSERT(cg_ != nullptr);
        ASSERT(inst_ != nullptr);
        // The barrier type is determined by the runtime's active GC.
        type_ = cg_->GetRuntime()->GetPostType();
    }
    DEFAULT_MOVE_SEMANTIC(PostWriteBarrier);
    DEFAULT_COPY_SEMANTIC(PostWriteBarrier);
    ~PostWriteBarrier() = default;

    // Emits the post-write barrier for a store to `mem` of the object(s) in
    // reg1/reg2 (reg2 may name a second stored object, see HasObject2 below).
    void Encode(MemRef mem, Reg reg1, Reg reg2, bool checkObject = true, RegMask preserved = {});

private:
    static constexpr auto BARRIER_POSITION = ark::mem::BarrierPosition::BARRIER_POSITION_POST;
    Codegen *cg_;  // owning codegen; not owned by this helper
    Inst *inst_;   // instruction the barrier is emitted for; not owned
    ark::mem::BarrierType type_;

    // Bundle of Encode() arguments forwarded to the flavor-specific encoders.
    struct Args {
        MemRef mem;
        Reg reg1;
        Reg reg2;
        RegMask preserved;
        bool checkObject = true;
    };

    void EncodeInterRegionBarrier(Args args);
    void EncodeInterGenerationalBarrier(Reg base);
    // Creates call to IRtoC PostWrb Entrypoint. Offline means AOT or IRtoC compilation -> type of GC is not known.
    // So Managed Thread keeps pointer to actual IRtoC GC barriers implementation at run-time.
    void EncodeOfflineIrtocBarrier(Args args);
    // Creates call to IRtoC PostWrb Entrypoint. Online means JIT compilation -> we know GC type.
    void EncodeOnlineIrtocBarrier(Args args);
    void EncodeOnlineIrtocRegionTwoRegsBarrier(Args args);
    void EncodeOnlineIrtocRegionOneRegBarrier(Args args);

    // Auxiliary methods
    void EncodeCalculateCardIndex(Reg baseReg, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);
    void EncodeCheckObject(Reg base, Reg reg1, LabelHolder::LabelId skipLabel, bool checkNull);
    void EncodeWrapOneArg(Reg param, Reg base, MemRef mem, size_t additionalOffset = 0);

    // Fetches a barrier operand from the runtime and extracts its value as T.
    template <typename T>
    T GetBarrierOperandValue(ark::mem::BarrierPosition position, std::string_view name)
    {
        auto operand = cg_->GetRuntime()->GetBarrierOperand(position, name);
        return std::get<T>(operand.GetValue());
    }

    // Forwards call parameters to Codegen::FillCallParams, passing either
    // (base, index) or (base, displacement) depending on the memory operand.
    template <typename... Args>
    void FillCallParams(MemRef mem, Args &&...params)
    {
        auto base {mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()))};
        if (mem.HasIndex()) {
            // Scaled or displaced indexed addressing is not supported here.
            ASSERT(mem.GetScale() == 0 && !mem.HasDisp());
            cg_->FillCallParams(base, mem.GetIndex(), std::forward<Args>(params)...);
        } else {
            cg_->FillCallParams(base, TypedImm(mem.GetDisp()), std::forward<Args>(params)...);
        }
    }

    // True when a second, distinct valid object register is present.
    bool HasObject2(const Args &args) const
    {
        ASSERT(args.reg1.IsValid());
        return args.reg2.IsValid() && args.reg1 != args.reg2;
    }

    // Base register of the store, viewed as a reference-typed register.
    Reg GetBase(const Args &args) const
    {
        return args.mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, cg_->GetArch()));
    }

    // Parameter registers that are live at inst_, plus explicitly preserved ones.
    RegMask GetParamRegs(const size_t paramsNumber, const Args &args) const
    {
        auto paramRegs {cg_->GetTarget().GetParamRegsMask(paramsNumber) & cg_->GetLiveRegisters(inst_).first};
        return (paramRegs | args.preserved);
    }
};  // PostWriteBarrier
569
570 } // namespace ark::compiler
571
572 #include "codegen-inl.h"
573
574 #endif // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H
575