/*
 * Copyright (c) 2024-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_INL_H
#define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_INL_H

namespace ark::compiler {

/// 'LIVE_INPUTS' indicates that inst's source registers should also be added to the mask.
template <bool LIVE_INPUTS>
std::pair<RegMask, VRegMask> Codegen::GetLiveRegisters(Inst *inst)
{
    RegMask liveRegs;
    VRegMask liveFpRegs;
    if (!g_options.IsCompilerSaveOnlyLiveRegisters() || inst == nullptr) {
        liveRegs.set();
        liveFpRegs.set();
        return {liveRegs, liveFpRegs};
    }
    // Run the LiveRegisters pass only if it is actually required
    if (!GetGraph()->IsAnalysisValid<LiveRegisters>()) {
        GetGraph()->RunPass<LiveRegisters>();
    }

    // Add registers from intervals that are live at inst's definition
    auto &lr = GetGraph()->GetAnalysis<LiveRegisters>();
    lr.VisitIntervalsWithLiveRegisters<LIVE_INPUTS>(inst, [&liveRegs, &liveFpRegs, this](const auto &li) {
        auto reg = ConvertRegister(li->GetReg(), li->GetType());
        GetEncoder()->SetRegister(&liveRegs, &liveFpRegs, reg);
    });

    // Add live temp registers
    liveRegs |= GetEncoder()->GetLiveTmpRegMask();
    liveFpRegs |= GetEncoder()->GetLiveTmpFpRegMask();

    return {liveRegs, liveFpRegs};
}
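
// Usage sketch (illustrative, not part of the original file): query the masks
// for an instruction and spill only what is actually live across the call.
//   auto [gpLive, fpLive] = GetLiveRegisters<true>(inst);
//   SaveCallerRegisters(gpLive, fpLive, true);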

template <typename T, typename... Args>
T *Codegen::CreateSlowPath(Inst *inst, Args &&...args)
{
    static_assert(std::is_base_of_v<SlowPathBase, T>);
    auto label = GetEncoder()->CreateLabel();
    auto slowPath = GetLocalAllocator()->New<T>(label, inst, std::forward<Args>(args)...);
    slowPaths_.push_back(slowPath);
    return slowPath;
}
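
// Usage sketch (illustrative; the DeoptimizeType value is an assumption, not
// taken from this file). SlowPathDeoptimize is used the same way in
// CreateUnaryCheck below:
//   auto *slowPath = CreateSlowPath<SlowPathDeoptimize>(inst, DeoptimizeType::NULL_CHECK);
//   GetEncoder()->EncodeJump(slowPath->GetLabel(), src, Condition::EQ);
// Slow paths collected in slowPaths_ are emitted after the main code.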

/**
 * Insert tracing code into the generated code. See the `Trace` method in `runtime/entrypoints.cpp`.
 * NOTE(compiler): we should rework the parameter-assignment algorithm, which is duplicated here.
 * @param params parameters to be passed to the TRACE entrypoint; the first parameter must be a TraceId value.
 */
template <typename... Args>
void Codegen::InsertTrace(Args &&...params)
{
    SCOPED_DISASM_STR(this, "Trace");
    [[maybe_unused]] constexpr size_t MAX_PARAM_NUM = 8;
    static_assert(sizeof...(Args) <= MAX_PARAM_NUM);
    auto regfile = GetRegfile();
    auto saveRegs = regfile->GetCallerSavedRegMask();
    saveRegs.set(GetTarget().GetReturnRegId());
    auto saveVregs = regfile->GetCallerSavedVRegMask();
    saveVregs.set(GetTarget().GetReturnFpRegId());

    SaveCallerRegisters(saveRegs, saveVregs, false);
    FillCallParams(std::forward<Args>(params)...);
    EmitCallRuntimeCode(nullptr, EntrypointId::TRACE);
    LoadCallerRegisters(saveRegs, saveVregs, false);
}
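
// Usage sketch (illustrative; the TraceId value is an assumption, not taken
// from this file). The first parameter must be the TraceId:
//   InsertTrace(TypedImm(static_cast<uint64_t>(TraceId::METHOD_ENTER)), methodReg);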

template <bool IS_FASTPATH, typename... Args>
void Codegen::CallEntrypoint(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
{
    ASSERT(inst != nullptr);
    CHECK_EQ(sizeof...(Args), GetRuntime()->GetEntrypointArgsNum(id));
    if (GetArch() == Arch::AARCH32) {
        // There is a problem with 64-bit parameters:
        // the number of parameters passed from entrypoints_gen.S.erb would be inconsistent with the AArch32 ABI.
        // Thus, runtime bridges would get a wrong parameter count (the \paramsnum macro argument).
        ASSERT(EnsureParamsFitIn32Bit({params...}));
        ASSERT(!dstReg.IsValid() || dstReg.GetSize() <= WORD_SIZE);
    }

    SCOPED_DISASM_STR(this, std::string("CallEntrypoint: ") + GetRuntime()->GetEntrypointName(id));
    RegMask liveRegs {preservedRegs | GetLiveRegisters(inst).first};
    RegMask paramsMask;
    if (inst->HasImplicitRuntimeCall() && !GetRuntime()->IsEntrypointNoreturn(id)) {
        SaveRegistersForImplicitRuntime(inst, &paramsMask, &liveRegs);
    }

    ASSERT(IS_FASTPATH == GetRuntime()->IsEntrypointFastPath(id));
    bool retRegAlive {liveRegs.Test(GetTarget().GetReturnRegId())};
    // Parameter regs: their initial values must be saved by the caller.
    // Other caller-saved regs are saved in the bridges.
    FillOnlyParameters(&liveRegs, sizeof...(Args), IS_FASTPATH);
    // When the value stored in the target return register outlives the current call, it must be saved too
    if (retRegAlive && dstReg.IsValid()) {
        Reg retReg = GetTarget().GetReturnReg(dstReg.GetType());
        if (dstReg.GetId() != retReg.GetId()) {
            GetEncoder()->SetRegister(&liveRegs, nullptr, retReg, true);
        }
    }

    SaveCallerRegisters(liveRegs, VRegMask(), true);

    if (sizeof...(Args) != 0) {
        FillCallParams(std::forward<Args>(params)...);
    }

    // Call code
    if (!EmitCallRuntimeCode(inst, id)) {
        return;
    }
    if (dstReg.IsValid()) {
        ASSERT(dstReg.IsScalar());
        GetEncoder()->SetRegister(&liveRegs, nullptr, dstReg, false);
        Reg retReg = GetTarget().GetReturnReg(dstReg.GetType());
        // We must:
        //  sign-extend INT8 and INT16 to INT32
        //  zero-extend UINT8 and UINT16 to UINT32
        if (dstReg.GetSize() < WORD_SIZE) {
            bool isSigned = DataType::IsTypeSigned(inst->GetType());
            GetEncoder()->EncodeCast(dstReg.As(INT32_TYPE), isSigned, retReg, isSigned);
        } else {
            GetEncoder()->EncodeMov(dstReg, retReg);
        }
    }
    CallEntrypointFinalize(liveRegs, paramsMask, inst);
}

// This function is used to call runtime functions through special bridges.
// NOTE: Don't use it to call the runtime without bridges (it saves only the parameters on the stack).
template <typename... Args>
void Codegen::CallRuntime(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
{
    CallEntrypoint<false>(inst, id, dstReg, preservedRegs, std::forward<Args>(params)...);
}

template <typename... Args>
void Codegen::CallFastPath(Inst *inst, EntrypointId id, Reg dstReg, RegMask preservedRegs, Args &&...params)
{
    CallEntrypoint<true>(inst, id, dstReg, preservedRegs, std::forward<Args>(params)...);
}
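
// Usage sketch (illustrative; the entrypoint name and classReg are assumptions,
// not taken from this file):
//   auto dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
//   CallRuntime(inst, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, RegMask::GetZeroMask(), classReg);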

template <typename... Args>
void Codegen::CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dstReg, Args &&...params)
{
    if (GetGraph()->IsAotMode()) {
        ScopedTmpReg methodReg(GetEncoder());
        LoadMethod(methodReg);
        CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), methodReg, std::forward<Args>(params)...);
    } else {
        if (Is64BitsArch(GetArch())) {
            CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), TypedImm(reinterpret_cast<uint64_t>(method)),
                        std::forward<Args>(params)...);
        } else {
            // uintptr_t causes problems on host cross-jit compilation
            CallRuntime(inst, eid, dstReg, RegMask::GetZeroMask(), TypedImm(down_cast<uint32_t>(method)),
                        std::forward<Args>(params)...);
        }
    }
}

template <typename... Args>
void Codegen::CallBarrier(RegMask liveRegs, VRegMask liveVregs, std::variant<EntrypointId, Reg> entrypoint,
                          Args &&...params)
{
    bool isFastpath = GetGraph()->GetMode().IsFastPath();
    if (isFastpath) {
        // An irtoc fast path must save all caller-saved registers in case it calls a native function
        liveRegs = GetCallerRegsMask(GetArch(), false);
        liveVregs = GetCallerRegsMask(GetArch(), true);
    }
    SaveCallerRegisters(liveRegs, liveVregs, !isFastpath);
    FillCallParams(std::forward<Args>(params)...);
    EmitCallRuntimeCode(nullptr, entrypoint);
    LoadCallerRegisters(liveRegs, liveVregs, !isFastpath);
}

template <typename T>
void Codegen::CreateUnaryCheck(Inst *inst, RuntimeInterface::EntrypointId id, DeoptimizeType type, Condition cc)
{
    [[maybe_unused]] auto ss = inst->GetSaveState();
    ASSERT(ss != nullptr && (ss->GetOpcode() == Opcode::SaveState || ss->GetOpcode() == Opcode::SaveStateDeoptimize));

    LabelHolder::LabelId slowPath;
    if (inst->CanDeoptimize()) {
        slowPath = CreateSlowPath<SlowPathDeoptimize>(inst, type)->GetLabel();
    } else {
        slowPath = CreateSlowPath<T>(inst, id)->GetLabel();
    }
    auto srcType = inst->GetInputType(0);
    auto src = ConvertRegister(inst->GetSrcReg(0), srcType);
    GetEncoder()->EncodeJump(slowPath, src, cc);
}
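
// Usage sketch for a null check (illustrative; the slow-path type and entrypoint
// names are assumptions, not taken from this file): jump to the slow path when
// the checked register compares equal to zero.
//   CreateUnaryCheck<SlowPathCheck>(inst, EntrypointId::NULL_POINTER_EXCEPTION,
//                                   DeoptimizeType::NULL_CHECK, Condition::EQ);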

// Returns the offset, in bytes, of the given stack location relative to the stack pointer.
inline ssize_t Codegen::GetStackOffset(Location location)
{
    if (location.GetKind() == LocationType::STACK_ARGUMENT) {
        return location.GetValue() * GetFrameLayout().GetSlotSize();
    }

    if (location.GetKind() == LocationType::STACK_PARAMETER) {
        return GetFrameLayout().GetFrameSize<CFrameLayout::OffsetUnit::BYTES>() +
               (location.GetValue() * GetFrameLayout().GetSlotSize());
    }

    ASSERT(location.GetKind() == LocationType::STACK);
    return GetFrameLayout().GetSpillOffsetFromSpInBytes(location.GetValue());
}
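
// Worked example (assuming an 8-byte slot size): a STACK_ARGUMENT location with
// value 2 resolves to 2 * 8 = 16 bytes from SP, while a STACK_PARAMETER location
// additionally skips the whole frame: frameSize + 2 * 8.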

inline ssize_t Codegen::GetBaseOffset(Location location)
{
    ASSERT(location.IsRegisterValid());
    auto *frame = GetFrameInfo();
    auto regNum = location.GetValue();
    bool isFp = location.IsFpRegister();

    auto offset = isFp ? frame->GetFpCalleesOffset() : frame->GetCalleesOffset();
    offset += GetCalleeRegsMask(GetArch(), isFp).GetDistanceFromTail(regNum);
    offset *= GetFrameLayout().GetSlotSize();
    return offset;
}

inline MemRef Codegen::GetMemRefForSlot(Location location)
{
    ASSERT(location.IsAnyStack());
    return MemRef(SpReg(), GetStackOffset(location));
}

inline Reg Codegen::SpReg() const
{
    return GetTarget().GetStackReg();
}

inline Reg Codegen::FpReg() const
{
    return GetTarget().GetFrameReg();
}

inline const Disassembly *Codegen::GetDisasm() const
{
    return &disasm_;
}

inline Disassembly *Codegen::GetDisasm()
{
    return &disasm_;
}

inline void Codegen::AddLiveOut(const BasicBlock *bb, const Register reg)
{
    liveOuts_[bb].Set(reg);
}

inline RegMask Codegen::GetLiveOut(const BasicBlock *bb) const
{
    auto it = liveOuts_.find(bb);
    return it != liveOuts_.end() ? it->second : RegMask();
}

inline Reg Codegen::ThreadReg() const
{
    return Reg(GetThreadReg(GetArch()), GetTarget().GetPtrRegType());
}

inline bool Codegen::OffsetFitReferenceTypeSize(uint64_t offset) const
{
    // -1 because some architectures use a signed offset
    // NOLINTNEXTLINE(hicpp-signed-bitwise)
    uint64_t maxOffset = 1ULL << (DataType::GetTypeSize(DataType::REFERENCE, GetArch()) - 1);
    return offset < maxOffset;
}

inline RegMask Codegen::GetUsedRegs() const
{
    return usedRegs_;
}

inline RegMask Codegen::GetUsedVRegs() const
{
    return usedVregs_;
}

inline uint32_t Codegen::GetVtableShift()
{
    // The size of a VTable element equals the size of a pointer for the architecture
    // (not the size of a pointer to an object)
    constexpr uint32_t SHIFT_64_BITS = 3;
    constexpr uint32_t SHIFT_32_BITS = 2;
    return Is64BitsArch(GetGraph()->GetArch()) ? SHIFT_64_BITS : SHIFT_32_BITS;
}
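
// Worked example: on a 64-bit target the byte offset of vtable entry `i` is
// computed as (i << GetVtableShift()) == i * 8 == i * sizeof(void *).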

template <typename Arg, typename... Args>
ALWAYS_INLINE void Codegen::AddParamRegsInLiveMasksHandleArgs(ParameterInfo *paramInfo, RegMask *liveRegs,
                                                              VRegMask *liveVregs, Arg param, Args &&...params)
{
    auto currDst = paramInfo->GetNativeParam(param.GetType());
    if (std::holds_alternative<Reg>(currDst)) {
        auto reg = std::get<Reg>(currDst);
        if (reg.IsScalar()) {
            liveRegs->set(reg.GetId());
        } else {
            liveVregs->set(reg.GetId());
        }
    } else {
        GetEncoder()->SetFalseResult();
        UNREACHABLE();
    }
    if constexpr (sizeof...(Args) != 0) {
        AddParamRegsInLiveMasksHandleArgs(paramInfo, liveRegs, liveVregs, std::forward<Args>(params)...);
    }
}

template <typename... Args>
void Codegen::AddParamRegsInLiveMasks(RegMask *liveRegs, VRegMask *liveVregs, Args &&...params)
{
    auto callconv = GetCallingConvention();
    auto paramInfo = callconv->GetParameterInfo(0);
    AddParamRegsInLiveMasksHandleArgs(paramInfo, liveRegs, liveVregs, std::forward<Args>(params)...);
}

template <typename... Args>
void Codegen::CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst, Args &&...params)
{
    VRegMask liveVregs;
    RegMask liveRegs;
    auto enc = GetEncoder();

    AddParamRegsInLiveMasks(&liveRegs, &liveVregs, params...);

    if (dst.IsValid()) {
        ASSERT(dst.IsScalar());
        enc->SetRegister(&liveRegs, &liveVregs, dst, false);
        Reg retVal = GetTarget().GetReturnReg(dst.GetType());
        if (dst.GetId() != retVal.GetId()) {
            enc->SetRegister(&liveRegs, &liveVregs, retVal, true);
        }
    }

    SaveCallerRegisters(liveRegs, liveVregs, true);

    FillCallParams(std::forward<Args>(params)...);
    CallIntrinsic(inst, intrinsicId);

    if (inst->GetSaveState() != nullptr) {
        CreateStackMap(inst);
    }

    if (dst.IsValid()) {
        Reg retVal = GetTarget().GetReturnReg(dst.GetType());
        enc->EncodeMov(dst, retVal);
    }

    LoadCallerRegisters(liveRegs, liveVregs, true);
}

template <typename T>
void Codegen::EncodeImms(const T &imms, bool skipFirstLocation)
{
    auto paramInfo = GetCallingConvention()->GetParameterInfo(0);
    auto immType = DataType::INT32;
    if (skipFirstLocation) {
        paramInfo->GetNextLocation(immType);
    }
    for (auto imm : imms) {
        auto location = paramInfo->GetNextLocation(immType);
        ASSERT(location.IsFixedRegister());
        auto dstReg = ConvertRegister(location.GetValue(), immType);
        GetEncoder()->EncodeMov(dstReg, Imm(imm));
    }
}
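
// Usage sketch (illustrative; the value names are hypothetical): move two
// immediates into the first two fixed argument registers of the native
// calling convention.
//   EncodeImms(std::array<int32_t, 2U> {flags, slotId}, false);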

template <typename... Args>
void FillPostWrbCallParams(MemRef mem, Args &&...params);

template <size_t IMM_ARRAY_SIZE>
class Codegen::FillCallParamsHelper {
public:
    using ImmsIter = typename std::array<std::pair<Reg, Imm>, IMM_ARRAY_SIZE>::iterator;

    FillCallParamsHelper(Codegen *cg, ParameterInfo *paramInfo, SpillFillInst *regMoves, ArenaVector<Reg> *spMoves,
                         ImmsIter immsIter)
        : cg_(cg), paramInfo_(paramInfo), regMoves_(regMoves), spMoves_(spMoves), immsIter_(immsIter)
    {
    }

    template <typename Arg, typename... Args>
    ALWAYS_INLINE void FillCallParamsHandleOperands(Arg &&arg, Args &&...params)
    {
        Location dst;
        auto type = arg.GetType().ToDataType();
        dst = paramInfo_->GetNextLocation(type);
        if (dst.IsStackArgument()) {
            cg_->GetEncoder()->SetFalseResult();
            UNREACHABLE();  // Move to BoundaryFrame
        }

        static_assert(std::is_same_v<std::decay_t<Arg>, TypedImm> || std::is_convertible_v<Arg, Reg>);
        if constexpr (std::is_same_v<std::decay_t<Arg>, TypedImm>) {
            auto reg = cg_->ConvertRegister(dst.GetValue(), type);
            *immsIter_ = {reg, arg.GetImm()};
            immsIter_++;
        } else {
            Reg reg(std::forward<Arg>(arg));
            if (reg == cg_->SpReg()) {
                // SP should be handled separately, since on the ARM64 target its ID is out of range
                spMoves_->emplace_back(cg_->ConvertRegister(dst.GetValue(), type));
            } else {
                regMoves_->AddSpillFill(Location::MakeRegister(reg.GetId(), type), dst, type);
            }
        }
        if constexpr (sizeof...(Args) != 0) {
            FillCallParamsHandleOperands(std::forward<Args>(params)...);
        }
    }

private:
    Codegen *cg_ {};
    ParameterInfo *paramInfo_ {};
    SpillFillInst *regMoves_ {};
    ArenaVector<Reg> *spMoves_ {};
    ImmsIter immsIter_ {};
};

template <typename T, typename... Args>
constexpr std::pair<size_t, size_t> CountParameters()
{
    static_assert(std::is_same_v<std::decay_t<T>, TypedImm> != std::is_convertible_v<T, Reg>);
    if constexpr (sizeof...(Args) != 0) {
        constexpr auto IMM_REG_COUNT = CountParameters<Args...>();

        if constexpr (std::is_same_v<std::decay_t<T>, TypedImm>) {
            return {IMM_REG_COUNT.first + 1, IMM_REG_COUNT.second};
        } else if constexpr (std::is_convertible_v<T, Reg>) {
            return {IMM_REG_COUNT.first, IMM_REG_COUNT.second + 1};
        }
    }
    return {std::is_same_v<std::decay_t<T>, TypedImm>, std::is_convertible_v<T, Reg>};
}
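
// Compile-time example, following directly from the definition above: two
// immediates and one register yield the pair {2, 1}.
//   static_assert(CountParameters<TypedImm, Reg, TypedImm>() == std::pair<size_t, size_t> {2U, 1U});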

template <typename... Args>
void Codegen::FillCallParams(Args &&...params)
{
    SCOPED_DISASM_STR(this, "FillCallParams");
    if constexpr (sizeof...(Args) != 0) {
        constexpr size_t IMMEDIATES_COUNT = CountParameters<Args...>().first;
        constexpr size_t REGS_COUNT = CountParameters<Args...>().second;
        // Native call: do not add reserved parameters
        auto paramInfo = GetCallingConvention()->GetParameterInfo(0);
        std::array<std::pair<Reg, Imm>, IMMEDIATES_COUNT> immediates {};
        ArenaVector<Reg> spMoves(GetLocalAllocator()->Adapter());
        auto regMoves = GetGraph()->CreateInstSpillFill();
        spMoves.reserve(REGS_COUNT);
        regMoves->GetSpillFills().reserve(REGS_COUNT);

        FillCallParamsHelper<IMMEDIATES_COUNT> h {this, paramInfo, regMoves, &spMoves, immediates.begin()};
        h.FillCallParamsHandleOperands(std::forward<Args>(params)...);

        // Resolve the register move order and encode the moves
        spillFillsResolver_.ResolveIfRequired(regMoves);
        SpillFillEncoder(this, regMoves).EncodeSpillFill();

        // Encode immediate moves
        for (auto &immValues : immediates) {
            GetEncoder()->EncodeMov(immValues.first, immValues.second);
        }

        // Encode moves from the SP reg
        for (auto dst : spMoves) {
            GetEncoder()->EncodeMov(dst, SpReg());
        }
    }
}
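
// Usage sketch (illustrative; the register names are assumptions): mixed
// register and immediate arguments are moved into their ABI locations in one
// step, in the same form FillPostWrbCallParams uses below.
//   FillCallParams(objReg, TypedImm(offset), valueReg);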

template <typename... Args>
void Codegen::FillPostWrbCallParams(MemRef mem, Args &&...params)
{
    auto base {mem.GetBase().As(TypeInfo::FromDataType(DataType::REFERENCE, GetArch()))};
    if (mem.HasIndex()) {
        ASSERT(mem.GetScale() == 0 && !mem.HasDisp());
        FillCallParams(base, mem.GetIndex(), std::forward<Args>(params)...);
    } else {
        FillCallParams(base, TypedImm(mem.GetDisp()), std::forward<Args>(params)...);
    }
}

}  // namespace ark::compiler

#endif  // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_INL_H