/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
Low-level calling convention
*/
#include "target/aarch64/target.h"

namespace ark::compiler::aarch64 {

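// Register lists are pushed/popped in pairs, so odd-sized lists are padded to an even count
// (see PushRegs/PopRegs).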
constexpr int32_t IMM_2 = 2;

Aarch64CallingConvention::Aarch64CallingConvention(ArenaAllocator *allocator, Encoder *enc, RegistersDescription *descr,
                                                   CallConvMode mode)
    : CallingConvention(allocator, enc, descr, mode)
{
}

constexpr auto Aarch64CallingConvention::GetTarget()
{
    return ark::compiler::Target(Arch::AARCH64);
}

bool Aarch64CallingConvention::IsValid() const
{
    return true;
}

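// Underlying VIXL macro-assembler owned by the AArch64 encoder.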
vixl::aarch64::MacroAssembler *Aarch64CallingConvention::GetMasm()
{
    return (static_cast<Aarch64Encoder *>(GetEncoder()))->GetMasm();
}

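// Creates a parameter-info object, skipping the first regsOffset integer parameter registers.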
ParameterInfo *Aarch64CallingConvention::GetParameterInfo(uint8_t regsOffset)
{
    auto paramInfo = GetAllocator()->New<aarch64::Aarch64ParameterInfo>();
    for (int i = 0; i < regsOffset; ++i) {
        paramInfo->GetNativeParam(INT64_TYPE);
    }
    return paramInfo;
}

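// Entry point of the generated code: the address of the first emitted instruction.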
void *Aarch64CallingConvention::GetCodeEntry()
{
    return reinterpret_cast<void *>(GetMasm()->GetInstructionAt(0));
}

uint32_t Aarch64CallingConvention::GetCodeSize()
{
    return GetMasm()->GetSizeOfCodeGenerated();
}

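// Pushes the given scalar and vector register lists onto the stack. Odd-sized lists are padded
// with an extra register (xzr or the alignment vreg) so registers are always pushed in pairs.
// Returns the total number of pushed registers, including the padding.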
size_t Aarch64CallingConvention::PushRegs(vixl::aarch64::CPURegList regs, vixl::aarch64::CPURegList vregs,
                                          bool isCallee)
{
    if ((regs.GetCount() % IMM_2) == 1) {
        ASSERT((regs.GetList() & (UINT64_C(1) << vixl::aarch64::xzr.GetCode())) == 0);
        regs.Combine(vixl::aarch64::xzr);
    }
    if ((vregs.GetCount() % IMM_2) == 1) {
        auto regdescr = static_cast<Aarch64RegisterDescription *>(GetRegfile());
        uint8_t alignmentVreg = regdescr->GetAlignmentVreg(isCallee);
        ASSERT((vregs.GetList() & (UINT64_C(1) << alignmentVreg)) == 0);
        vregs.Combine(alignmentVreg);
    }
    GetMasm()->PushCPURegList(vregs);
    GetMasm()->PushCPURegList(regs);
    return vregs.GetCount() + regs.GetCount();
}

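// Reverse of PushRegs: applies the same even-count padding, then pops the scalar list followed by
// the vector list. Returns the total number of popped registers, including the padding.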
size_t Aarch64CallingConvention::PopRegs(vixl::aarch64::CPURegList regs, vixl::aarch64::CPURegList vregs, bool isCallee)
{
    if ((regs.GetCount() % IMM_2) == 1) {
        ASSERT((regs.GetList() & (UINT64_C(1) << vixl::aarch64::xzr.GetCode())) == 0);
        regs.Combine(vixl::aarch64::xzr);
    }
    if ((vregs.GetCount() % IMM_2) == 1) {
        auto regdescr = static_cast<Aarch64RegisterDescription *>(GetRegfile());
        uint8_t alignmentVreg = regdescr->GetAlignmentVreg(isCallee);
        ASSERT((vregs.GetList() & (UINT64_C(1) << alignmentVreg)) == 0);
        vregs.Combine(alignmentVreg);
    }
    GetMasm()->PopCPURegList(regs);
    GetMasm()->PopCPURegList(vregs);
    return vregs.GetCount() + regs.GetCount();
}

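// Assigns the next native-ABI location for a parameter of the given type: a vector register for
// floating-point values, a scalar register otherwise, or a stack slot once the corresponding
// parameter registers are exhausted. Parameters wider than a double word take two scalar registers.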
std::variant<Reg, uint8_t> Aarch64ParameterInfo::GetNativeParam(const TypeInfo &type)
{
    if (type.IsFloat()) {
        if (currentVectorNumber_ > MAX_VECTOR_PARAM_ID) {
            return currentStackOffset_++;
        }
        return Reg(currentVectorNumber_++, type);
    }
    if (currentScalarNumber_ > MAX_SCALAR_PARAM_ID) {
        return currentStackOffset_++;
    }
    auto ret = Reg(currentScalarNumber_++, type);
    if (type.GetSize() > DOUBLE_WORD_SIZE) {
        currentScalarNumber_++;
    }
    return ret;
}

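// Same register/stack assignment as GetNativeParam, expressed as a compiler Location.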
Location Aarch64ParameterInfo::GetNextLocation(DataType::Type type)
{
    if (DataType::IsFloatType(type)) {
        if (currentVectorNumber_ > MAX_VECTOR_PARAM_ID) {
            return Location::MakeStackArgument(currentStackOffset_++);
        }
        return Location::MakeFpRegister(currentVectorNumber_++);
    }
    if (currentScalarNumber_ > MAX_SCALAR_PARAM_ID) {
        return Location::MakeStackArgument(currentStackOffset_++);
    }
    Target target(Arch::AARCH64);
    return Location::MakeRegister(target.GetParamRegId(currentScalarNumber_++));
}

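// Returns a register holding the frame-flags value: the zero register when no flags are set,
// otherwise LR loaded with the flags (used by the prologue to initialize the frame's flags slot).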
Reg Aarch64CallingConvention::InitFlagsReg(bool hasFloatRegs)
{
    auto flags {static_cast<uint64_t>(hasFloatRegs) << CFrameLayout::HasFloatRegsFlag::START_BIT};
    auto flagsReg {GetTarget().GetZeroReg()};
    if (flags != 0U) {
        flagsReg = GetTarget().GetLinkReg();
        GetEncoder()->EncodeMov(flagsReg, Imm(flags));
    }
    return flagsReg;
}

using vixl::aarch64::CPURegList, vixl::aarch64::CPURegister, vixl::aarch64::MemOperand;

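// Computes the masks of used callee-saved scalar and vector registers and stores those registers
// into their frame slots below SP.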
void Aarch64CallingConvention::SaveCalleeSavedRegs(const FrameInfo &frameInfo, const CFrameLayout &fl,
                                                   size_t spToRegsSlots, bool isNative)
{
    RegMask calleeRegsMask;
    VRegMask calleeVregsMask;
    auto regdescr = static_cast<Aarch64RegisterDescription *>(GetRegfile());
    bool irtocOptimized = isNative ? GetMode().IsOptIrtoc() : false;

    regdescr->FillUsedCalleeSavedRegisters(&calleeRegsMask, &calleeVregsMask, frameInfo.GetSaveUnusedCalleeRegs(),
                                           irtocOptimized);
    SET_CFI_CALLEE_REGS(calleeRegsMask);
    SET_CFI_CALLEE_VREGS(calleeVregsMask);
    auto lastCalleeReg = spToRegsSlots + calleeRegsMask.Count();
    auto lastCalleeVreg = spToRegsSlots + fl.GetCalleeRegistersCount(false) + calleeVregsMask.Count();
    auto calleeRegs = CPURegList(CPURegister::kRegister, vixl::aarch64::kXRegSize, calleeRegsMask.GetValue());
    auto calleeVregs = CPURegList(CPURegister::kVRegister, vixl::aarch64::kXRegSize, calleeVregsMask.GetValue());
    auto sp = GetTarget().GetStackReg();
    GetMasm()->StoreCPURegList(calleeRegs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeReg * fl.GetSlotSize())));
    GetMasm()->StoreCPURegList(calleeVregs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeVreg * fl.GetSlotSize())));
    SET_CFI_OFFSET(pushCallees, GetEncoder()->GetCursorOffset());
}

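// Emits the managed-code prologue: saves FP/LR, sets up FP, expands arguments for dynamic calls
// when required, stores the frame flags and Method pointer, saves callee-saved registers and
// finally adjusts SP to the frame's bottom.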
void Aarch64CallingConvention::GeneratePrologue(const FrameInfo &frameInfo)
{
    static_assert((CFrameLayout::GetLocalsCount() & 1U) == 0);
    auto encoder = GetEncoder();
    const CFrameLayout &fl = encoder->GetFrameLayout();
    auto sp = GetTarget().GetStackReg();
    auto fp = GetTarget().GetFrameReg();
    auto lr = GetTarget().GetLinkReg();
    auto spToRegsSlots = CFrameLayout::GetTopToRegsSlotsCount();

    // Save FP and LR
    if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
        static_assert(CFrameLayout::GetTopToRegsSlotsCount() > CFrameLayout::GetFpLrSlotsCount());
        GetMasm()->PushCPURegList(vixl::aarch64::CPURegList(VixlReg(fp), VixlReg(lr)));
        SET_CFI_OFFSET(pushFplr, encoder->GetCursorOffset());
        spToRegsSlots -= CFrameLayout::GetFpLrSlotsCount();
    }

    // Setup FP
    if (frameInfo.GetSetupFrame() || ProvideCFI()) {
        // If the SetupFrame flag is set, then SaveFrameAndLinkRegs must be set as well.
        // These are separate flags because it looks like Irtoc does not need the frame setup
        // but still requires saving the frame and link registers.
        ASSERT(!frameInfo.GetSetupFrame() || frameInfo.GetSaveFrameAndLinkRegs());
        encoder->EncodeMov(fp, sp);
        SET_CFI_OFFSET(setFp, encoder->GetCursorOffset());
    }

    if (IsDynCallMode() && GetDynInfo().IsCheckRequired()) {
        static_assert(CallConvDynInfo::REG_NUM_ARGS == 1);
        static_assert(CallConvDynInfo::REG_COUNT == CallConvDynInfo::REG_NUM_ARGS + 1);

        ASSERT(frameInfo.GetSaveFrameAndLinkRegs());

        constexpr auto NUM_ACTUAL_REG = GetTarget().GetParamReg(CallConvDynInfo::REG_NUM_ARGS);
        constexpr auto NUM_EXPECTED_REG = GetTarget().GetParamReg(CallConvDynInfo::REG_COUNT);
        auto numExpected = GetDynInfo().GetNumExpectedArgs();

        auto expandDone = encoder->CreateLabel();
        encoder->EncodeJump(expandDone, NUM_ACTUAL_REG, Imm(numExpected), Condition::GE);
        encoder->EncodeMov(NUM_EXPECTED_REG, Imm(numExpected));

        MemRef expandEntrypoint(Reg(GetThreadReg(Arch::AARCH64), GetTarget().GetPtrRegType()),
                                GetDynInfo().GetExpandEntrypointTlsOffset());
        GetEncoder()->MakeCall(expandEntrypoint);
        encoder->BindLabel(expandDone);
    }

    // Reset flags and setup method
    if (frameInfo.GetSetupFrame()) {
        static_assert(CFrameMethod::End() == CFrameFlags::Start());
        constexpr int64_t SLOTS_COUNT = CFrameMethod::GetSize() + CFrameFlags::GetSize();

        GetMasm()->Stp(VixlReg(InitFlagsReg(frameInfo.GetHasFloatRegs())),  // Reset OSR flag and set HasFloatRegsFlag
                       VixlReg(GetTarget().GetParamReg(0)),                 // Set Method pointer
                       vixl::aarch64::MemOperand(VixlReg(sp), VixlImm(-SLOTS_COUNT * fl.GetSlotSize()),
                                                 vixl::aarch64::AddrMode::PreIndex));
        spToRegsSlots -= SLOTS_COUNT;
    }

    SaveCalleeSavedRegs(frameInfo, fl, spToRegsSlots, false);

    // Adjust SP
    if (frameInfo.GetAdjustSpReg()) {
        auto spToFrameEndOffset = (spToRegsSlots + fl.GetRegsSlotsCount()) * fl.GetSlotSize();
        encoder->EncodeSub(sp, sp, Imm(spToFrameEndOffset));
    }
}

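// Emits the epilogue matching GeneratePrologue: restores callee-saved registers, readjusts SP,
// restores FP/LR (with special handling for OSR frames) and returns.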
void Aarch64CallingConvention::GenerateEpilogue(const FrameInfo &frameInfo, std::function<void()> postJob)
{
    auto encoder = GetEncoder();
    const CFrameLayout &fl = encoder->GetFrameLayout();
    auto regdescr = static_cast<Aarch64RegisterDescription *>(GetRegfile());
    auto sp = GetTarget().GetStackReg();
    auto fp = GetTarget().GetFrameReg();
    auto lr = GetTarget().GetLinkReg();

    if (postJob) {
        postJob();
    }

    // Restore callee-saved registers
    RegMask calleeRegsMask;
    VRegMask calleeVregsMask;
    regdescr->FillUsedCalleeSavedRegisters(&calleeRegsMask, &calleeVregsMask, frameInfo.GetSaveUnusedCalleeRegs());

    auto calleeRegs = CPURegList(CPURegister::kRegister, vixl::aarch64::kXRegSize, calleeRegsMask.GetValue());
    auto calleeVregs = CPURegList(CPURegister::kVRegister, vixl::aarch64::kXRegSize, calleeVregsMask.GetValue());

    if (frameInfo.GetAdjustSpReg()) {
        // SP points to the frame's bottom
        auto lastCalleeReg = fl.GetRegsSlotsCount() - calleeRegsMask.Count();
        auto lastCalleeVreg = fl.GetRegsSlotsCount() - fl.GetCalleeRegistersCount(false) - calleeVregsMask.Count();
        GetMasm()->LoadCPURegList(calleeRegs, MemOperand(VixlReg(sp), VixlImm(lastCalleeReg * fl.GetSlotSize())));
        GetMasm()->LoadCPURegList(calleeVregs, MemOperand(VixlReg(sp), VixlImm(lastCalleeVreg * fl.GetSlotSize())));
    } else {
        // SP points either to the frame's top or to the frame's top + FPLR slot
        auto spToRegsSlots = CFrameLayout::GetTopToRegsSlotsCount();
        if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
            // Adjust for the FPLR slot
            spToRegsSlots -= CFrameLayout::GetFpLrSlotsCount();
        }
        auto lastCalleeReg = spToRegsSlots + calleeRegsMask.Count();
        auto lastCalleeVreg = spToRegsSlots + fl.GetCalleeRegistersCount(false) + calleeVregsMask.Count();
        GetMasm()->LoadCPURegList(calleeRegs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeReg * fl.GetSlotSize())));
        GetMasm()->LoadCPURegList(calleeVregs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeVreg * fl.GetSlotSize())));
    }
    SET_CFI_OFFSET(popCallees, encoder->GetCursorOffset());

    // Adjust SP
    if (frameInfo.GetAdjustSpReg()) {
        // SP points to the frame's bottom
        auto spToFrameTopSlots = fl.GetRegsSlotsCount() + CFrameRegs::Start() - CFrameReturnAddr::Start();
        if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
            spToFrameTopSlots -= CFrameLayout::GetFpLrSlotsCount();
        }
        auto spToFrameTopOffset = spToFrameTopSlots * fl.GetSlotSize();
        encoder->EncodeAdd(sp, sp, Imm(spToFrameTopOffset));
    }

    // Restore FP and LR
    if (IsOsrMode()) {
        encoder->EncodeAdd(sp, sp, Imm(CFrameLayout::GetFpLrSlotsCount() * fl.GetSlotSize()));
        encoder->EncodeLdp(fp, lr, false, MemRef(fp, -fl.GetOsrFpLrOffset()));
    } else if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
        GetMasm()->PopCPURegList(vixl::aarch64::CPURegList(VixlReg(fp), VixlReg(lr)));
    }
    SET_CFI_OFFSET(popFplr, encoder->GetCursorOffset());

    GetMasm()->Ret();
}

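// Prologue for the 'native' calling convention (FastPath calls from the Irtoc interpreter entrypoint):
// like GeneratePrologue, but it does not store the flags/Method pair and passes isNative = true to
// SaveCalleeSavedRegs, which selects the Irtoc-optimized callee-saved set when that mode is active.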
void Aarch64CallingConvention::GenerateNativePrologue(const FrameInfo &frameInfo)
{
    static_assert((CFrameLayout::GetLocalsCount() & 1U) == 0);
    auto encoder = GetEncoder();
    const CFrameLayout &fl = encoder->GetFrameLayout();
    auto sp = GetTarget().GetStackReg();
    auto fp = GetTarget().GetFrameReg();
    auto lr = GetTarget().GetLinkReg();
    auto spToRegsSlots = CFrameLayout::GetTopToRegsSlotsCount();

    // Save FP and LR
    if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
        static_assert(CFrameLayout::GetTopToRegsSlotsCount() > CFrameLayout::GetFpLrSlotsCount());
        GetMasm()->PushCPURegList(vixl::aarch64::CPURegList(VixlReg(fp), VixlReg(lr)));
        SET_CFI_OFFSET(pushFplr, encoder->GetCursorOffset());
        spToRegsSlots -= CFrameLayout::GetFpLrSlotsCount();
    }

    // The 'native' calling convention requires setting up FP for FastPath calls from the Irtoc interpreter entrypoint
    if (frameInfo.GetSetupFrame() || ProvideCFI()) {
        encoder->EncodeMov(fp, sp);
        SET_CFI_OFFSET(setFp, encoder->GetCursorOffset());
    }

    if (IsDynCallMode() && GetDynInfo().IsCheckRequired()) {
        static_assert(CallConvDynInfo::REG_NUM_ARGS == 1);
        static_assert(CallConvDynInfo::REG_COUNT == CallConvDynInfo::REG_NUM_ARGS + 1);

        ASSERT(frameInfo.GetSaveFrameAndLinkRegs());

        constexpr auto NUM_ACTUAL_REG = GetTarget().GetParamReg(CallConvDynInfo::REG_NUM_ARGS);
        constexpr auto NUM_EXPECTED_REG = GetTarget().GetParamReg(CallConvDynInfo::REG_COUNT);
        auto numExpected = GetDynInfo().GetNumExpectedArgs();

        auto expandDone = encoder->CreateLabel();
        encoder->EncodeJump(expandDone, NUM_ACTUAL_REG, Imm(numExpected), Condition::GE);
        encoder->EncodeMov(NUM_EXPECTED_REG, Imm(numExpected));

        MemRef expandEntrypoint(Reg(GetThreadReg(Arch::AARCH64), GetTarget().GetPtrRegType()),
                                GetDynInfo().GetExpandEntrypointTlsOffset());
        GetEncoder()->MakeCall(expandEntrypoint);
        encoder->BindLabel(expandDone);
    }

    // Save callee-saved registers
    SaveCalleeSavedRegs(frameInfo, fl, spToRegsSlots, true);

    // Adjust SP
    if (frameInfo.GetAdjustSpReg()) {
        auto spToFrameEndOffset = (spToRegsSlots + fl.GetRegsSlotsCount()) * fl.GetSlotSize();
        encoder->EncodeSub(sp, sp, Imm(spToFrameEndOffset));
    }
}

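// Epilogue matching GenerateNativePrologue: restores the callee-saved set the native prologue saved
// (taking optimized Irtoc mode into account), readjusts SP, restores FP/LR and returns.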
void Aarch64CallingConvention::GenerateNativeEpilogue(const FrameInfo &frameInfo, std::function<void()> postJob)
{
    auto encoder = GetEncoder();
    const CFrameLayout &fl = encoder->GetFrameLayout();
    auto regdescr = static_cast<Aarch64RegisterDescription *>(GetRegfile());
    auto sp = GetTarget().GetStackReg();
    auto fp = GetTarget().GetFrameReg();
    auto lr = GetTarget().GetLinkReg();

    if (postJob) {
        postJob();
    }

    // Restore callee-saved registers
    RegMask calleeRegsMask;
    VRegMask calleeVregsMask;
    regdescr->FillUsedCalleeSavedRegisters(&calleeRegsMask, &calleeVregsMask, frameInfo.GetSaveUnusedCalleeRegs(),
                                           GetMode().IsOptIrtoc());

    auto calleeRegs = CPURegList(CPURegister::kRegister, vixl::aarch64::kXRegSize, calleeRegsMask.GetValue());
    auto calleeVregs = CPURegList(CPURegister::kVRegister, vixl::aarch64::kXRegSize, calleeVregsMask.GetValue());

    if (frameInfo.GetAdjustSpReg()) {
        // SP points to the frame's bottom
        auto lastCalleeReg = fl.GetRegsSlotsCount() - calleeRegsMask.Count();
        auto lastCalleeVreg = fl.GetRegsSlotsCount() - fl.GetCalleeRegistersCount(false) - calleeVregsMask.Count();
        GetMasm()->LoadCPURegList(calleeRegs, MemOperand(VixlReg(sp), VixlImm(lastCalleeReg * fl.GetSlotSize())));
        GetMasm()->LoadCPURegList(calleeVregs, MemOperand(VixlReg(sp), VixlImm(lastCalleeVreg * fl.GetSlotSize())));
    } else {
        // SP points either to the frame's top or to the frame's top + FPLR slot
        auto spToRegsSlots = CFrameLayout::GetTopToRegsSlotsCount();
        if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
            // Adjust for the FPLR slot
            spToRegsSlots -= CFrameLayout::GetFpLrSlotsCount();
        }
        auto lastCalleeReg = spToRegsSlots + calleeRegsMask.Count();
        auto lastCalleeVreg = spToRegsSlots + fl.GetCalleeRegistersCount(false) + calleeVregsMask.Count();
        GetMasm()->LoadCPURegList(calleeRegs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeReg * fl.GetSlotSize())));
        GetMasm()->LoadCPURegList(calleeVregs, MemOperand(VixlReg(sp), VixlImm(-lastCalleeVreg * fl.GetSlotSize())));
    }
    SET_CFI_OFFSET(popCallees, encoder->GetCursorOffset());

    // Adjust SP
    if (frameInfo.GetAdjustSpReg()) {
        // SP points to the frame's bottom
        auto spToFrameTopSlots = fl.GetRegsSlotsCount() + CFrameRegs::Start() - CFrameReturnAddr::Start();
        if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
            spToFrameTopSlots -= CFrameLayout::GetFpLrSlotsCount();
        }
        auto spToFrameTopOffset = spToFrameTopSlots * fl.GetSlotSize();
        encoder->EncodeAdd(sp, sp, Imm(spToFrameTopOffset));
    }

    // Restore FP and LR
    if (IsOsrMode()) {
        encoder->EncodeAdd(sp, sp, Imm(CFrameLayout::GetFpLrSlotsCount() * fl.GetSlotSize()));
        encoder->EncodeLdp(fp, lr, false, MemRef(fp, -fl.GetOsrFpLrOffset()));
    } else if (frameInfo.GetSaveFrameAndLinkRegs() || ProvideCFI()) {
        GetMasm()->PopCPURegList(vixl::aarch64::CPURegList(VixlReg(fp), VixlReg(lr)));
    }
    SET_CFI_OFFSET(popFplr, encoder->GetCursorOffset());

    GetMasm()->Ret();
}
}  // namespace ark::compiler::aarch64