/*
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "codegen_fastpath.h"
#include "optimizer/ir/inst.h"
#include "relocations.h"

namespace ark::compiler {

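/*
 * Spill the caller-saved registers selected by `mask` into their reserved CFrame
 * slots, addressed relative to the frame register.
 */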
static void SaveCallerRegistersInFrame(RegMask mask, Encoder *encoder, const CFrameLayout &fl, bool isFp)
{
    if (mask.none()) {
        return;
    }
    auto fpReg = Target(fl.GetArch()).GetFrameReg();

    mask &= GetCallerRegsMask(fl.GetArch(), isFp);
    auto startSlot = static_cast<size_t>(fl.GetStackStartSlot()) + fl.GetCallerLastSlot(isFp);
    encoder->SaveRegisters(mask, isFp, -startSlot, fpReg, GetCallerRegsMask(fl.GetArch(), isFp));
}

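/*
 * Reload the caller-saved registers selected by `mask` from the CFrame slots
 * written by SaveCallerRegistersInFrame.
 */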
static void RestoreCallerRegistersFromFrame(RegMask mask, Encoder *encoder, const CFrameLayout &fl, bool isFp)
{
    if (mask.none()) {
        return;
    }
    auto fpReg = Target(fl.GetArch()).GetFrameReg();

    mask &= GetCallerRegsMask(fl.GetArch(), isFp);
    auto startSlot = static_cast<size_t>(fl.GetStackStartSlot()) + fl.GetCallerLastSlot(isFp);
    encoder->LoadRegisters(mask, isFp, -startSlot, fpReg, GetCallerRegsMask(fl.GetArch(), isFp));
}

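/*
 * Check whether a single instruction may call into the runtime: stores that need
 * a write barrier, dynamic loads/stores and dynamic casts do, as does any
 * runtime-call instruction except the SLOW_PATH_ENTRY and TAIL_CALL intrinsics.
 */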
static bool InstHasRuntimeCall(const Inst *inst)
{
    switch (inst->GetOpcode()) {
        case Opcode::Store:
            if (inst->CastToStore()->GetNeedBarrier()) {
                return true;
            }
            break;
        case Opcode::StoreI:
            if (inst->CastToStoreI()->GetNeedBarrier()) {
                return true;
            }
            break;
        case Opcode::StoreArray:
            if (inst->CastToStoreArray()->GetNeedBarrier()) {
                return true;
            }
            break;
        case Opcode::StoreObject:
            if (inst->CastToStoreObject()->GetNeedBarrier()) {
                return true;
            }
            break;
        case Opcode::LoadObjectDynamic:
        case Opcode::StoreObjectDynamic:
            return true;
        case Opcode::Cast:
            if (inst->CastToCast()->IsDynamicCast()) {
                return true;
            }
            break;
        default:
            break;
    }
    if (inst->IsRuntimeCall()) {
        if (!inst->IsIntrinsic()) {
            return true;
        }
        auto intrinsicId = inst->CastToIntrinsic()->GetIntrinsicId();
        if (intrinsicId != RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY &&
            intrinsicId != RuntimeInterface::IntrinsicId::INTRINSIC_TAIL_CALL) {
            return true;
        }
    }
    return false;
}

/*
 * We determine runtime calls manually, not using MethodProperties::HasRuntimeCalls, because we need to ignore
 * the SLOW_PATH_ENTRY and TAIL_CALL intrinsics, since they don't require LR to be preserved.
 */
static bool HasRuntimeCalls(const Graph &graph)
{
    for (auto bb : graph.GetBlocksRPO()) {
        for (auto inst : bb->Insts()) {
            if (InstHasRuntimeCall(inst)) {
                return true;
            }
        }
    }
    return false;
}

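/*
 * FastPath prologue: spill the used caller-saved registers (except the argument
 * registers) into the frame, push the used callee-saved registers (plus LR when
 * a runtime call may clobber it) and reserve the spill area on the stack.
 */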
void CodegenFastPath::GeneratePrologue()
{
    SCOPED_DISASM_STR(this, "FastPath Prologue");

    auto callerRegs = RegMask(GetCallerRegsMask(GetArch(), false));
    auto argsNum = GetRuntime()->GetMethodArgumentsCount(GetGraph()->GetMethod());
    callerRegs &= GetUsedRegs() & ~GetTarget().GetParamRegsMask(argsNum);
    SaveCallerRegistersInFrame(callerRegs, GetEncoder(), GetFrameLayout(), false);

    auto hasRuntimeCalls = HasRuntimeCalls(*GetGraph());

    savedRegisters_ = GetUsedRegs() & RegMask(GetCalleeRegsMask(GetArch(), false));
    if (GetTarget().SupportLinkReg() && hasRuntimeCalls) {
        savedRegisters_ |= GetTarget().GetLinkReg().GetMask();
        GetEncoder()->EnableLrAsTempReg(true);
    }

    if (GetUsedVRegs().Any()) {
        SaveCallerRegistersInFrame(GetUsedVRegs() & GetCallerRegsMask(GetArch(), true), GetEncoder(), GetFrameLayout(),
                                   true);
        savedFpRegisters_ = GetUsedVRegs() & VRegMask(GetCalleeRegsMask(GetArch(), true));
    }

    GetEncoder()->PushRegisters(savedRegisters_, savedFpRegisters_, GetTarget().SupportLinkReg());

    if (GetFrameInfo()->GetSpillsCount() != 0) {
        GetEncoder()->EncodeSub(
            GetTarget().GetStackReg(), GetTarget().GetStackReg(),
            Imm(RoundUp(GetFrameInfo()->GetSpillsCount() * GetTarget().WordSize(), GetTarget().GetSpAlignment())));
    }
}

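/*
 * Caller-saved registers the epilogue must reload: the used ones, excluding the
 * argument registers and, for non-void methods, the register holding the return value.
 */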
RegMask CodegenFastPath::GetCallerRegistersToRestore() const
{
    RegMask callerRegs = GetUsedRegs() & RegMask(GetCallerRegsMask(GetArch(), false));

    auto argsNum = GetRuntime()->GetMethodArgumentsCount(GetGraph()->GetMethod());
    callerRegs &= ~GetTarget().GetParamRegsMask(argsNum);

    if (auto retType {GetRuntime()->GetMethodReturnType(GetGraph()->GetMethod())};
        retType != DataType::VOID && retType != DataType::NO_TYPE) {
        ASSERT(!DataType::IsFloatType(retType));
        callerRegs.reset(GetTarget().GetReturnRegId());
    }
    return callerRegs;
}

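/* FastPath epilogue: undo the prologue steps in reverse order, then return. */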
void CodegenFastPath::GenerateEpilogue()
{
    SCOPED_DISASM_STR(this, "FastPath Epilogue");

    if (GetFrameInfo()->GetSpillsCount() != 0) {
        GetEncoder()->EncodeAdd(
            GetTarget().GetStackReg(), GetTarget().GetStackReg(),
            Imm(RoundUp(GetFrameInfo()->GetSpillsCount() * GetTarget().WordSize(), GetTarget().GetSpAlignment())));
    }

    RestoreCallerRegistersFromFrame(GetCallerRegistersToRestore(), GetEncoder(), GetFrameLayout(), false);

    if (GetUsedVRegs().Any()) {
        RestoreCallerRegistersFromFrame(GetUsedVRegs() & GetCallerRegsMask(GetArch(), true), GetEncoder(),
                                        GetFrameLayout(), true);
    }

    GetEncoder()->PopRegisters(savedRegisters_, savedFpRegisters_, GetTarget().SupportLinkReg());

    GetEncoder()->EncodeReturn();
}

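/*
 * Build the FrameInfo describing the FastPath frame: where the caller- and
 * callee-saved slots live relative to SP/FP and how many spill slots to reserve.
 */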
void CodegenFastPath::CreateFrameInfo()
{
    auto frame = GetGraph()->GetLocalAllocator()->New<FrameInfo>(
        FrameInfo::PositionedCallers::Encode(true) | FrameInfo::PositionedCallees::Encode(false) |
        FrameInfo::CallersRelativeFp::Encode(true) | FrameInfo::CalleesRelativeFp::Encode(false) |
        FrameInfo::PushCallers::Encode(true));
    ASSERT(frame != nullptr);
    frame->SetSpillsCount(GetGraph()->GetStackSlotsCount());
    CFrameLayout fl(GetGraph()->GetArch(), GetGraph()->GetStackSlotsCount(), false);

    frame->SetCallersOffset(fl.GetOffset<CFrameLayout::OffsetOrigin::SP, CFrameLayout::OffsetUnit::SLOTS>(
        fl.GetStackStartSlot() + fl.GetCallerLastSlot(false)));
    frame->SetFpCallersOffset(fl.GetOffset<CFrameLayout::OffsetOrigin::SP, CFrameLayout::OffsetUnit::SLOTS>(
        fl.GetStackStartSlot() + fl.GetCallerLastSlot(true)));
    frame->SetCalleesOffset(-fl.GetOffset<CFrameLayout::OffsetOrigin::FP, CFrameLayout::OffsetUnit::SLOTS>(
        fl.GetStackStartSlot() + fl.GetCalleeLastSlot(false)));
    frame->SetFpCalleesOffset(-fl.GetOffset<CFrameLayout::OffsetOrigin::FP, CFrameLayout::OffsetUnit::SLOTS>(
        fl.GetStackStartSlot() + fl.GetCalleeLastSlot(true)));

    SetFrameInfo(frame);
}

/*
 * Generate a tail call that leaves the fast path: unwind the spill area, restore the
 * caller-saved registers (for a fastpath target) or finish saving them in the frame
 * (for a slow-path target), pop the saved registers and jump to the target.
 */
void CodegenFastPath::CreateTailCall(IntrinsicInst *inst, bool isFastpath)
{
    auto encoder = GetEncoder();

    if (GetFrameInfo()->GetSpillsCount() != 0) {
        encoder->EncodeAdd(
            GetTarget().GetStackReg(), GetTarget().GetStackReg(),
            Imm(RoundUp(GetFrameInfo()->GetSpillsCount() * GetTarget().WordSize(), GetTarget().GetSpAlignment())));
    }

    /* Once we reach the slow path, we can release all temp registers, since the slow path terminates execution */
    auto tempsMask = GetTarget().GetTempRegsMask();
    for (size_t reg = tempsMask.GetMinRegister(); reg <= tempsMask.GetMaxRegister(); reg++) {
        if (tempsMask.Test(reg)) {
            encoder->ReleaseScratchRegister(Reg(reg, INT32_TYPE));
        }
    }

    if (isFastpath) {
        RestoreCallerRegistersFromFrame(GetCallerRegistersToRestore(), encoder, GetFrameLayout(), false);
        if (GetUsedVRegs().Any()) {
            RestoreCallerRegistersFromFrame(GetUsedVRegs() & GetCallerRegsMask(GetArch(), true), encoder,
                                            GetFrameLayout(), true);
        }
    } else {
        /* The prologue already saved the used caller-saved registers; save the remaining ones for the slow path */
        RegMask callerRegs = ~GetUsedRegs() & RegMask(GetCallerRegsMask(GetArch(), false));
        auto argsNum = GetRuntime()->GetMethodArgumentsCount(GetGraph()->GetMethod());
        callerRegs &= ~GetTarget().GetParamRegsMask(argsNum);

        if (GetUsedVRegs().Any()) {
            VRegMask fpCallerRegs = ~GetUsedVRegs() & RegMask(GetCallerRegsMask(GetArch(), true));
            SaveCallerRegistersInFrame(fpCallerRegs, encoder, GetFrameLayout(), true);
        }

        SaveCallerRegistersInFrame(callerRegs, encoder, GetFrameLayout(), false);
    }
    encoder->PopRegisters(savedRegisters_, savedFpRegisters_, GetTarget().SupportLinkReg());

    /* First Imm is the offset of the runtime entrypoint for Ark Irtoc */
    /* Second Imm is necessary for proper LLVM Irtoc FastPath compilation */
    CHECK_LE(inst->GetImms().size(), 2U);
    if (inst->GetRelocate()) {
        RelocationInfo relocation;
        encoder->EncodeJump(&relocation);
        GetGraph()->GetRelocationHandler()->AddRelocation(relocation);
    } else {
        ScopedTmpReg tmp(encoder);
        auto offset = inst->GetImms()[0];
        encoder->EncodeLdr(tmp, false, MemRef(ThreadReg(), offset));
        encoder->EncodeJump(tmp);
    }
}

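/* Dispatch the SIMD string-compression and memchar intrinsics to the encoder. */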
void CodegenFastPath::EmitSimdIntrinsic(IntrinsicInst *inst, Reg dst, SRCREGS src)
{
    auto intrinsic = inst->GetIntrinsicId();
    if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_COMPRESS_EIGHT_UTF16_TO_UTF8_CHARS_USING_SIMD) {
        GetEncoder()->EncodeCompressEightUtf16ToUtf8CharsUsingSimd(src[FIRST_OPERAND], src[SECOND_OPERAND]);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_COMPRESS_SIXTEEN_UTF16_TO_UTF8_CHARS_USING_SIMD) {
        GetEncoder()->EncodeCompressSixteenUtf16ToUtf8CharsUsingSimd(src[FIRST_OPERAND], src[SECOND_OPERAND]);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_MEM_CHAR_U8_X32_USING_SIMD) {
        GetEncoder()->EncodeMemCharU8X32UsingSimd(dst, src[FIRST_OPERAND], src[SECOND_OPERAND],
                                                  ConvertInstTmpReg(inst, DataType::FLOAT64));
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_MEM_CHAR_U16_X16_USING_SIMD) {
        GetEncoder()->EncodeMemCharU16X16UsingSimd(dst, src[FIRST_OPERAND], src[SECOND_OPERAND],
                                                   ConvertInstTmpReg(inst, DataType::FLOAT64));
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_MEM_CHAR_U8_X16_USING_SIMD) {
        GetEncoder()->EncodeMemCharU8X16UsingSimd(dst, src[FIRST_OPERAND], src[SECOND_OPERAND],
                                                  ConvertInstTmpReg(inst, DataType::FLOAT64));
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_MEM_CHAR_U16_X8_USING_SIMD) {
        GetEncoder()->EncodeMemCharU16X8UsingSimd(dst, src[FIRST_OPERAND], src[SECOND_OPERAND],
                                                  ConvertInstTmpReg(inst, DataType::FLOAT64));
    } else {
        UNREACHABLE();
    }
}

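/* Byte- and half-word-reversal intrinsics, mapped directly to encoder operations. */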
void CodegenFastPath::EmitReverseIntrinsic(IntrinsicInst *inst, Reg dst, SRCREGS src)
{
    auto intrinsic = inst->GetIntrinsicId();
    if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_REVERSE_BYTES_U64 ||
        intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_REVERSE_BYTES_U32) {
        GetEncoder()->EncodeReverseBytes(dst, src[0]);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_REVERSE_HALF_WORDS) {
        GetEncoder()->EncodeReverseHalfWords(dst, src[0]);
    } else {
        UNREACHABLE();
    }
}

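/*
 * Mark-word intrinsics: exclusive load with acquire semantics, exclusive store
 * with release semantics, and compare-and-swap on the object's mark word.
 */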
void CodegenFastPath::EmitMarkWordIntrinsic(IntrinsicInst *inst, Reg dst, SRCREGS src)
{
    auto intrinsic = inst->GetIntrinsicId();
    if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_LOAD_ACQUIRE_MARK_WORD_EXCLUSIVE) {
        GetEncoder()->EncodeLdrExclusive(dst, src[0], true);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_STORE_RELEASE_MARK_WORD_EXCLUSIVE) {
        GetEncoder()->EncodeStrExclusive(dst, src[SECOND_OPERAND], src[0], true);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_COMPARE_AND_SET_MARK_WORD) {
        GetEncoder()->EncodeCompareAndSwap(dst, src[0], src[SECOND_OPERAND], src[THIRD_OPERAND]);
    } else {
        UNREACHABLE();
    }
}

void CodegenFastPath::EmitDataMemoryBarrierFullIntrinsic([[maybe_unused]] IntrinsicInst *inst, [[maybe_unused]] Reg dst,
                                                         [[maybe_unused]] SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_DATA_MEMORY_BARRIER_FULL);
    GetEncoder()->EncodeMemoryBarrier(memory_order::FULL);
}

/*
 * Safe call of a C++ function from Irtoc code: all caller- and callee-saved
 * registers are pushed around the call, so the callee may clobber anything.
 */
void CodegenFastPath::EmitWriteTlabStatsSafeIntrinsic([[maybe_unused]] IntrinsicInst *inst, [[maybe_unused]] Reg dst,
                                                      SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_WRITE_TLAB_STATS_SAFE);
    ASSERT(!inst->HasUsers());

    auto src1 = src[FIRST_OPERAND];
    auto src2 = src[SECOND_OPERAND];
    auto tmp = src[THIRD_OPERAND];

    ASSERT(tmp.IsValid());
    ASSERT(tmp != GetRegfile()->GetZeroReg());

    auto regs = GetCallerRegsMask(GetArch(), false) | GetCalleeRegsMask(GetArch(), false);
    auto vregs = GetCallerRegsMask(GetArch(), true);
    GetEncoder()->PushRegisters(regs, vregs);

    FillCallParams(src1, src2);

    auto id = RuntimeInterface::EntrypointId::WRITE_TLAB_STATS_NO_BRIDGE;
    MemRef entry(ThreadReg(), GetRuntime()->GetEntrypointTlsOffset(GetArch(), id));
    GetEncoder()->EncodeLdr(tmp, false, entry);
    GetEncoder()->MakeCall(tmp);

    GetEncoder()->PopRegisters(regs, vregs);
}

void CodegenFastPath::EmitExpandU8ToU16Intrinsic([[maybe_unused]] IntrinsicInst *inst, Reg dst, SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_EXPAND_U8_TO_U16);
    GetEncoder()->EncodeUnsignedExtendBytesToShorts(dst, src[0]);
}

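/*
 * Atomic byte OR. The fast single-instruction encoding is requested only when the
 * CPU provides native atomics (on AARCH64 presumably the LSE extension); otherwise
 * the encoder falls back to a slower sequence.
 */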
void CodegenFastPath::EmitAtomicByteOrIntrinsic([[maybe_unused]] IntrinsicInst *inst, [[maybe_unused]] Reg dst,
                                                SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_ATOMIC_BYTE_OR);
    bool fastEncoding = true;
    if (GetArch() == Arch::AARCH64 && !g_options.IsCpuFeatureEnabled(CpuFeature::ATOMICS)) {
        fastEncoding = false;
    }
    GetEncoder()->EncodeAtomicByteOr(src[FIRST_OPERAND], src[SECOND_OPERAND], fastEncoding);
}

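/*
 * SAVE/RESTORE_REGISTERS_EP intrinsics: push or pop the registers an entrypoint
 * call can clobber. Registers holding this intrinsic's inputs are excluded, and
 * the return register is preserved only when the call's result is unused.
 */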
void CodegenFastPath::EmitSaveOrRestoreRegsEpIntrinsic(IntrinsicInst *inst, [[maybe_unused]] Reg dst,
                                                       [[maybe_unused]] SRCREGS src)
{
    RegMask calleeRegs = GetUsedRegs() & RegMask(GetCalleeRegsMask(GetArch(), false));
    // We need to restore all caller regs, since the caller doesn't care about registers at all (except parameters)
    auto callerRegs = RegMask(GetCallerRegsMask(GetArch(), false));
    auto callerVregs = RegMask(GetCallerRegsMask(GetArch(), true));
    for (auto &input : inst->GetInputs()) {
        calleeRegs.reset(input.GetInst()->GetDstReg());
        callerRegs.reset(input.GetInst()->GetDstReg());
    }
    if (GetTarget().SupportLinkReg()) {
        callerRegs.set(GetTarget().GetLinkReg().GetId());
    }
    if (!inst->HasUsers()) {
        callerRegs.set(GetTarget().GetReturnReg(GetPtrRegType()).GetId());
    }
    auto intrinsic = inst->GetIntrinsicId();
    if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_SAVE_REGISTERS_EP) {
        GetEncoder()->PushRegisters(callerRegs | calleeRegs, callerVregs);
    } else if (intrinsic == RuntimeInterface::IntrinsicId::INTRINSIC_RESTORE_REGISTERS_EP) {
        GetEncoder()->PopRegisters(callerRegs | calleeRegs, callerVregs);
    } else {
        UNREACHABLE();
    }
}

void CodegenFastPath::EmitTailCallIntrinsic(IntrinsicInst *inst, [[maybe_unused]] Reg dst, [[maybe_unused]] SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_TAIL_CALL);
    CreateTailCall(inst, true);
}

void CodegenFastPath::EmitSlowPathEntryIntrinsic(IntrinsicInst *inst, [[maybe_unused]] Reg dst,
                                                 [[maybe_unused]] SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY);
    CreateTailCall(inst, false);
}

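/*
 * JS-style cast of a double to a 16-bit char code. With the JSCVT CPU feature the
 * encoder emits a direct conversion (on AARCH64 presumably FJCVTZS); without it, a
 * failure flag is materialized through a temporary register.
 */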
void CodegenFastPath::EmitJsCastDoubleToCharIntrinsic([[maybe_unused]] IntrinsicInst *inst, Reg dst, SRCREGS src)
{
    ASSERT(inst->GetIntrinsicId() == RuntimeInterface::IntrinsicId::INTRINSIC_JS_CAST_DOUBLE_TO_CHAR);
    auto srcReg = src[FIRST_OPERAND];

    CHECK_EQ(srcReg.GetSize(), BITS_PER_UINT64);
    CHECK_EQ(dst.GetSize(), BITS_PER_UINT32);
    dst = dst.As(INT32_TYPE);

    auto enc {GetEncoder()};
    if (g_options.IsCpuFeatureEnabled(CpuFeature::JSCVT)) {
        // With JSCVT the cast cannot fail, so no failure flag is needed
        enc->EncodeJsDoubleToCharCast(dst, srcReg);
    } else {
        constexpr uint32_t FAILURE_RESULT_FLAG = (1U << 16U);
        ScopedTmpRegU32 tmp(enc);
        enc->EncodeJsDoubleToCharCast(dst, srcReg, tmp, FAILURE_RESULT_FLAG);
    }
}
}  // namespace ark::compiler