/*
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "slow_path.h"
#include "codegen.h"

namespace ark::compiler {

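// Emits the slow path: binds its label, generates the path body via GenerateImpl and,
// if a back label was created, jumps back to the fast path. In debug builds the path
// is annotated with the originating instruction and marked as generated.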
void SlowPathBase::Generate(Codegen *codegen)
{
    ASSERT(!generated_);

#ifndef NDEBUG
    std::string opcodeStr(GetInst()->GetOpcodeStr());
    if (GetInst()->IsIntrinsic()) {
        opcodeStr += "." + GetIntrinsicName(static_cast<IntrinsicInst *>(GetInst())->GetIntrinsicId());
    }
#endif
    SCOPED_DISASM_STR(codegen,
                      std::string("SlowPath for inst ") + std::to_string(GetInst()->GetId()) + ". " + opcodeStr);
    Encoder *encoder = codegen->GetEncoder();
    ASSERT(encoder->IsValid());
    encoder->BindLabel(GetLabel());

    GenerateImpl(codegen);

    if (encoder->IsLabelValid(labelBack_)) {
        codegen->GetEncoder()->EncodeJump(GetBackLabel());
    }
#ifndef NDEBUG
    generated_ = true;
#endif
}

// ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION, STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION
bool SlowPathEntrypoint::GenerateThrowOutOfBoundsException(Codegen *codegen)
{
    auto lenReg = codegen->ConvertRegister(GetInst()->GetSrcReg(0), GetInst()->GetInputType(0));
    if (GetInst()->GetOpcode() == Opcode::BoundsCheckI) {
        ScopedTmpReg indexReg(codegen->GetEncoder());
        codegen->GetEncoder()->EncodeMov(indexReg, Imm(GetInst()->CastToBoundsCheckI()->GetImm()));
        codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), indexReg, lenReg);
    } else {
        ASSERT(GetInst()->GetOpcode() == Opcode::BoundsCheck);
        auto indexReg = codegen->ConvertRegister(GetInst()->GetSrcReg(1), GetInst()->GetInputType(1));
        codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), indexReg, lenReg);
    }
    return true;
}

// INITIALIZE_CLASS
bool SlowPathEntrypoint::GenerateInitializeClass(Codegen *codegen)
{
    auto inst = GetInst();
    if (GetInst()->GetDstReg() != INVALID_REG) {
        ASSERT(inst->GetOpcode() == Opcode::LoadAndInitClass);
        Reg klassReg {codegen->ConvertRegister(GetInst()->GetDstReg(), DataType::REFERENCE)};
        RegMask preservedRegs;
        codegen->GetEncoder()->SetRegister(&preservedRegs, nullptr, klassReg);
        codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, preservedRegs, klassReg);
    } else {
        ASSERT(inst->GetOpcode() == Opcode::InitClass);
        ASSERT(!codegen->GetGraph()->IsAotMode());
        // check uintptr_t for cross:
        auto klass = reinterpret_cast<uintptr_t>(inst->CastToInitClass()->GetClass());
        codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), TypedImm(klass));
    }
    return true;
}

// IS_INSTANCE
bool SlowPathEntrypoint::GenerateIsInstance(Codegen *codegen)
{
    auto src = codegen->ConvertRegister(GetInst()->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto klass = codegen->ConvertRegister(GetInst()->GetSrcReg(1), DataType::REFERENCE);
    auto dst = codegen->ConvertRegister(GetInst()->GetDstReg(), GetInst()->GetType());
    codegen->CallRuntime(GetInst(), EntrypointId::IS_INSTANCE, dst, RegMask::GetZeroMask(), src, klass);
    return true;
}

// CHECK_CAST
bool SlowPathEntrypoint::GenerateCheckCast(Codegen *codegen)
{
    auto src = codegen->ConvertRegister(GetInst()->GetSrcReg(0), DataType::REFERENCE);  // obj
    auto klass = codegen->ConvertRegister(GetInst()->GetSrcReg(1), DataType::REFERENCE);
    codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), src, klass);
    return true;
}

// CREATE_OBJECT
bool SlowPathEntrypoint::GenerateCreateObject(Codegen *codegen)
{
    auto inst = GetInst();
    auto dst = codegen->ConvertRegister(inst->GetDstReg(), inst->GetType());
    auto src = codegen->ConvertRegister(inst->GetSrcReg(0), inst->GetInputType(0));

    codegen->CallRuntime(inst, EntrypointId::CREATE_OBJECT_BY_CLASS, dst, RegMask::GetZeroMask(), src);

    return true;
}

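// Dispatches on the entrypoint id and emits the runtime call for the entrypoints that are
// fully handled here; returns false for entrypoints that need handling elsewhere.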
bool SlowPathEntrypoint::GenerateByEntry(Codegen *codegen)
{
    switch (GetEntrypoint()) {
        case EntrypointId::THROW_EXCEPTION: {
            auto src = codegen->ConvertRegister(GetInst()->GetSrcReg(0), DataType::Type::REFERENCE);
            codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), src);
            return true;
        }
        case EntrypointId::NULL_POINTER_EXCEPTION:
        case EntrypointId::ARITHMETIC_EXCEPTION:
        case EntrypointId::THROW_NATIVE_EXCEPTION:
            codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, {});
            return true;
        case EntrypointId::ARRAY_INDEX_OUT_OF_BOUNDS_EXCEPTION:
        case EntrypointId::STRING_INDEX_OUT_OF_BOUNDS_EXCEPTION:
            return GenerateThrowOutOfBoundsException(codegen);
        case EntrypointId::NEGATIVE_ARRAY_SIZE_EXCEPTION: {
            auto size = codegen->ConvertRegister(GetInst()->GetSrcReg(0), GetInst()->GetInputType(0));
            codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), size);
            return true;
        }
        case EntrypointId::INITIALIZE_CLASS:
            return GenerateInitializeClass(codegen);
        case EntrypointId::IS_INSTANCE:
            return GenerateIsInstance(codegen);
        case EntrypointId::CHECK_CAST:
        case EntrypointId::CHECK_CAST_DEOPTIMIZE:
            return GenerateCheckCast(codegen);
        case EntrypointId::CREATE_OBJECT_BY_CLASS:
            return GenerateCreateObject(codegen);
        case EntrypointId::SAFEPOINT:
            codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, {});
            return true;
        default:
            return false;
    }
}

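// Entrypoints not covered by GenerateByEntry must be one of those whose call is emitted by a
// derived slow path (e.g. SlowPathUnresolved, SlowPathCheckCast, SlowPathAbstract, SlowPathRefCheck,
// SlowPathResolveStringAot); anything else is an unsupported entrypoint and a fatal error.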
void SlowPathEntrypoint::GenerateImpl(Codegen *codegen)
{
    if (!GenerateByEntry(codegen)) {
        switch (GetEntrypoint()) {
            case EntrypointId::GET_UNKNOWN_CALLEE_METHOD:
            case EntrypointId::RESOLVE_UNKNOWN_VIRTUAL_CALL:
            case EntrypointId::GET_FIELD_OFFSET:
            case EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS:
            case EntrypointId::RESOLVE_CLASS_OBJECT:
            case EntrypointId::RESOLVE_CLASS:
            case EntrypointId::ABSTRACT_METHOD_ERROR:
            case EntrypointId::INITIALIZE_CLASS_BY_ID:
            case EntrypointId::CHECK_STORE_ARRAY_REFERENCE:
            case EntrypointId::RESOLVE_STRING_AOT:
            case EntrypointId::CLASS_CAST_EXCEPTION:
                break;
            default:
                LOG(FATAL, COMPILER) << "Unsupported entrypoint!";
                UNREACHABLE();
                break;
        }
    }
}

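// Packs the deoptimization type into the low bits of a single immediate and the instruction id
// above them, then passes the packed value to the deoptimization entrypoint.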
void SlowPathDeoptimize::GenerateImpl(Codegen *codegen)
{
    uintptr_t value =
        helpers::ToUnderlying(deoptimizeType_) | (GetInst()->GetId() << MinimumBitsToStore(DeoptimizeType::COUNT));
    codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), TypedImm(value));
}

void SlowPathIntrinsic::GenerateImpl(Codegen *codegen)
{
    codegen->CreateCallIntrinsic(GetInst()->CastToIntrinsic());
}

void SlowPathImplicitNullCheck::GenerateImpl(Codegen *codegen)
{
    ASSERT(!GetInst()->CastToNullCheck()->IsImplicit());
    SlowPathEntrypoint::GenerateImpl(codegen);
}

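// Shared slow path, emitted once per entrypoint in AOT mode: records its offset in the AOT data,
// loads the entrypoint address from the thread's TLS entrypoint table and jumps to it.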
void SlowPathShared::GenerateImpl(Codegen *codegen)
{
    ASSERT(tmpReg_ != INVALID_REGISTER);
    [[maybe_unused]] ScopedTmpReg tmpReg(codegen->GetEncoder(), tmpReg_);
    ASSERT(tmpReg.GetReg().GetId() == tmpReg_.GetId());
    auto graph = codegen->GetGraph();
    ASSERT(graph->IsAotMode());
    auto aotData = graph->GetAotData();
    aotData->SetSharedSlowPathOffset(GetEntrypoint(), codegen->GetEncoder()->GetCursorOffset());
    MemRef entry(codegen->ThreadReg(), graph->GetRuntime()->GetEntrypointTlsOffset(graph->GetArch(), GetEntrypoint()));
    ScopedTmpReg tmp1Reg(codegen->GetEncoder());
    codegen->GetEncoder()->EncodeLdr(tmp1Reg, false, entry);
    codegen->GetEncoder()->EncodeJump(tmp1Reg);
}

void SlowPathResolveStringAot::GenerateImpl(Codegen *codegen)
{
    ScopedTmpRegU64 tmpAddrReg(codegen->GetEncoder());
    // The slot address was loaded into a temporary register before we jumped into the slow path, but that register
    // has already been released because temporary registers are scoped. Allocate a new one and check that it is the
    // same register as was allocated in codegen. If it is a different register, copy the slot address into it.
    if (tmpAddrReg.GetReg() != addrReg_) {
        codegen->GetEncoder()->EncodeMov(tmpAddrReg, addrReg_);
    }
    codegen->CallRuntimeWithMethod(GetInst(), method_, GetEntrypoint(), dstReg_, TypedImm(stringId_), tmpAddrReg);
}

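// Store-into-array reference check: passes the array and the reference being stored to the runtime entrypoint.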
void SlowPathRefCheck::GenerateImpl(Codegen *codegen)
{
    ASSERT(arrayReg_ != INVALID_REGISTER);
    ASSERT(refReg_ != INVALID_REGISTER);
    codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), arrayReg_, refReg_);
}

void SlowPathAbstract::GenerateImpl(Codegen *codegen)
{
    SCOPED_DISASM_STR(codegen, std::string("SlowPath for Abstract method ") + std::to_string(GetInst()->GetId()));
    ASSERT(methodReg_ != INVALID_REGISTER);
    ScopedTmpReg methodReg(codegen->GetEncoder(), methodReg_);
    ASSERT(methodReg.GetReg().GetId() == methodReg_.GetId());
    codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), methodReg.GetReg());
}

void SlowPathCheckCast::GenerateImpl(Codegen *codegen)
{
    SCOPED_DISASM_STR(codegen, std::string("SlowPath for CheckCast exception ") + std::to_string(GetInst()->GetId()));
    auto inst = GetInst();
    auto src = codegen->ConvertRegister(inst->GetSrcReg(0), inst->GetInputType(0));

    codegen->CallRuntime(GetInst(), GetEntrypoint(), INVALID_REGISTER, RegMask::GetZeroMask(), classReg_, src);
}

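// Resolves an unresolved method, field or class through the runtime. For virtual/by-name resolvers
// and the "get unknown" entrypoints the slot address is passed to the runtime call; otherwise the
// resolved value is stored into the slot here. The result is also moved into the destination
// register when one is requested.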
void SlowPathUnresolved::GenerateImpl(Codegen *codegen)
{
    SlowPathEntrypoint::GenerateImpl(codegen);

    ASSERT(method_ != nullptr);
    ASSERT(typeId_ != 0);
    ASSERT(slotAddr_ != 0);
    auto typeImm = TypedImm(typeId_);
    auto arch = codegen->GetGraph()->GetArch();
    // On a 32-bit architecture the slot address requires an additional down_cast,
    // similar to the `method` address processing in `CallRuntimeWithMethod`.
    auto slotImm = Is64BitsArch(arch) ? TypedImm(slotAddr_) : TypedImm(down_cast<uint32_t>(slotAddr_));

    ScopedTmpReg valueReg(codegen->GetEncoder());
    if (GetInst()->GetOpcode() == Opcode::ResolveVirtual || GetInst()->GetOpcode() == Opcode::ResolveByName) {
        codegen->CallRuntimeWithMethod(GetInst(), method_, GetEntrypoint(), valueReg, argReg_, typeImm, slotImm);
    } else if (GetEntrypoint() == EntrypointId::GET_UNKNOWN_CALLEE_METHOD ||
               GetEntrypoint() == EntrypointId::GET_UNKNOWN_STATIC_FIELD_MEMORY_ADDRESS) {
        codegen->CallRuntimeWithMethod(GetInst(), method_, GetEntrypoint(), valueReg, typeImm, slotImm);
    } else {
        codegen->CallRuntimeWithMethod(GetInst(), method_, GetEntrypoint(), valueReg, typeImm);

        ScopedTmpReg addrReg(codegen->GetEncoder());
        codegen->GetEncoder()->EncodeMov(addrReg, Imm(slotAddr_));
        codegen->GetEncoder()->EncodeStr(valueReg, MemRef(addrReg));
    }

    if (dstReg_.IsValid()) {
        codegen->GetEncoder()->EncodeMov(dstReg_, valueReg);
    }
}

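// JS-style cast of a double to int32. In managed code the bridge entrypoint is called through the
// regular CallRuntime path; otherwise the no-bridge entrypoint is invoked directly, with live
// caller-saved registers saved and restored around the call by hand.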
void SlowPathJsCastDoubleToInt32::GenerateImpl(Codegen *codegen)
{
    ASSERT(dstReg_.IsValid());
    ASSERT(srcReg_.IsValid());

    auto enc {codegen->GetEncoder()};
    if (codegen->GetGraph()->GetMode().SupportManagedCode()) {
        ScopedTmpRegU64 tmp(enc);
        enc->EncodeMov(tmp, srcReg_);
        codegen->CallRuntime(GetInst(), EntrypointId::JS_CAST_DOUBLE_TO_INT32, dstReg_, RegMask::GetZeroMask(), tmp);
        return;
    }

    auto [liveRegs, liveVregs] {codegen->GetLiveRegisters<true>(GetInst())};
    liveRegs.Reset(dstReg_.GetId());

    codegen->SaveCallerRegisters(liveRegs, liveVregs, true);
    codegen->FillCallParams(srcReg_);
    codegen->EmitCallRuntimeCode(nullptr, EntrypointId::JS_CAST_DOUBLE_TO_INT32_NO_BRIDGE);

    auto retReg {codegen->GetTarget().GetReturnReg(dstReg_.GetType())};
    if (dstReg_.GetId() != retReg.GetId()) {
        enc->EncodeMov(dstReg_, retReg);
    }
    codegen->LoadCallerRegisters(liveRegs, liveVregs, true);
}

void SlowPathStringHashCode::GenerateImpl(Codegen *codegen)
{
    ASSERT(dstReg_.IsValid());
    ASSERT(srcReg_.IsValid());
    codegen->CallFastPath(GetInst(), GetEntrypoint(), dstReg_, RegMask::GetZeroMask(), srcReg_);
}

}  // namespace ark::compiler