1 /*
2 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "inst.h"
17 #include "graph.h"
18 #include "basicblock.h"
19 #include "graph_visitor.h"
20 #include "optimizer/optimizations/vn.h"
21 #include "profiling/profiling.h"
22
23 namespace ark::compiler {
24
GetInverseConditionCode(ConditionCode code)25 ConditionCode GetInverseConditionCode(ConditionCode code)
26 {
27 switch (code) {
28 case ConditionCode::CC_EQ:
29 return ConditionCode::CC_NE;
30 case ConditionCode::CC_NE:
31 return ConditionCode::CC_EQ;
32
33 case ConditionCode::CC_LT:
34 return ConditionCode::CC_GE;
35 case ConditionCode::CC_LE:
36 return ConditionCode::CC_GT;
37 case ConditionCode::CC_GT:
38 return ConditionCode::CC_LE;
39 case ConditionCode::CC_GE:
40 return ConditionCode::CC_LT;
41
42 case ConditionCode::CC_B:
43 return ConditionCode::CC_AE;
44 case ConditionCode::CC_BE:
45 return ConditionCode::CC_A;
46 case ConditionCode::CC_A:
47 return ConditionCode::CC_BE;
48 case ConditionCode::CC_AE:
49 return ConditionCode::CC_B;
50
51 case ConditionCode::CC_TST_EQ:
52 return ConditionCode::CC_TST_NE;
53 case ConditionCode::CC_TST_NE:
54 return ConditionCode::CC_TST_EQ;
55
56 default:
57 UNREACHABLE();
58 }
59 }
60
InverseSignednessConditionCode(ConditionCode code)61 ConditionCode InverseSignednessConditionCode(ConditionCode code)
62 {
63 switch (code) {
64 case ConditionCode::CC_EQ:
65 return ConditionCode::CC_EQ;
66 case ConditionCode::CC_NE:
67 return ConditionCode::CC_NE;
68
69 case ConditionCode::CC_LT:
70 return ConditionCode::CC_B;
71 case ConditionCode::CC_LE:
72 return ConditionCode::CC_BE;
73 case ConditionCode::CC_GT:
74 return ConditionCode::CC_A;
75 case ConditionCode::CC_GE:
76 return ConditionCode::CC_AE;
77
78 case ConditionCode::CC_B:
79 return ConditionCode::CC_LT;
80 case ConditionCode::CC_BE:
81 return ConditionCode::CC_LE;
82 case ConditionCode::CC_A:
83 return ConditionCode::CC_GT;
84 case ConditionCode::CC_AE:
85 return ConditionCode::CC_GE;
86
87 case ConditionCode::CC_TST_EQ:
88 return ConditionCode::CC_TST_EQ;
89 case ConditionCode::CC_TST_NE:
90 return ConditionCode::CC_TST_NE;
91
92 default:
93 UNREACHABLE();
94 }
95 }
96
IsSignedConditionCode(ConditionCode code)97 bool IsSignedConditionCode(ConditionCode code)
98 {
99 switch (code) {
100 case ConditionCode::CC_LT:
101 case ConditionCode::CC_LE:
102 case ConditionCode::CC_GT:
103 case ConditionCode::CC_GE:
104 return true;
105
106 case ConditionCode::CC_EQ:
107 case ConditionCode::CC_NE:
108 case ConditionCode::CC_B:
109 case ConditionCode::CC_BE:
110 case ConditionCode::CC_A:
111 case ConditionCode::CC_AE:
112 case ConditionCode::CC_TST_EQ:
113 case ConditionCode::CC_TST_NE:
114 return false;
115
116 default:
117 UNREACHABLE();
118 }
119 }
120
SwapOperandsConditionCode(ConditionCode code)121 ConditionCode SwapOperandsConditionCode(ConditionCode code)
122 {
123 switch (code) {
124 case ConditionCode::CC_EQ:
125 case ConditionCode::CC_NE:
126 return code;
127
128 case ConditionCode::CC_LT:
129 return ConditionCode::CC_GT;
130 case ConditionCode::CC_LE:
131 return ConditionCode::CC_GE;
132 case ConditionCode::CC_GT:
133 return ConditionCode::CC_LT;
134 case ConditionCode::CC_GE:
135 return ConditionCode::CC_LE;
136
137 case ConditionCode::CC_B:
138 return ConditionCode::CC_A;
139 case ConditionCode::CC_BE:
140 return ConditionCode::CC_AE;
141 case ConditionCode::CC_A:
142 return ConditionCode::CC_B;
143 case ConditionCode::CC_AE:
144 return ConditionCode::CC_BE;
145
146 case ConditionCode::CC_TST_EQ:
147 case ConditionCode::CC_TST_NE:
148 return code;
149
150 default:
151 UNREACHABLE();
152 }
153 }
154
// Returns true if 'inst' is a memory access that must be treated as volatile.
// Object/static field accesses carry an explicit volatile bit; the unresolved
// and resolved-static accessors are conservatively treated as always volatile.
// All other opcodes are non-volatile.
bool IsVolatileMemInst(const Inst *inst)
{
    switch (inst->GetOpcode()) {
        case Opcode::LoadObject:
            return inst->CastToLoadObject()->GetVolatile();
        case Opcode::LoadObjectPair:
            return inst->CastToLoadObjectPair()->GetVolatile();
        case Opcode::StoreObject:
            return inst->CastToStoreObject()->GetVolatile();
        case Opcode::StoreObjectPair:
            return inst->CastToStoreObjectPair()->GetVolatile();
        case Opcode::LoadStatic:
            return inst->CastToLoadStatic()->GetVolatile();
        case Opcode::StoreStatic:
            return inst->CastToStoreStatic()->GetVolatile();
        case Opcode::UnresolvedStoreStatic:
        case Opcode::LoadResolvedObjectFieldStatic:
        case Opcode::StoreResolvedObjectFieldStatic:
            // No per-instruction volatile bit here: assume the worst.
            return true;
        default:
            return false;
    }
}
178
// Sentinel type-info values. INVALID is the default-constructed "no info"
// state; UNKNOWN carries tag 1. NOTE(review): exact tag semantics are defined
// by ObjectTypeInfo's constructor in the header — confirm there.
const ObjectTypeInfo ObjectTypeInfo::INVALID {};
const ObjectTypeInfo ObjectTypeInfo::UNKNOWN {1};
181
// Pre-allocates storage for 'capacity' inputs. Only valid for instructions
// with dynamically-sized operand lists.
void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}
187
// Recovers the instruction that owns this User record.
// Users, inputs and the owning Inst live in one contiguous allocation:
// - dynamic operands: an Inst* is stored immediately after the user array,
//   GetIndex()+1 slots past 'this';
// - fixed operands: the Inst object itself follows the remaining users and
//   the (architecture-padded) input array.
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    // Skip the input array, including padding required for RUNTIME_ARCH.
    auto inputsCount {SizeField::Decode(properties_)};
    p += (inputsCount + Input::GetPadding(RUNTIME_ARCH, inputsCount)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}
201
// Inserts 'inst' immediately before this instruction in the owning block.
void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}
207
// Inserts 'inst' immediately after this instruction in the owning block.
void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}
213
GetInliningDepth() const214 uint32_t Inst::GetInliningDepth() const
215 {
216 auto ss = GetSaveState();
217 return ss == nullptr ? 0 : ss->GetInliningDepth();
218 }
219
Reallocate(size_t newCapacity)220 void DynamicOperands::Reallocate([[maybe_unused]] size_t newCapacity /* =0 */)
221 {
222 if (newCapacity == 0) {
223 constexpr auto IMM_2 = 2;
224 newCapacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
225 } else if (newCapacity <= capacity_) {
226 return;
227 }
228 auto size = newCapacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
229 auto newStor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));
230
231 auto ownerInst {GetOwnerInst()};
232 // Set pointer to owned instruction into new storage NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
233 ASSERT(reinterpret_cast<User *>(newStor) != nullptr);
234 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
235 *reinterpret_cast<Inst **>(reinterpret_cast<User *>(newStor) + newCapacity) = ownerInst;
236
237 if (users_ == nullptr) {
238 users_ = reinterpret_cast<User *>(newStor);
239 capacity_ = newCapacity;
240 return;
241 }
242 Input *oldInputs = Inputs();
243 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
244 auto *newInputs = reinterpret_cast<Input *>(newStor + sizeof(User) * newCapacity) + 1;
245
246 for (size_t i = 0; i < size_; i++) {
247 Inst *oldInput = oldInputs[i].GetInst(); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
248 ASSERT(oldInput);
249 // Initialize new User in container. Since users are placed from end of array, i.e. zero index element
250 // will be at the end of array, we need to add capacity and substitute index.
251 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
252 User *newUser = new (reinterpret_cast<User *>(newStor) + newCapacity - i - 1) User(false, i, newCapacity);
253 auto oldUser {GetUser(i)};
254 if (ownerInst->IsSaveState()) {
255 newUser->SetVirtualRegister(oldUser->GetVirtualRegister());
256 } else if (ownerInst->IsPhi()) {
257 newUser->SetBbNum(oldUser->GetBbNum());
258 }
259 oldInput->RemoveUser(oldUser);
260 oldInput->AddUser(newUser);
261 newInputs[i] = Input(oldInput); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
262 }
263 capacity_ = newCapacity;
264 users_ = reinterpret_cast<User *>(newStor);
265 }
266
// Appends 'inst' as a new input, growing storage if full, and registers the
// matching user record with 'inst'. Returns the index of the new input.
unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    ASSERT(capacity_ > size_);
    SetInput(size_, Input(inst));
    // Users are stored in reverse order at the front of the allocation.
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        // For phis the user record tracks the predecessor block number.
        user->SetBbNum(size_);
    }
    ASSERT(inst != nullptr);
    inst->AddUser(user);
    return size_++;
}
285
// Removes the input at 'index' using swap-with-last:
// the last input is moved into the vacated slot (order is not preserved)
// and user records/payloads are re-wired accordingly.
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *currInput = GetInput(index)->GetInst();
    // Skip unlinking when the producer is already detached from the graph.
    if (currInput->GetBasicBlock() != nullptr && currInput->HasUsers()) {
        currInput->RemoveUser(GetUser(index));
    }

    auto bbNum {GetUser(index)->GetBbNum()};
    auto ownerInst {GetOwnerInst()};

    if (index != size_) {
        // Move the former last input into the freed slot.
        auto *lastInput = GetInput(size_)->GetInst();
        if (lastInput->HasUsers()) {
            lastInput->RemoveUser(GetUser(size_));
            lastInput->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        // Preserve per-user payload for the moved entry.
        if (ownerInst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (ownerInst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (ownerInst->IsPhi()) {
        // Re-point whichever phi user still references the removed slot's
        // block number to the removed entry's block number.
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bbNum);
                break;
            }
        }
    }
}
320
// Value-numbering key: include the any-type tag so GVN does not merge
// GetAnyTypeName instructions with different types.
void GetAnyTypeNameInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}
325
// Value-numbering key: include the compared any-type tag.
void CompareAnyTypeInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}
330
// Value-numbering key: include the immediate operand.
void BinaryImmOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
}
335
// Value-numbering key: include the shift amount and shift kind.
void BinaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}
341
// Value-numbering key: include the shift amount and shift kind.
void UnaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}
347
// Value-numbering key: include the condition code.
void CompareInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}
352
// Value-numbering key: include the condition code.
void SelectInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}
357
// Value-numbering key: include the condition code.
void IfInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}
362
// Value-numbering key: include the condition code.
void IfImmInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}
367
// Value-numbering key: for Cast the source value's type matters (the same
// input casted from different types is not equivalent); other unary ops add
// nothing beyond the default key.
void UnaryOperation::SetVnObject(VnObject *vnObj)
{
    if (GetOpcode() == Opcode::Cast) {
        vnObj->Add(static_cast<uint32_t>(GetInput(0).GetInst()->GetType()));
    }
}
374
// Value-numbering key: include the operand type and, for float compares,
// the NaN-ordering flavor (fcmpg vs fcmpl).
void CmpInst::SetVnObject(VnObject *vnObj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vnObj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}
382
// Value-numbering key: include the pool entry id.
void LoadFromPoolDynamic::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetTypeId());
}
387
// Value-numbering key: include the source type of the cast.
void CastInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}
392
// Value-numbering key: include the object kind and the object's address.
void LoadImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetObjectType()));
    vnObj->Add(reinterpret_cast<uint64_t>(GetObject()));
}
398
// Value-numbering key: include the class pointer.
void RuntimeClassInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(reinterpret_cast<uint64_t>(GetClass()));
}
403
// Value-numbering key: include the constant object pointer value.
void LoadObjFromConstInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetObjPtr()));
}
408
// Value-numbering key: include the function pointer value.
void FunctionImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetFunctionPtr()));
}
413
// A "dynamic cast" is a float-to-integer cast inside a dynamic-language
// method; such casts need dynamic-semantics handling (not a plain truncation).
bool CastInst::IsDynamicCast() const
{
    return DataType::IsFloatType(GetInputType(0U)) && DataType::GetCommonType(GetType()) == DataType::INT64 &&
           GetBasicBlock()->GetGraph()->IsDynamicMethod();
}
419
// Returns the predecessor basic block that contributes input 'index'.
BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    // Each phi user stores the number of its predecessor block.
    auto bbNum {GetPhiInputBbNum(index)};
    ASSERT(bbNum < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bbNum];
}
428
// Returns the input value flowing in from predecessor block 'bb'.
Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}
435
// Like GetPhiInput, but skips wrapping check instructions (NullCheck etc.)
// to return the underlying dataflow value from predecessor 'bb'.
Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}
442
// Returns the phi input index corresponding to predecessor 'block'.
// 'block' must actually be a predecessor; otherwise this is a logic error.
size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}
452
// Walks a chain of identical check instructions: while 'inputInst' has opcode
// OPC, step to its INPUT_IDX-th input. Returns the first non-OPC instruction.
template <Opcode OPC, size_t INPUT_IDX>
Inst *SkipInstructions(Inst *inputInst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = inputInst->GetOpcode(); opcode == OPC; opcode = inputInst->GetOpcode()) {
        inputInst = inputInst->GetInput(INPUT_IDX).GetInst();
    }
    return inputInst;
}
462 /*
463 * For instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI, LoadArrayPairI,
464 * StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual, Monitor, LoadObjectPair, StoreObjectPair with
465 * NullCheck input the dataflow user is object, which is the first input of NullCheck instruction.
466 * For instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair with BoundsCheck input the dataflow user is
467 * array index, which is the second input of BoundsCheck instruction
468 * For instructions Div and Mod with ZeroCheck input the dataflow user is the first input of ZeroCheck
469 */
GetDataFlowInput(Inst * inputInst)470 Inst *Inst::GetDataFlowInput(Inst *inputInst)
471 {
472 auto opcode = inputInst->GetOpcode();
473 if (opcode == Opcode::NullCheck) {
474 return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
475 }
476 if (opcode == Opcode::BoundsCheck) {
477 return SkipInstructions<Opcode::BoundsCheck, 1>(inputInst);
478 }
479 if (opcode == Opcode::BoundsCheckI) {
480 return SkipInstructions<Opcode::BoundsCheckI, 0>(inputInst);
481 }
482 if (opcode == Opcode::ZeroCheck) {
483 return SkipInstructions<Opcode::ZeroCheck, 0>(inputInst);
484 }
485 if (opcode == Opcode::NegativeCheck) {
486 return SkipInstructions<Opcode::NegativeCheck, 0>(inputInst);
487 }
488 if (opcode == Opcode::NotPositiveCheck) {
489 return SkipInstructions<Opcode::NotPositiveCheck, 0>(inputInst);
490 }
491 if (opcode == Opcode::AnyTypeCheck) {
492 return SkipInstructions<Opcode::AnyTypeCheck, 0>(inputInst);
493 }
494 if (opcode == Opcode::ObjByIndexCheck) {
495 return SkipInstructions<Opcode::ObjByIndexCheck, 0>(inputInst);
496 }
497 if (opcode == Opcode::HclassCheck) {
498 inputInst = SkipInstructions<Opcode::HclassCheck, 0>(inputInst);
499 return SkipInstructions<Opcode::LoadObject, 0>(inputInst);
500 }
501 if (opcode == Opcode::RefTypeCheck) {
502 inputInst = SkipInstructions<Opcode::RefTypeCheck, 1>(inputInst);
503 if (inputInst->GetOpcode() == Opcode::NullCheck) {
504 return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
505 }
506 return inputInst;
507 }
508 return inputInst;
509 }
510
IsPrecedingInSameBlock(const Inst * other) const511 bool Inst::IsPrecedingInSameBlock(const Inst *other) const
512 {
513 ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
514 if (this == other) {
515 return true;
516 }
517 auto next = GetNext();
518 while (next != nullptr) {
519 if (next == other) {
520 return true;
521 }
522 next = next->GetNext();
523 }
524 return false;
525 }
526
IsDominate(const Inst * other) const527 bool Inst::IsDominate(const Inst *other) const
528 {
529 ASSERT(other != nullptr);
530 if (this == other) {
531 return true;
532 }
533 auto thisBb = GetBasicBlock();
534 auto otherBb = other->GetBasicBlock();
535 return thisBb == otherBb ? IsPrecedingInSameBlock(other) : thisBb->IsDominate(otherBb);
536 }
537
// True if both instructions share a basic block, or this one dominates 'other'.
bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}
542
// Creates a shallow copy of this instruction inside 'targetGraph':
// opcode, bit-fields and pc are copied; inputs and users are NOT copied —
// the caller is responsible for wiring them up.
Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    clone->bitFields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        // Pre-size the clone's operand storage to match the original.
        clone->ReserveInputs(GetInputsCount());
    }
#ifdef PANDA_COMPILER_DEBUG_INFO
    clone->SetCurrentMethod(GetCurrentMethod());
#endif
    return clone;
}
560
// Clone for fixed-arity instructions; additionally copies the debug-only
// source-register assignments.
template <size_t N>
Inst *FixedInputsInst<N>::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<FixedInputsInst *>(Inst::Clone(targetGraph));
#ifndef NDEBUG
    for (size_t i = 0; i < INPUT_COUNT; ++i) {
        clone->SetSrcReg(i, GetSrcReg(i));
    }
#endif
    return clone;
}
572
#if PANDA_TARGET_MACOS
// Explicit instantiations of the common fixed-arity sizes.
// NOTE(review): presumably required for correct linkage of the out-of-line
// template Clone on this target — confirm against the build configuration.
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
template class FixedInputsInst<3U>;
template class FixedInputsInst<4U>;
#endif
580
// Clone for calls: copies the callee identity (id + method pointer),
// native-call flags and the per-argument type list.
Inst *CallInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto instClone = Inst::Clone(targetGraph);
    auto callClone = static_cast<CallInst *>(instClone);
    callClone->SetCallMethodId(GetCallMethodId());
    callClone->SetCallMethod(GetCallMethod());
    callClone->SetIsNative(GetIsNative());
    callClone->SetCanNativeException(GetCanNativeException());
    CloneTypes(targetGraph->GetAllocator(), callClone);
    return instClone;
}
593
// Clone for indirect calls: base clone plus the per-argument type list.
Inst *CallIndirectInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToCallIndirect();
    CloneTypes(targetGraph->GetAllocator(), clone);
    return clone;
}
600
// Clone for intrinsics and builtins: copies the intrinsic id, argument
// types, any attached immediates, and the resolved method pointer.
Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    // The same class backs both Intrinsic and Builtin opcodes; cast accordingly.
    auto intrinsicClone = (GetOpcode() == Opcode::Intrinsic ? Inst::Clone(targetGraph)->CastToIntrinsic()
                                                            : Inst::Clone(targetGraph)->CastToBuiltin());
    intrinsicClone->intrinsicId_ = GetIntrinsicId();
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    intrinsicClone->SetMethod(GetMethod());
    return intrinsicClone;
}
616
// Clone for constants: re-creates the constant in 'targetGraph' with the
// value reinterpreted per its type. The bytecode optimizer keeps INT32
// constants as 32-bit ('isSupportInt32'); ANY constants carry the raw bits.
Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *newCnst = nullptr;
    bool isSupportInt32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            newCnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), isSupportInt32);
            break;
        case DataType::INT64:
            newCnst = targetGraph->CreateInstConstant(GetIntValue(), isSupportInt32);
            break;
        case DataType::FLOAT32:
            newCnst = targetGraph->CreateInstConstant(GetFloatValue(), isSupportInt32);
            break;
        case DataType::FLOAT64:
            newCnst = targetGraph->CreateInstConstant(GetDoubleValue(), isSupportInt32);
            break;
        case DataType::ANY:
            newCnst = targetGraph->CreateInstConstant(GetRawValue(), isSupportInt32);
            newCnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    newCnst->SetDstReg(GetDstReg());
#endif
    return newCnst;
}
646
// Clone for parameters: copies the argument number and location data.
Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}
654
// Clone for save states: deep-copies the immediates vector and carries over
// the method, inlining caller and depth.
Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    clone->callerInst_ = callerInst_;
    clone->inliningDepth_ = inliningDepth_;
    return clone;
}
667
// Clone: copies the shift immediate and shift kind in addition to base state.
Inst *BinaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<BinaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}
675
// Clone: copies the shift immediate and shift kind in addition to base state.
Inst *UnaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<UnaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}
683
// Records an immediate value for vreg 'vreg' in this save state,
// lazily allocating the immediates vector on first use.
void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, VRegType vregType)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    ASSERT(immediates_ != nullptr);
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, vregType});
}
693
// Allocates the arena-backed immediates vector, pre-sized to 'size' entries.
void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    ASSERT(immediates_ != nullptr);
    immediates_->resize(size);
}
700
GetInputsWereDeletedRec() const701 bool SaveStateInst::GetInputsWereDeletedRec() const
702 {
703 if (GetInputsWereDeleted()) {
704 return true;
705 }
706 if (callerInst_ != nullptr) {
707 auto *saveState = callerInst_->GetSaveState();
708 ASSERT(saveState != nullptr);
709 return saveState->GetInputsWereDeletedRec();
710 }
711 return false;
712 }
713
// Records a (catch type id, catch edge index) pair for this try.
// Both parallel vectors are lazily allocated together on first use.
void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catchEdgeIndex)
{
    if (catchTypeIds_ == nullptr) {
        ASSERT(catchEdgeIndexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
    }
    catchTypeIds_->push_back(id);
    ASSERT(catchEdgeIndexes_ != nullptr);
    catchEdgeIndexes_->push_back(catchEdgeIndex);
}
727
// Associates a throwable instruction with this catch-phi; the list parallels
// the phi's inputs. Lazily allocated on first append.
void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throwInsts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throwInsts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
    }
    ASSERT(throwInsts_ != nullptr);
    throwInsts_->push_back(inst);
}
738
// Replaces a tracked throwable instruction in place, keeping its slot
// aligned with the corresponding phi input.
void CatchPhiInst::ReplaceThrowableInst(const Inst *oldInst, const Inst *newInst)
{
    auto index = GetThrowableInstIndex(oldInst);
    throwInsts_->at(index) = newInst;
}
744
// Removes phi input 'index' and mirrors the swap-with-last removal in the
// parallel throwable-instruction list (matching Inst::RemoveInput's order).
void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throwInsts_ != nullptr) {
        throwInsts_->at(index) = throwInsts_->back();
        throwInsts_->pop_back();
    }
}
753
// Clone for Try: deep-copies the parallel catch-type-id / catch-edge-index
// vectors into the target graph's allocator.
Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto idsCount = this->GetCatchTypeIdsCount(); idsCount > 0) {
        if (clone->catchTypeIds_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        }
        clone->catchTypeIds_->resize(idsCount);
        clone->catchEdgeIndexes_->resize(idsCount);
        std::copy(this->catchTypeIds_->begin(), this->catchTypeIds_->end(), clone->catchTypeIds_->begin());
        std::copy(this->catchEdgeIndexes_->begin(), this->catchEdgeIndexes_->end(), clone->catchEdgeIndexes_->begin());
    }
    return clone;
}
770
// Successor taken when the branch input evaluates to true.
BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}
775
// Successor taken when the branch input evaluates to false
// (the other of the two successors).
BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}
780
/**
 * NB! Can be called before Lowering pass only
 * Return if_imm's block successor index when input is true
 */
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    // Only the canonical "compare against 0 with EQ/NE" form is supported here.
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    // input != 0 (true) takes successor 0 for CC_NE, successor 1 for CC_EQ.
    return GetCc() == CC_NE ? 0 : 1;
}
793
// True if liveness must be propagated through this instruction:
// it can throw inside a try region, or it may deoptimize.
bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}
798
// True if this instruction needs a register map:
// safepoints (behind a compiler option), OSR save states, and anything
// that propagates liveness (throwing-in-try / deoptimizing).
bool Inst::RequireRegMap() const
{
    if (GetOpcode() == Opcode::SafePoint) {
        return g_options.IsCompilerSafePointsRequireRegMap();
    }
    return GetOpcode() == Opcode::SaveStateOsr || IsPropagateLiveness();
}
806
// True if this instruction's value can be materialized by the target's
// dedicated zero register: the target must have one, the value must be a
// zero constant or nullptr, and it must not be a native-API call reference.
bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return GetBasicBlock()->GetGraph()->GetZeroReg() != GetInvalidReg() && IsZeroConstantOrNullPtr(this) &&
           !IsReferenceForNativeApiCall();
}
814
// True if this instruction reads the accumulator (per its static flags).
bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}
819
IsAccWrite() const820 bool Inst::IsAccWrite() const
821 {
822 if (GetBasicBlock()->GetGraph()->IsDynamicMethod() && IsConst()) {
823 return true;
824 }
825 return GetFlag(inst_flags::ACC_WRITE);
826 }
827
// Returns true if instruction result can be object
// For ANY-typed values the static any-type tag decides; a few opcodes are
// conservatively treated as references regardless of the tag.
bool Inst::IsReferenceOrAny() const
{
    if (GetType() == DataType::ANY) {
        switch (opcode_) {
            // GetAnyTypeName always return movable string
            case Opcode::GetAnyTypeName:
            // We conservative decide that phi with ANY type is always reference,
            // because for phi we can speculate incorrect any_type
            case Opcode::Phi:
                return true;
            default:
                break;
        }
        auto anyType = GetAnyType();
        if (anyType == AnyBaseType::UNDEFINED_TYPE) {
            // Unknown tag: assume it may be an object.
            return true;
        }
        auto dataType = AnyBaseTypeToDataType(anyType);
        return dataType == DataType::REFERENCE;
    }
    return GetType() == DataType::REFERENCE;
}
851
// Recursive helper for Inst::IsMovableObject over phi cycles.
// 'visitedMrk' guards against revisiting nodes: a phi is movable if any of
// its (unwrapped) inputs is movable; already-visited nodes contribute false.
bool IsMovableObjectRec(Inst *inst, Marker visitedMrk)
{
    if (inst->SetMarker(visitedMrk)) {
        // Already seen on this traversal — avoid infinite recursion.
        return false;
    }
    if (inst->IsPhi()) {
        for (size_t i = 0U; i < inst->GetInputsCount(); ++i) {
            if (IsMovableObjectRec(inst->GetDataFlowInput(i), visitedMrk)) {
                return true;
            }
        }
        return false;
    }
    return inst->IsMovableObject();
}
867
// Returns true if instruction result can be moved by GC
// Returns false for checks because their result is equal to input
bool Inst::IsMovableObject()
{
    if (IsCheck() || !IsReferenceOrAny()) {
        return false;
    }
    switch (opcode_) {
        case Opcode::NullPtr:
        case Opcode::LoadClass:
        case Opcode::InitClass:
        case Opcode::LoadAndInitClass:
        case Opcode::UnresolvedLoadAndInitClass:
        case Opcode::LoadImmediate:
        case Opcode::GetInstanceClass:
        case Opcode::GetGlobalVarAddress:
        case Opcode::ResolveObjectFieldStatic:
        case Opcode::Constant:
        case Opcode::LoadConstantPool:
        case Opcode::LoadRuntimeClass:
        case Opcode::LoadUniqueObject:
            // The result of these instructions can't be moved by GC.
            return false;
        case Opcode::LoadObject:
            // Classes in non moveble space.
            return this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_CLASS &&
                   this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_HCLASS;
        case Opcode::Phi: {
            // Phi is movable iff any input (through cycles) is movable.
            MarkerHolder marker {GetBasicBlock()->GetGraph()};
            return IsMovableObjectRec(this, marker.GetMarker());
        }
        case Opcode::Intrinsic:
            return CastToIntrinsic()->GetIntrinsicId() !=
                   RuntimeInterface::IntrinsicId::INTRINSIC_COMPILER_GET_NATIVE_METHOD_MANAGED_CLASS;
        default:
            // Conservative default: any other reference may be moved by GC.
            return true;
    }
}
906
// Finds the Try instruction inside a try-begin block.
// Such a block must contain one; reaching the end is a logic error.
TryInst *GetTryBeginInst(const BasicBlock *tryBeginBb)
{
    ASSERT(tryBeginBb != nullptr && tryBeginBb->IsTryBegin());
    for (auto inst : tryBeginBb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}
918
/**
 * Regalloc's helper to checks if intrinsic's arguments should be located on the registers according to
 * calling-convention
 */
bool IntrinsicInst::IsNativeCall() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    // Fastpath intrinsics never follow the native calling convention.
    if (IsFastpathIntrinsic(intrinsicId_)) {
        return false;
    }
#ifdef PANDA_WITH_IRTOC
    // Among irtoc intrinsics only the slow-path entry is a native call.
    if (IsIrtocIntrinsic(intrinsicId_)) {
        return intrinsicId_ == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY;
    }
#endif
    auto graph = GetBasicBlock()->GetGraph();
    auto arch = graph->GetArch();
    auto runtime = graph->GetRuntime();
    // Anything not encoded as an inline builtin (or explicitly a runtime call)
    // goes through the native calling convention.
    return !EncodesBuiltin(runtime, intrinsicId_, arch) || IsRuntimeCall();
}
940
// Chooses the deoptimization reason reported when this AnyTypeCheck fails.
// With profile data available (AOT mode or a valid method profile) more
// specific reasons are reported for INT32/FLOAT64 expectations; otherwise
// the generic ANY_TYPE_CHECK reason is used.
DeoptimizeType AnyTypeCheckInst::GetDeoptimizeType() const
{
    auto graph = GetBasicBlock()->GetGraph();
    auto customDeoptimize = graph->IsAotMode() || graph->GetRuntime()->GetMethodProfile(graph->GetMethod(), true) !=
                                                      profiling::INVALID_PROFILE;
    if (!customDeoptimize) {
        return DeoptimizeType::ANY_TYPE_CHECK;
    }
    switch (AnyBaseTypeToDataType(GetAnyType())) {
        case DataType::Type::INT32:
            return DeoptimizeType::NOT_SMALL_INT;
        case DataType::Type::FLOAT64:
            if (IsIntegerWasSeen()) {
                // Integers were observed here before: value is not a number.
                return DeoptimizeType::NOT_NUMBER;
            }
            return DeoptimizeType::DOUBLE_WITH_INT;
        default:
            return DeoptimizeType::ANY_TYPE_CHECK;
    }
}
961
ExtendFlags(Inst * inst)962 void HclassCheckInst::ExtendFlags(Inst *inst)
963 {
964 ASSERT(inst->GetOpcode() == Opcode::HclassCheck);
965 auto check = inst->CastToHclassCheck();
966 if (check->GetCheckFunctionIsNotClassConstructor()) {
967 SetCheckFunctionIsNotClassConstructor(true);
968 }
969 if (check->GetCheckIsFunction()) {
970 SetCheckIsFunction(true);
971 }
972 }
973
974 } // namespace ark::compiler
975