/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inst.h"
#include "graph.h"
#include "basicblock.h"
#include "graph_visitor.h"
#include "optimizer/optimizations/vn.h"
#include "profiling/profiling.h"

namespace ark::compiler {

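// The inverse condition code is the logical negation of the predicate: e.g. the inverse of CC_LT
// is CC_GE, since !(a < b) is equivalent to (a >= b). The unsigned codes (B/BE/A/AE) and the TST
// codes negate into each other in the same way.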
ConditionCode GetInverseConditionCode(ConditionCode code)
{
    switch (code) {
        case ConditionCode::CC_EQ:
            return ConditionCode::CC_NE;
        case ConditionCode::CC_NE:
            return ConditionCode::CC_EQ;

        case ConditionCode::CC_LT:
            return ConditionCode::CC_GE;
        case ConditionCode::CC_LE:
            return ConditionCode::CC_GT;
        case ConditionCode::CC_GT:
            return ConditionCode::CC_LE;
        case ConditionCode::CC_GE:
            return ConditionCode::CC_LT;

        case ConditionCode::CC_B:
            return ConditionCode::CC_AE;
        case ConditionCode::CC_BE:
            return ConditionCode::CC_A;
        case ConditionCode::CC_A:
            return ConditionCode::CC_BE;
        case ConditionCode::CC_AE:
            return ConditionCode::CC_B;

        case ConditionCode::CC_TST_EQ:
            return ConditionCode::CC_TST_NE;
        case ConditionCode::CC_TST_NE:
            return ConditionCode::CC_TST_EQ;

        default:
            UNREACHABLE();
    }
}

ConditionCode InverseSignednessConditionCode(ConditionCode code)
{
    switch (code) {
        case ConditionCode::CC_EQ:
            return ConditionCode::CC_EQ;
        case ConditionCode::CC_NE:
            return ConditionCode::CC_NE;

        case ConditionCode::CC_LT:
            return ConditionCode::CC_B;
        case ConditionCode::CC_LE:
            return ConditionCode::CC_BE;
        case ConditionCode::CC_GT:
            return ConditionCode::CC_A;
        case ConditionCode::CC_GE:
            return ConditionCode::CC_AE;

        case ConditionCode::CC_B:
            return ConditionCode::CC_LT;
        case ConditionCode::CC_BE:
            return ConditionCode::CC_LE;
        case ConditionCode::CC_A:
            return ConditionCode::CC_GT;
        case ConditionCode::CC_AE:
            return ConditionCode::CC_GE;

        case ConditionCode::CC_TST_EQ:
            return ConditionCode::CC_TST_EQ;
        case ConditionCode::CC_TST_NE:
            return ConditionCode::CC_TST_NE;

        default:
            UNREACHABLE();
    }
}

bool IsSignedConditionCode(ConditionCode code)
{
    switch (code) {
        case ConditionCode::CC_LT:
        case ConditionCode::CC_LE:
        case ConditionCode::CC_GT:
        case ConditionCode::CC_GE:
            return true;

        case ConditionCode::CC_EQ:
        case ConditionCode::CC_NE:
        case ConditionCode::CC_B:
        case ConditionCode::CC_BE:
        case ConditionCode::CC_A:
        case ConditionCode::CC_AE:
        case ConditionCode::CC_TST_EQ:
        case ConditionCode::CC_TST_NE:
            return false;

        default:
            UNREACHABLE();
    }
}

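// Swapping the compared operands mirrors the relation: (a < b) is the same as (b > a), so CC_LT
// maps to CC_GT and CC_B to CC_A, while the symmetric codes (EQ, NE and the TST codes) are
// returned unchanged.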
ConditionCode SwapOperandsConditionCode(ConditionCode code)
{
    switch (code) {
        case ConditionCode::CC_EQ:
        case ConditionCode::CC_NE:
            return code;

        case ConditionCode::CC_LT:
            return ConditionCode::CC_GT;
        case ConditionCode::CC_LE:
            return ConditionCode::CC_GE;
        case ConditionCode::CC_GT:
            return ConditionCode::CC_LT;
        case ConditionCode::CC_GE:
            return ConditionCode::CC_LE;

        case ConditionCode::CC_B:
            return ConditionCode::CC_A;
        case ConditionCode::CC_BE:
            return ConditionCode::CC_AE;
        case ConditionCode::CC_A:
            return ConditionCode::CC_B;
        case ConditionCode::CC_AE:
            return ConditionCode::CC_BE;

        case ConditionCode::CC_TST_EQ:
        case ConditionCode::CC_TST_NE:
            return code;

        default:
            UNREACHABLE();
    }
}

bool IsVolatileMemInst(const Inst *inst)
{
    switch (inst->GetOpcode()) {
        case Opcode::LoadObject:
            return inst->CastToLoadObject()->GetVolatile();
        case Opcode::LoadObjectPair:
            return inst->CastToLoadObjectPair()->GetVolatile();
        case Opcode::StoreObject:
            return inst->CastToStoreObject()->GetVolatile();
        case Opcode::StoreObjectPair:
            return inst->CastToStoreObjectPair()->GetVolatile();
        case Opcode::LoadStatic:
            return inst->CastToLoadStatic()->GetVolatile();
        case Opcode::StoreStatic:
            return inst->CastToStoreStatic()->GetVolatile();
        case Opcode::UnresolvedStoreStatic:
        case Opcode::LoadResolvedObjectFieldStatic:
        case Opcode::StoreResolvedObjectFieldStatic:
            return true;
        default:
            return false;
    }
}

const ObjectTypeInfo ObjectTypeInfo::INVALID {};
const ObjectTypeInfo ObjectTypeInfo::UNKNOWN {1};

void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}

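// Recovers the owning instruction from a User entry. Layout sketch implied by the pointer
// arithmetic below (illustrative only):
//   dynamic operands: [User_{n-1} ... User_1 User_0][Inst *owner][Input_0 ... Input_{n-1}],
//     so stepping GetIndex() + 1 User slots forward from this user lands on the owner pointer;
//   static operands:  [users][inputs (+arch padding)][the Inst object itself],
//     so the owner is reached by skipping the remaining users, then all inputs plus padding.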
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    auto inputsCount {SizeField::Decode(properties_)};
    p += (inputsCount + Input::GetPadding(RUNTIME_ARCH, inputsCount)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}

void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}

void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}

uint32_t Inst::GetInliningDepth() const
{
    auto ss = GetSaveState();
    return ss == nullptr ? 0 : ss->GetInliningDepth();
}

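// Grows the dynamic operand storage. A single arena block holds (sketch derived from the index
// math below, illustrative only): [User[capacity], filled from the end so that user 0 is adjacent
// to the owner slot][Inst *owner][Input[capacity]]. Reallocation therefore re-creates every User
// at its new reversed position and re-links it into the corresponding input's user list.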
void DynamicOperands::Reallocate([[maybe_unused]] size_t newCapacity /* =0 */)
{
    if (newCapacity == 0) {
        constexpr auto IMM_2 = 2;
        newCapacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
    } else if (newCapacity <= capacity_) {
        return;
    }
    auto size = newCapacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
    auto newStor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));

    auto ownerInst {GetOwnerInst()};
    // Store the pointer to the owner instruction in the new storage
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    *reinterpret_cast<Inst **>(reinterpret_cast<User *>(newStor) + newCapacity) = ownerInst;

    if (users_ == nullptr) {
        users_ = reinterpret_cast<User *>(newStor);
        capacity_ = newCapacity;
        return;
    }
    Input *oldInputs = Inputs();
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    auto *newInputs = reinterpret_cast<Input *>(newStor + sizeof(User) * newCapacity) + 1;

    for (size_t i = 0; i < size_; i++) {
        Inst *oldInput = oldInputs[i].GetInst();  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        ASSERT(oldInput);
        // Initialize the new User in the container. Since users are placed from the end of the array
        // (the zero-index element is at the end), we need to add the capacity and subtract the index.
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        User *newUser = new (reinterpret_cast<User *>(newStor) + newCapacity - i - 1) User(false, i, newCapacity);
        auto oldUser {GetUser(i)};
        if (ownerInst->IsSaveState()) {
            newUser->SetVirtualRegister(oldUser->GetVirtualRegister());
        } else if (ownerInst->IsPhi()) {
            newUser->SetBbNum(oldUser->GetBbNum());
        }
        oldInput->RemoveUser(oldUser);
        oldInput->AddUser(newUser);
        newInputs[i] = Input(oldInput);  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    }
    capacity_ = newCapacity;
    users_ = reinterpret_cast<User *>(newStor);
}

unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    SetInput(size_, Input(inst));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        user->SetBbNum(size_);
    }
    inst->AddUser(user);
    return size_++;
}

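// Removes the operand at 'index' by swapping the last operand into the freed slot, so operand
// order is not preserved. For phi owners, the user whose bbNum still equals size_ (the slot index
// that has just gone out of range) is renumbered to the removed slot's bbNum so that block
// indices stay within range.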
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *currInput = GetInput(index)->GetInst();
    if (currInput->GetBasicBlock() != nullptr && currInput->HasUsers()) {
        currInput->RemoveUser(GetUser(index));
    }

    auto bbNum {GetUser(index)->GetBbNum()};
    auto ownerInst {GetOwnerInst()};

    if (index != size_) {
        auto *lastInput = GetInput(size_)->GetInst();
        if (lastInput->HasUsers()) {
            lastInput->RemoveUser(GetUser(size_));
            lastInput->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        if (ownerInst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (ownerInst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (ownerInst->IsPhi()) {
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bbNum);
                break;
            }
        }
    }
}

void GetAnyTypeNameInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}

void CompareAnyTypeInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}

void BinaryImmOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
}

void BinaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}

void UnaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}

void CompareInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void SelectInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void IfInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void IfImmInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void UnaryOperation::SetVnObject(VnObject *vnObj)
{
    if (GetOpcode() == Opcode::Cast) {
        vnObj->Add(static_cast<uint32_t>(GetInput(0).GetInst()->GetType()));
    }
}

void CmpInst::SetVnObject(VnObject *vnObj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vnObj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}

void LoadFromPoolDynamic::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetTypeId());
}

void CastInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}

void LoadImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetObjectType()));
    vnObj->Add(reinterpret_cast<uint64_t>(GetObject()));
}

void RuntimeClassInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(reinterpret_cast<uint64_t>(GetClass()));
}

void LoadObjFromConstInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetObjPtr()));
}

void FunctionImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetFunctionPtr()));
}

bool CastInst::IsDynamicCast() const
{
    return DataType::IsFloatType(GetInputType(0U)) && DataType::GetCommonType(GetType()) == DataType::INT64 &&
           GetBasicBlock()->GetGraph()->IsDynamicMethod();
}

BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    auto bbNum {GetPhiInputBbNum(index)};
    ASSERT(bbNum < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bbNum];
}

Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}

Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}

size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}

template <Opcode OPC, size_t INPUT_IDX>
Inst *SkipInstructions(Inst *inputInst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = inputInst->GetOpcode(); opcode == OPC; opcode = inputInst->GetOpcode()) {
        inputInst = inputInst->GetInput(INPUT_IDX).GetInst();
    }
    return inputInst;
}
/*
 * For instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI, LoadArrayPairI,
 * StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual, Monitor, LoadObjectPair, StoreObjectPair with a
 * NullCheck input, the dataflow input is the object, which is the first input of the NullCheck instruction.
 * For instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair with a BoundsCheck input, the dataflow input
 * is the array index, which is the second input of the BoundsCheck instruction.
 * For instructions Div and Mod with a ZeroCheck input, the dataflow input is the first input of the ZeroCheck.
 */
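// Illustrative example (hypothetical IR, not taken from a real graph):
//   v3 = NullCheck(v1, ss)
//   v4 = BoundsCheck(LenArray(v3), v2, ss)
//   v5 = LoadArray(v3, v4)
// Here GetDataFlowInput(v3) yields v1 (the object) and GetDataFlowInput(v4) yields v2 (the index),
// i.e. the check instructions are transparent to dataflow queries.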
Inst *Inst::GetDataFlowInput(Inst *inputInst)
{
    auto opcode = inputInst->GetOpcode();
    if (opcode == Opcode::NullCheck) {
        return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
    }
    if (opcode == Opcode::BoundsCheck) {
        return SkipInstructions<Opcode::BoundsCheck, 1>(inputInst);
    }
    if (opcode == Opcode::BoundsCheckI) {
        return SkipInstructions<Opcode::BoundsCheckI, 0>(inputInst);
    }
    if (opcode == Opcode::ZeroCheck) {
        return SkipInstructions<Opcode::ZeroCheck, 0>(inputInst);
    }
    if (opcode == Opcode::NegativeCheck) {
        return SkipInstructions<Opcode::NegativeCheck, 0>(inputInst);
    }
    if (opcode == Opcode::NotPositiveCheck) {
        return SkipInstructions<Opcode::NotPositiveCheck, 0>(inputInst);
    }
    if (opcode == Opcode::AnyTypeCheck) {
        return SkipInstructions<Opcode::AnyTypeCheck, 0>(inputInst);
    }
    if (opcode == Opcode::ObjByIndexCheck) {
        return SkipInstructions<Opcode::ObjByIndexCheck, 0>(inputInst);
    }
    if (opcode == Opcode::HclassCheck) {
        inputInst = SkipInstructions<Opcode::HclassCheck, 0>(inputInst);
        return SkipInstructions<Opcode::LoadObject, 0>(inputInst);
    }
    if (opcode == Opcode::RefTypeCheck) {
        inputInst = SkipInstructions<Opcode::RefTypeCheck, 1>(inputInst);
        if (inputInst->GetOpcode() == Opcode::NullCheck) {
            return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
        }
        return inputInst;
    }
    return inputInst;
}

bool Inst::IsPrecedingInSameBlock(const Inst *other) const
{
    ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
    if (this == other) {
        return true;
    }
    auto next = GetNext();
    while (next != nullptr) {
        if (next == other) {
            return true;
        }
        next = next->GetNext();
    }
    return false;
}

bool Inst::IsDominate(const Inst *other) const
{
    ASSERT(other != nullptr);
    if (this == other) {
        return true;
    }
    auto thisBb = GetBasicBlock();
    auto otherBb = other->GetBasicBlock();
    return thisBb == otherBb ? IsPrecedingInSameBlock(other) : thisBb->IsDominate(otherBb);
}

bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}

Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    clone->bitFields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        clone->ReserveInputs(GetInputsCount());
    }
#ifdef PANDA_COMPILER_DEBUG_INFO
    clone->SetCurrentMethod(GetCurrentMethod());
#endif
    return clone;
}

template <size_t N>
Inst *FixedInputsInst<N>::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<FixedInputsInst *>(Inst::Clone(targetGraph));
#ifndef NDEBUG
    for (size_t i = 0; i < INPUT_COUNT; ++i) {
        clone->SetSrcReg(i, GetSrcReg(i));
    }
#endif
    return clone;
}

#if PANDA_TARGET_MACOS
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
template class FixedInputsInst<3U>;
template class FixedInputsInst<4U>;
#endif

Inst *CallInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto instClone = Inst::Clone(targetGraph);
    auto callClone = static_cast<CallInst *>(instClone);
    callClone->SetCallMethodId(GetCallMethodId());
    callClone->SetCallMethod(GetCallMethod());
    callClone->SetCanNativeException(GetCanNativeException());
    CloneTypes(targetGraph->GetAllocator(), callClone);
    return instClone;
}

Inst *CallIndirectInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToCallIndirect();
    CloneTypes(targetGraph->GetAllocator(), clone);
    return clone;
}

Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto intrinsicClone = (GetOpcode() == Opcode::Intrinsic ? Inst::Clone(targetGraph)->CastToIntrinsic()
                                                            : Inst::Clone(targetGraph)->CastToBuiltin());
    intrinsicClone->intrinsicId_ = GetIntrinsicId();
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    intrinsicClone->SetMethod(GetMethod());
    return intrinsicClone;
}

Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *newCnst = nullptr;
    bool isSupportInt32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            newCnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), isSupportInt32);
            break;
        case DataType::INT64:
            newCnst = targetGraph->CreateInstConstant(GetIntValue(), isSupportInt32);
            break;
        case DataType::FLOAT32:
            newCnst = targetGraph->CreateInstConstant(GetFloatValue(), isSupportInt32);
            break;
        case DataType::FLOAT64:
            newCnst = targetGraph->CreateInstConstant(GetDoubleValue(), isSupportInt32);
            break;
        case DataType::ANY:
            newCnst = targetGraph->CreateInstConstant(GetRawValue(), isSupportInt32);
            newCnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    newCnst->SetDstReg(GetDstReg());
#endif
    return newCnst;
}

Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}

Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    clone->callerInst_ = callerInst_;
    clone->inliningDepth_ = inliningDepth_;
    return clone;
}

Inst *BinaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<BinaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

Inst *UnaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<UnaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, VRegType vregType)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, vregType});
}

void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    immediates_->resize(size);
}

void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catchEdgeIndex)
{
    if (catchTypeIds_ == nullptr) {
        ASSERT(catchEdgeIndexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
    }
    catchTypeIds_->push_back(id);
    catchEdgeIndexes_->push_back(catchEdgeIndex);
}

void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throwInsts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throwInsts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
    }
    throwInsts_->push_back(inst);
}

void CatchPhiInst::ReplaceThrowableInst(const Inst *oldInst, const Inst *newInst)
{
    auto index = GetThrowableInstIndex(oldInst);
    throwInsts_->at(index) = newInst;
}

void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throwInsts_ != nullptr) {
        throwInsts_->at(index) = throwInsts_->back();
        throwInsts_->pop_back();
    }
}

Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto idsCount = this->GetCatchTypeIdsCount(); idsCount > 0) {
        if (clone->catchTypeIds_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        }
        clone->catchTypeIds_->resize(idsCount);
        clone->catchEdgeIndexes_->resize(idsCount);
        std::copy(this->catchTypeIds_->begin(), this->catchTypeIds_->end(), clone->catchTypeIds_->begin());
        std::copy(this->catchEdgeIndexes_->begin(), this->catchEdgeIndexes_->end(), clone->catchEdgeIndexes_->begin());
    }
    return clone;
}

BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}

BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}

/**
 * NB! Can be called before the Lowering pass only.
 * Returns the index of the if_imm block's successor that is taken when the input is true.
 */
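// The asserts below restrict this to the canonical pre-Lowering form `IfImm cc(v), 0` with
// cc in {CC_NE, CC_EQ}: for CC_NE a true (non-zero) input selects successor 0, for CC_EQ it
// selects successor 1.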
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    return GetCc() == CC_NE ? 0 : 1;
}

bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}

bool Inst::RequireRegMap() const
{
    if (GetOpcode() == Opcode::SafePoint) {
        return g_options.IsCompilerSafePointsRequireRegMap();
    }
    return GetOpcode() == Opcode::SaveStateOsr || IsPropagateLiveness();
}

bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return GetBasicBlock()->GetGraph()->GetZeroReg() != GetInvalidReg() && IsZeroConstantOrNullPtr(this);
}

bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}

bool Inst::IsAccWrite() const
{
    if (GetBasicBlock()->GetGraph()->IsDynamicMethod() && IsConst()) {
        return true;
    }
    return GetFlag(inst_flags::ACC_WRITE);
}

// Returns true if the instruction result can be an object
bool Inst::IsReferenceOrAny() const
{
    if (GetType() == DataType::ANY) {
        switch (opcode_) {
            // GetAnyTypeName always returns a movable string
            case Opcode::GetAnyTypeName:
            // We conservatively treat a phi with ANY type as a reference,
            // because the any_type speculated for a phi may be incorrect
            case Opcode::Phi:
                return true;
            default:
                break;
        }
        auto anyType = GetAnyType();
        if (anyType == AnyBaseType::UNDEFINED_TYPE) {
            return true;
        }
        auto dataType = AnyBaseTypeToDataType(anyType);
        return dataType == DataType::REFERENCE;
    }
    return GetType() == DataType::REFERENCE;
}

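// Helper for Inst::IsMovableObject(): a phi is treated as movable if any of its dataflow inputs
// is movable. The marker acts as a visited set, so revisiting an instruction (e.g. through a phi
// cycle) terminates the recursion instead of looping forever.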
bool IsMovableObjectRec(Inst *inst, Marker visitedMrk)
{
    if (inst->SetMarker(visitedMrk)) {
        return false;
    }
    if (inst->IsPhi()) {
        for (size_t i = 0U; i < inst->GetInputsCount(); ++i) {
            if (IsMovableObjectRec(inst->GetDataFlowInput(i), visitedMrk)) {
                return true;
            }
        }
        return false;
    }
    return inst->IsMovableObject();
}

// Returns true if the instruction result can be moved by the GC.
// Returns false for checks because their result is equal to their input.
bool Inst::IsMovableObject()
{
    if (IsCheck() || !IsReferenceOrAny()) {
        return false;
    }
    switch (opcode_) {
        case Opcode::NullPtr:
        case Opcode::LoadClass:
        case Opcode::InitClass:
        case Opcode::LoadAndInitClass:
        case Opcode::UnresolvedLoadAndInitClass:
        case Opcode::LoadImmediate:
        case Opcode::GetInstanceClass:
        case Opcode::GetGlobalVarAddress:
        case Opcode::ResolveObjectFieldStatic:
        case Opcode::Constant:
        case Opcode::LoadConstantPool:
        case Opcode::LoadRuntimeClass:
        case Opcode::LoadUndefined:
            // The result of these instructions can't be moved by the GC.
            return false;
        case Opcode::LoadObject:
            // Classes are in non-movable space.
            return this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_CLASS &&
                   this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_HCLASS;
        case Opcode::Phi: {
            MarkerHolder marker {GetBasicBlock()->GetGraph()};
            return IsMovableObjectRec(this, marker.GetMarker());
        }
        default:
            return true;
    }
}

TryInst *GetTryBeginInst(const BasicBlock *tryBeginBb)
{
    ASSERT(tryBeginBb != nullptr && tryBeginBb->IsTryBegin());
    for (auto inst : tryBeginBb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}

/**
 * Regalloc helper that checks whether the intrinsic's arguments should be located in registers
 * according to the calling convention
 */
bool IntrinsicInst::IsNativeCall() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    if (IsFastpathIntrinsic(intrinsicId_)) {
        return false;
    }
#ifdef PANDA_WITH_IRTOC
    if (IsIrtocIntrinsic(intrinsicId_)) {
        return intrinsicId_ == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY;
    }
#endif
    auto graph = GetBasicBlock()->GetGraph();
    auto arch = graph->GetArch();
    auto runtime = graph->GetRuntime();
    return !EncodesBuiltin(runtime, intrinsicId_, arch) || IsRuntimeCall();
}

DeoptimizeType AnyTypeCheckInst::GetDeoptimizeType() const
{
    auto graph = GetBasicBlock()->GetGraph();
    auto customDeoptimize = graph->IsAotMode() || graph->GetRuntime()->GetMethodProfile(graph->GetMethod(), true) !=
                                                      profiling::INVALID_PROFILE;
    if (!customDeoptimize) {
        return DeoptimizeType::ANY_TYPE_CHECK;
    }
    switch (AnyBaseTypeToDataType(GetAnyType())) {
        case DataType::Type::INT32:
            return DeoptimizeType::NOT_SMALL_INT;
        case DataType::Type::FLOAT64:
            if (IsIntegerWasSeen()) {
                return DeoptimizeType::NOT_NUMBER;
            }
            return DeoptimizeType::DOUBLE_WITH_INT;
        default:
            return DeoptimizeType::ANY_TYPE_CHECK;
    }
}

void HclassCheckInst::ExtendFlags(Inst *inst)
{
    ASSERT(inst->GetOpcode() == Opcode::HclassCheck);
    auto check = inst->CastToHclassCheck();
    if (check->GetCheckFunctionIsNotClassConstructor()) {
        SetCheckFunctionIsNotClassConstructor(true);
    }
    if (check->GetCheckIsFunction()) {
        SetCheckIsFunction(true);
    }
}

}  // namespace ark::compiler