/*
 * Copyright (c) 2021-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inst.h"
#include "graph.h"
#include "basicblock.h"
#include "graph_visitor.h"
#include "optimizer/optimizations/vn.h"
#include "profiling/profiling.h"

namespace panda::compiler {

const ObjectTypeInfo ObjectTypeInfo::INVALID {};
const ObjectTypeInfo ObjectTypeInfo::UNKNOWN {1};

void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}

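// Layout note (an inference from the pointer arithmetic in GetInst() below,
// added for readability): for fixed-operand instructions the User records are
// stored in reverse order, followed by the padded Input array, followed by the
// Inst object itself:
//   [User(n-1)] ... [User(0)] [Input(0) .. Input(k-1) + padding] [Inst]
// A dynamic instruction instead keeps an Inst* back-pointer right behind its
// User array, which is what the IsDynamic() branch reads.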
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    auto inputsCount {SizeField::Decode(properties_)};
    p += (inputsCount + Input::GetPadding(RUNTIME_ARCH, inputsCount)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}

void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}

void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}

uint32_t Inst::GetInliningDepth() const
{
    auto ss = GetSaveState();
    return ss == nullptr ? 0 : ss->GetInliningDepth();
}

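// Dynamic operand storage, as implied by the allocation below (a descriptive
// summary, not an authoritative spec): one contiguous block laid out as
//   [User(capacity-1)] ... [User(0)] [Inst* owner] [Input(0)] ... [Input(capacity-1)]
// Users grow backwards from the owner pointer, so user i lives at
// users_ + capacity_ - i - 1. With no explicit capacity the storage roughly
// doubles (2 * capacity + 2), giving amortized O(1) appends.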
void DynamicOperands::Reallocate(size_t newCapacity /* =0 */)
{
    if (newCapacity == 0) {
        constexpr auto IMM_2 = 2;
        newCapacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
    } else if (newCapacity <= capacity_) {
        return;
    }
    auto size = newCapacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
    auto newStor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));

    auto ownerInst {GetOwnerInst()};
    // Store the pointer to the owner instruction in the new storage.
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    *reinterpret_cast<Inst **>(reinterpret_cast<User *>(newStor) + newCapacity) = ownerInst;

    if (users_ == nullptr) {
        users_ = reinterpret_cast<User *>(newStor);
        capacity_ = newCapacity;
        return;
    }
    Input *oldInputs = Inputs();
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    auto *newInputs = reinterpret_cast<Input *>(newStor + sizeof(User) * newCapacity) + 1;

    for (size_t i = 0; i < size_; i++) {
        Inst *oldInput = oldInputs[i].GetInst();  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        ASSERT(oldInput);
        // Initialize the new User in the container. Since users are placed from the end of
        // the array (the zero-index element is the last one), we add the capacity and
        // subtract the index.
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        User *newUser = new (reinterpret_cast<User *>(newStor) + newCapacity - i - 1) User(false, i, newCapacity);
        auto oldUser {GetUser(i)};
        if (ownerInst->IsSaveState()) {
            newUser->SetVirtualRegister(oldUser->GetVirtualRegister());
        } else if (ownerInst->IsPhi()) {
            newUser->SetBbNum(oldUser->GetBbNum());
        }
        oldInput->RemoveUser(oldUser);
        oldInput->AddUser(newUser);
        newInputs[i] = Input(oldInput);  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    }
    capacity_ = newCapacity;
    users_ = reinterpret_cast<User *>(newStor);
}

unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    SetInput(size_, Input(inst));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        user->SetBbNum(size_);
    }
    inst->AddUser(user);
    return size_++;
}

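// Removal below is O(1): the last operand is swapped into the freed slot and
// size_ shrinks by one. For phis, the trailing loop re-maps the user that still
// referred to predecessor index size_ (out of range after the swap) to the
// freed index bbNum, keeping the input-to-predecessor mapping consistent.
// This comment summarizes the code; it is not from the original authors.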
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *currInput = GetInput(index)->GetInst();
    if (currInput->GetBasicBlock() != nullptr && currInput->HasUsers()) {
        currInput->RemoveUser(GetUser(index));
    }

    auto bbNum {GetUser(index)->GetBbNum()};
    auto ownerInst {GetOwnerInst()};

    if (index != size_) {
        auto *lastInput = GetInput(size_)->GetInst();
        if (lastInput->HasUsers()) {
            lastInput->RemoveUser(GetUser(size_));
            lastInput->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        if (ownerInst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (ownerInst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (ownerInst->IsPhi()) {
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bbNum);
                break;
            }
        }
    }
}

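// The SetVnObject overrides below add opcode-specific attributes (immediates,
// condition codes, operand types) to the value-numbering key, so that GVN does
// not merge instructions whose inputs match but whose attributes differ. See
// optimizer/optimizations/vn.h; this summary is an inference from the overrides.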
void GetAnyTypeNameInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}

void CompareAnyTypeInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetAnyType()));
}

void BinaryImmOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
}

void BinaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}

void UnaryShiftedRegisterOperation::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetImm());
    vnObj->Add(static_cast<uint32_t>(GetShiftType()));
}

void CompareInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void SelectInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void IfInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void IfImmInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetCc()));
}

void UnaryOperation::SetVnObject(VnObject *vnObj)
{
    if (GetOpcode() == Opcode::Cast) {
        vnObj->Add(static_cast<uint32_t>(GetInput(0).GetInst()->GetType()));
    }
}

void CmpInst::SetVnObject(VnObject *vnObj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vnObj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}

void LoadFromPoolDynamic::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(GetTypeId());
}

void CastInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint32_t>(GetInputType(0)));
}

void LoadImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(reinterpret_cast<uint64_t>(GetObject()));
}

void RuntimeClassInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(reinterpret_cast<uint64_t>(GetClass()));
}

void LoadObjFromConstInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetObjPtr()));
}

void FunctionImmediateInst::SetVnObject(VnObject *vnObj)
{
    vnObj->Add(static_cast<uint64_t>(GetFunctionPtr()));
}

bool CastInst::IsDynamicCast() const
{
    return DataType::IsFloatType(GetInputType(0U)) && DataType::GetCommonType(GetType()) == DataType::INT64 &&
           GetBasicBlock()->GetGraph()->IsDynamicMethod();
}

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DEFINE_INST(TYPE)              \
    void TYPE::Accept(GraphVisitor *v) \
    {                                  \
        v->VisitInst(this);            \
    }
// NOTE(msherstennikov): There must be another way to generate this list
OPCODE_CLASS_LIST(DEFINE_INST)
#undef DEFINE_INST

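// For a phi, input i is associated with the predecessor block whose index is
// GetPhiInputBbNum(i); the helpers below translate between inputs and
// predecessor blocks in both directions (a descriptive note based on the
// accessors used here).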
BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    auto bbNum {GetPhiInputBbNum(index)};
    ASSERT(bbNum < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bbNum];
}

Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}

Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}

size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}

template <Opcode OPC, size_t INPUT_IDX>
Inst *SkipInstructions(Inst *inputInst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = inputInst->GetOpcode(); opcode == OPC; opcode = inputInst->GetOpcode()) {
        inputInst = inputInst->GetInput(INPUT_IDX).GetInst();
    }
    return inputInst;
}
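// Illustration of the intent (the chain is hypothetical): given
// v2 = NullCheck(v1), v1 = NullCheck(v0), SkipInstructions<Opcode::NullCheck, 0>(v2)
// follows input 0 while the opcode still matches and returns v0.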
/*
 * For the instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI, LoadArrayPairI,
 * StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual, and Monitor with a NullCheck input, the dataflow
 * input is the object, which is the first input of the NullCheck instruction.
 * For the instructions LoadArray, StoreArray, LoadArrayPair, and StoreArrayPair with a BoundsCheck input, the
 * dataflow input is the array index, which is the second input of the BoundsCheck instruction.
 * For the instructions Div and Mod with a ZeroCheck input, the dataflow input is the first input of the ZeroCheck.
 */
Inst *Inst::GetDataFlowInput(Inst *inputInst)
{
    auto opcode = inputInst->GetOpcode();
    if (opcode == Opcode::NullCheck) {
        return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
    }
    if (opcode == Opcode::BoundsCheck) {
        return SkipInstructions<Opcode::BoundsCheck, 1>(inputInst);
    }
    if (opcode == Opcode::BoundsCheckI) {
        return SkipInstructions<Opcode::BoundsCheckI, 0>(inputInst);
    }
    if (opcode == Opcode::ZeroCheck) {
        return SkipInstructions<Opcode::ZeroCheck, 0>(inputInst);
    }
    if (opcode == Opcode::NegativeCheck) {
        return SkipInstructions<Opcode::NegativeCheck, 0>(inputInst);
    }
    if (opcode == Opcode::NotPositiveCheck) {
        return SkipInstructions<Opcode::NotPositiveCheck, 0>(inputInst);
    }
    if (opcode == Opcode::AnyTypeCheck) {
        return SkipInstructions<Opcode::AnyTypeCheck, 0>(inputInst);
    }
    if (opcode == Opcode::ObjByIndexCheck) {
        return SkipInstructions<Opcode::ObjByIndexCheck, 0>(inputInst);
    }
    if (opcode == Opcode::HclassCheck) {
        inputInst = SkipInstructions<Opcode::HclassCheck, 0>(inputInst);
        return SkipInstructions<Opcode::LoadObject, 0>(inputInst);
    }
    if (opcode == Opcode::RefTypeCheck) {
        inputInst = SkipInstructions<Opcode::RefTypeCheck, 1>(inputInst);
        if (inputInst->GetOpcode() == Opcode::NullCheck) {
            return SkipInstructions<Opcode::NullCheck, 0>(inputInst);
        }
        return inputInst;
    }
    return inputInst;
}

bool Inst::IsPrecedingInSameBlock(const Inst *other) const
{
    ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
    if (this == other) {
        return true;
    }
    auto next = GetNext();
    while (next != nullptr) {
        if (next == other) {
            return true;
        }
        next = next->GetNext();
    }
    return false;
}

bool Inst::IsDominate(const Inst *other) const
{
    ASSERT(other != nullptr);
    if (this == other) {
        return true;
    }
    auto thisBb = GetBasicBlock();
    auto otherBb = other->GetBasicBlock();
    return thisBb == otherBb ? IsPrecedingInSameBlock(other) : thisBb->IsDominate(otherBb);
}

bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}

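// Cloning copies the opcode, bit fields, pc, and (in debug builds) the assigned
// registers, but not the inputs or users: the caller is expected to wire up the
// clone's inputs itself. Subclass overrides below extend this with their extra
// state. This note summarizes the implementations that follow.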
Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    clone->bitFields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        clone->ReserveInputs(GetInputsCount());
    }
#ifdef PANDA_COMPILER_DEBUG_INFO
    clone->SetCurrentMethod(GetCurrentMethod());
#endif
    return clone;
}

template <size_t N>
Inst *FixedInputsInst<N>::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<FixedInputsInst *>(Inst::Clone(targetGraph));
#ifndef NDEBUG
    for (size_t i = 0; i < INPUT_COUNT; ++i) {
        clone->SetSrcReg(i, GetSrcReg(i));
    }
#endif
    return clone;
}

#if PANDA_TARGET_MACOS
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
template class FixedInputsInst<3U>;
template class FixedInputsInst<4U>;
#endif

Inst *CallInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto instClone = Inst::Clone(targetGraph);
    auto callClone = static_cast<CallInst *>(instClone);
    callClone->SetCallMethodId(GetCallMethodId());
    callClone->SetCallMethod(GetCallMethod());
    callClone->SetCanNativeException(GetCanNativeException());
    CloneTypes(targetGraph->GetAllocator(), callClone);
    return instClone;
}

Inst *CallIndirectInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToCallIndirect();
    CloneTypes(targetGraph->GetAllocator(), clone);
    return clone;
}

Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto intrinsicClone = (GetOpcode() == Opcode::Intrinsic ? Inst::Clone(targetGraph)->CastToIntrinsic()
                                                            : Inst::Clone(targetGraph)->CastToBuiltin());
    intrinsicClone->SetIntrinsicId(GetIntrinsicId());
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    intrinsicClone->SetMethod(GetMethod());
    return intrinsicClone;
}

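// Note on the flag below: isSupportInt32 is taken from
// Graph::IsBytecodeOptimizer(), i.e. 32-bit integer constants are preserved as
// INT32 only for the bytecode optimizer; how the flag is applied is decided
// inside CreateInstConstant (an inference from the call sites here).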
Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *newCnst = nullptr;
    bool isSupportInt32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            newCnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), isSupportInt32);
            break;
        case DataType::INT64:
            newCnst = targetGraph->CreateInstConstant(GetIntValue(), isSupportInt32);
            break;
        case DataType::FLOAT32:
            newCnst = targetGraph->CreateInstConstant(GetFloatValue(), isSupportInt32);
            break;
        case DataType::FLOAT64:
            newCnst = targetGraph->CreateInstConstant(GetDoubleValue(), isSupportInt32);
            break;
        case DataType::ANY:
            newCnst = targetGraph->CreateInstConstant(GetRawValue(), isSupportInt32);
            newCnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    newCnst->SetDstReg(GetDstReg());
#endif
    return newCnst;
}

Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}

Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    clone->callerInst_ = callerInst_;
    clone->inliningDepth_ = inliningDepth_;
    return clone;
}

Inst *BinaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<BinaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

Inst *UnaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<UnaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, VRegType vregType)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, vregType});
}

void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    immediates_->resize(size);
}

void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catchEdgeIndex)
{
    if (catchTypeIds_ == nullptr) {
        ASSERT(catchEdgeIndexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
    }
    catchTypeIds_->push_back(id);
    catchEdgeIndexes_->push_back(catchEdgeIndex);
}

void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throwInsts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throwInsts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
    }
    throwInsts_->push_back(inst);
}

void CatchPhiInst::ReplaceThrowableInst(const Inst *oldInst, const Inst *newInst)
{
    auto index = GetThrowableInstIndex(oldInst);
    throwInsts_->at(index) = newInst;
}

void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throwInsts_ != nullptr) {
        throwInsts_->at(index) = throwInsts_->back();
        throwInsts_->pop_back();
    }
}

Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto idsCount = this->GetCatchTypeIdsCount(); idsCount > 0) {
        if (clone->catchTypeIds_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catchTypeIds_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catchEdgeIndexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        }
        clone->catchTypeIds_->resize(idsCount);
        clone->catchEdgeIndexes_->resize(idsCount);
        std::copy(this->catchTypeIds_->begin(), this->catchTypeIds_->end(), clone->catchTypeIds_->begin());
        std::copy(this->catchEdgeIndexes_->begin(), this->catchEdgeIndexes_->end(), clone->catchEdgeIndexes_->begin());
    }
    return clone;
}

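// The two successors of an IfImm block are distinguished by which edge is taken
// when the condition holds; GetTrueInputEdgeIdx() below picks it. For example,
// with cc == CC_NE and imm == 0, a true (non-zero) input takes successor 0.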
BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}

BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}

/**
 * NB! Can be called only before the Lowering pass.
 * Returns the if_imm block's successor index that is taken when the input is true.
 */
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    return GetCc() == CC_NE ? 0 : 1;
}

bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}

bool Inst::RequireRegMap() const
{
    if (GetOpcode() == Opcode::SafePoint) {
        return g_options.IsCompilerSafePointsRequireRegMap();
    }
    return GetOpcode() == Opcode::SaveStateOsr || IsPropagateLiveness();
}

bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return GetBasicBlock()->GetGraph()->GetZeroReg() != INVALID_REG && IsZeroConstantOrNullPtr(this);
}

bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}

bool Inst::IsAccWrite() const
{
    if (GetBasicBlock()->GetGraph()->IsDynamicMethod() && IsConst()) {
        return true;
    }
    return GetFlag(inst_flags::ACC_WRITE);
}

// Returns true if the instruction's result can be an object
bool Inst::IsReferenceOrAny() const
{
    if (GetType() == DataType::ANY) {
        switch (opcode_) {
            // GetAnyTypeName always returns a movable string
            case Opcode::GetAnyTypeName:
            // We conservatively treat a phi with ANY type as a reference,
            // because the any_type speculated for a phi may be incorrect
            case Opcode::Phi:
                return true;
            default:
                break;
        }
        auto anyType = GetAnyType();
        if (anyType == AnyBaseType::UNDEFINED_TYPE) {
            return true;
        }
        auto dataType = AnyBaseTypeToDataType(anyType);
        return dataType == DataType::REFERENCE;
    }
    return GetType() == DataType::REFERENCE;
}

// Returns true if the instruction's result can be moved by the GC.
// Returns false for checks because their result is equal to their input.
bool Inst::IsMovableObject() const
{
    if (IsCheck() || !IsReferenceOrAny()) {
        return false;
    }
    switch (opcode_) {
        case Opcode::NullPtr:
        case Opcode::LoadClass:
        case Opcode::InitClass:
        case Opcode::LoadAndInitClass:
        case Opcode::UnresolvedLoadAndInitClass:
        case Opcode::LoadImmediate:
        case Opcode::GetInstanceClass:
        case Opcode::GetGlobalVarAddress:
        case Opcode::ResolveObjectFieldStatic:
        case Opcode::Constant:
        case Opcode::LoadConstantPool:
        case Opcode::LoadRuntimeClass:
        case Opcode::LoadUndefined:
            // The result of these instructions can't be moved by the GC.
            return false;
        case Opcode::LoadObject:
            // Class objects live in non-movable space.
            return this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_CLASS &&
                   this->CastToLoadObject()->GetObjectType() != ObjectType::MEM_DYN_HCLASS;
        default:
            return true;
    }
}

TryInst *GetTryBeginInst(const BasicBlock *tryBeginBb)
{
    ASSERT(tryBeginBb != nullptr && tryBeginBb->IsTryBegin());
    for (auto inst : tryBeginBb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}

/**
 * Regalloc helper that checks whether the intrinsic's arguments must be located in
 * registers according to the calling convention
 */
bool IntrinsicInst::IsNativeCall() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    if (IsFastpathIntrinsic(intrinsicId_)) {
        return false;
    }
#ifdef PANDA_WITH_IRTOC
    if (IsIrtocIntrinsic(intrinsicId_)) {
        return intrinsicId_ == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY;
    }
#endif
    auto graph = GetBasicBlock()->GetGraph();
    auto arch = graph->GetArch();
    auto runtime = graph->GetRuntime();
    return !EncodesBuiltin(runtime, intrinsicId_, arch) || IsRuntimeCall();
}

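// A more specific deoptimization reason is reported only when extra type
// feedback is trustworthy: in AOT mode or when a valid method profile exists.
// Otherwise the generic ANY_TYPE_CHECK reason is returned (a summary of the
// branches below).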
DeoptimizeType AnyTypeCheckInst::GetDeoptimizeType() const
{
    auto graph = GetBasicBlock()->GetGraph();
    auto customDeoptimize = graph->IsAotMode() || graph->GetRuntime()->GetMethodProfile(graph->GetMethod(), true) !=
                                                      profiling::INVALID_PROFILE;
    if (!customDeoptimize) {
        return DeoptimizeType::ANY_TYPE_CHECK;
    }
    switch (AnyBaseTypeToDataType(GetAnyType())) {
        case DataType::Type::INT32:
            return DeoptimizeType::NOT_SMALL_INT;
        case DataType::Type::FLOAT64:
            if (IsIntegerWasSeen()) {
                return DeoptimizeType::NOT_NUMBER;
            }
            return DeoptimizeType::DOUBLE_WITH_INT;
        default:
            return DeoptimizeType::ANY_TYPE_CHECK;
    }
}

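// Merges the check flags of another HclassCheck into this one, so overlapping
// checks can be combined into a single instruction (an inference from how this
// helper only widens, never narrows, the checks).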
void HclassCheckInst::ExtendFlags(Inst *inst)
{
    ASSERT(inst->GetOpcode() == Opcode::HclassCheck);
    auto check = inst->CastToHclassCheck();
    if (check->GetCheckFunctionIsNotClassConstructor()) {
        SetCheckFunctionIsNotClassConstructor(true);
    }
    if (check->GetCheckIsFunction()) {
        SetCheckIsFunction(true);
    }
}

}  // namespace panda::compiler