/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inst.h"
#include "graph.h"
#include "basicblock.h"
#include "graph_visitor.h"
#include "optimizer/optimizations/vn.h"

namespace panda::compiler {

void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}

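// User::GetInst recovers the instruction that owns this User record: for dynamically allocated operands the
// owner Inst pointer is stored right behind the User array, while for fixed operands the Inst object itself
// follows the User and (padded) Input arrays, so its address is computed from the user index and inputs count.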
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    auto inputs_count {SizeField::Decode(properties_)};
    p += (inputs_count + Input::GetPadding(RUNTIME_ARCH, inputs_count)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}

void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}

void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}

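// Layout of the reallocated dynamic operand storage: [User array, filled from the end][owner Inst *][Input array].
// When new_capacity is 0 the capacity is roughly doubled; otherwise the storage only grows and never shrinks.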
void DynamicOperands::Reallocate([[maybe_unused]] size_t new_capacity /* =0 */)
{
    if (new_capacity == 0) {
        constexpr auto IMM_2 = 2;
        new_capacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
    } else if (new_capacity <= capacity_) {
        return;
    }
    auto size = new_capacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
    auto new_stor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));

    auto owner_inst {GetOwnerInst()};
    // Set the pointer to the owner instruction into the new storage
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    *reinterpret_cast<Inst **>(reinterpret_cast<User *>(new_stor) + new_capacity) = owner_inst;

    if (users_ == nullptr) {
        users_ = reinterpret_cast<User *>(new_stor);
        capacity_ = new_capacity;
        return;
    }
    Input *old_inputs = Inputs();
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    auto *new_inputs = reinterpret_cast<Input *>(new_stor + sizeof(User) * new_capacity) + 1;

    for (size_t i = 0; i < size_; i++) {
        Inst *old_input = old_inputs[i].GetInst(); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        ASSERT(old_input);
        // Initialize a new User in the container. Since users are placed from the end of the array
        // (i.e. the zero-index element ends up at the end), we add the capacity and subtract the index.
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        User *new_user = new (reinterpret_cast<User *>(new_stor) + new_capacity - i - 1) User(false, i, new_capacity);
        auto old_user {GetUser(i)};
        if (owner_inst->IsSaveState()) {
            new_user->SetVirtualRegister(old_user->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            new_user->SetBbNum(old_user->GetBbNum());
        }
        old_input->RemoveUser(old_user);
        old_input->AddUser(new_user);
        new_inputs[i] = Input(old_input); // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    }
    capacity_ = new_capacity;
    users_ = reinterpret_cast<User *>(new_stor);
}

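// Appends a new input, growing the storage when it is full, and registers the owner as a user of `inst`.
// Returns the index assigned to the new input.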
unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    SetInput(size_, Input(inst));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        user->SetBbNum(size_);
    }
    inst->AddUser(user);
    return size_++;
}

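// Removes the input at `index` in O(1) by moving the last input (and its user record) into the freed slot.
// For Phi instructions the users' basic-block numbers are patched afterwards so they still map to the
// predecessor blocks correctly.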
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *curr_input = GetInput(index)->GetInst();
    if (curr_input->GetBasicBlock() != nullptr && curr_input->HasUsers()) {
        curr_input->RemoveUser(GetUser(index));
    }

    auto bb_num {GetUser(index)->GetBbNum()};
    auto owner_inst {GetOwnerInst()};

    if (index != size_) {
        auto *last_input = GetInput(size_)->GetInst();
        if (last_input->HasUsers()) {
            last_input->RemoveUser(GetUser(size_));
            last_input->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        if (owner_inst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (owner_inst->IsPhi()) {
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bb_num);
                break;
            }
        }
    }
}

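// The SetVnObject overrides below feed instruction-specific attributes (immediates, shift types, condition
// codes, operand types) into the VnObject, so value numbering can tell apart instructions that have the same
// opcode and inputs but differ in these attributes.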
void BinaryImmOperation::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(GetImm());
}

void BinaryShiftedRegisterOperation::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(GetImm());
    vn_obj->Add(static_cast<uint64_t>(GetShiftType()));
}

void UnaryShiftedRegisterOperation::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(GetImm());
    vn_obj->Add(static_cast<uint64_t>(GetShiftType()));
}

void CompareInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void SelectInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void SelectImmInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfImmInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void UnaryOperation::SetVnObject(VnObject *vn_obj)
{
    if (GetOpcode() == Opcode::Cast) {
        vn_obj->Add(static_cast<uint32_t>(GetInput(0).GetInst()->GetType()));
    }
}

void CmpInst::SetVnObject(VnObject *vn_obj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vn_obj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vn_obj->Add(static_cast<uint32_t>(GetInputType(0)));
}

void CastInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetInputType(0)));
}

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DEFINE_INST(TYPE)              \
    void TYPE::Accept(GraphVisitor *v) \
    {                                  \
        v->VisitInst(this);            \
    }
// TODO(msherstennikov): There must be another way to generate this list
OPCODE_CLASS_LIST(DEFINE_INST)
#undef DEFINE_INST

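// Phi helpers: a phi input index is mapped to the corresponding predecessor basic block via the stored
// basic-block number, and GetPredBlockIndex performs the reverse lookup.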
BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    auto bb_num {GetPhiInputBbNum(index)};
    ASSERT(bb_num < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bb_num];
}

Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}

Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}

size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}

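// Skips a chain of check instructions with opcode `opc`, repeatedly following input `input_idx`, and returns
// the first instruction that is not such a check.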
template <Opcode opc, size_t input_idx>
Inst *SkipInstructions(Inst *input_inst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = input_inst->GetOpcode(); opcode == opc; opcode = input_inst->GetOpcode()) {
        input_inst = input_inst->GetInput(input_idx).GetInst();
    }
    return input_inst;
}
/*
 * For the instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI, LoadArrayPairI,
 * StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual and Monitor with a NullCheck input, the dataflow
 * input is the object, which is the first input of the NullCheck instruction.
 * For the instructions LoadArray, StoreArray, LoadArrayPair and StoreArrayPair with a BoundsCheck input, the dataflow
 * input is the array index, which is the second input of the BoundsCheck instruction.
 * For the instructions Div and Mod with a ZeroCheck input, the dataflow input is the first input of the ZeroCheck.
 */
Inst *Inst::GetDataFlowInput(Inst *input_inst) const
{
    auto opcode = input_inst->GetOpcode();
    if (opcode == Opcode::NullCheck) {
        return SkipInstructions<Opcode::NullCheck, 0>(input_inst);
    }
    if (opcode == Opcode::BoundsCheck) {
        return SkipInstructions<Opcode::BoundsCheck, 1>(input_inst);
    }
    if (opcode == Opcode::BoundsCheckI) {
        return SkipInstructions<Opcode::BoundsCheckI, 0>(input_inst);
    }
    if (opcode == Opcode::ZeroCheck) {
        return SkipInstructions<Opcode::ZeroCheck, 0>(input_inst);
    }
    if (opcode == Opcode::NegativeCheck) {
        return SkipInstructions<Opcode::NegativeCheck, 0>(input_inst);
    }
    if (opcode == Opcode::AnyTypeCheck) {
        return SkipInstructions<Opcode::AnyTypeCheck, 0>(input_inst);
    }
    if (opcode == Opcode::RefTypeCheck) {
        input_inst = SkipInstructions<Opcode::RefTypeCheck, 1>(input_inst);
        if (input_inst->GetOpcode() == Opcode::NullCheck) {
            return SkipInstructions<Opcode::NullCheck, 0>(input_inst);
        }
        return input_inst;
    }
    return input_inst;
}

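// Linear scan within the common basic block: returns true if `this` is `other` or is located before it.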
bool Inst::IsPrecedingInSameBlock(const Inst *other) const
{
    ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
    if (this == other) {
        return true;
    }
    auto next = GetNext();
    while (next != nullptr) {
        if (next == other) {
            return true;
        }
        next = next->GetNext();
    }
    return false;
}

bool Inst::IsDominate(const Inst *other) const
{
    ASSERT(other != nullptr);
    if (this == other) {
        return true;
    }
    auto this_bb = GetBasicBlock();
    auto other_bb = other->GetBasicBlock();
    return this_bb == other_bb ? IsPrecedingInSameBlock(other) : this_bb->IsDominate(other_bb);
}

bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}

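// Base cloning: creates an instruction of the same opcode in the target graph and copies the bit fields and pc.
// Inputs and users are not cloned here; the caller is responsible for wiring them up.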
Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    clone->bit_fields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        clone->ReserveInputs(GetInputsCount());
    }
    return clone;
}

template <size_t N>
Inst *FixedInputsInst<N>::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<FixedInputsInst *>(Inst::Clone(targetGraph));
#ifndef NDEBUG
    for (size_t i = 0; i < INPUT_COUNT; ++i) {
        clone->SetSrcReg(i, GetSrcReg(i));
    }
#endif
    return clone;
}

#if PANDA_TARGET_MACOS
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
template class FixedInputsInst<3U>;
template class FixedInputsInst<4U>;
#endif

Inst *CallInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto instClone = Inst::Clone(targetGraph);
    auto callClone = static_cast<CallInst *>(instClone);
    callClone->SetCallMethodId(GetCallMethodId());
    callClone->SetCallMethod(GetCallMethod());
    callClone->SetCanNativeException(GetCanNativeException());
    CloneTypes(targetGraph->GetAllocator(), callClone);
    return instClone;
}

Inst *CallIndirectInst::Clone(const Graph *target_graph) const
{
    auto clone = Inst::Clone(target_graph)->CastToCallIndirect();
    CloneTypes(target_graph->GetAllocator(), clone);
    return clone;
}

Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto intrinsicClone = (GetOpcode() == Opcode::Intrinsic ? Inst::Clone(targetGraph)->CastToIntrinsic()
                                                            : Inst::Clone(targetGraph)->CastToBuiltin());
    intrinsicClone->SetIntrinsicId(GetIntrinsicId());
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    return intrinsicClone;
}

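// Constants are cloned by value: the proper CreateInstConstant overload is chosen from the constant's type,
// so the clone is rebuilt rather than bit-copied.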
Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *new_cnst = nullptr;
    bool is_support_int32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            new_cnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), is_support_int32);
            break;
        case DataType::INT64:
            new_cnst = targetGraph->CreateInstConstant(GetIntValue(), is_support_int32);
            break;
        case DataType::FLOAT32:
            new_cnst = targetGraph->CreateInstConstant(GetFloatValue(), is_support_int32);
            break;
        case DataType::FLOAT64:
            new_cnst = targetGraph->CreateInstConstant(GetDoubleValue(), is_support_int32);
            break;
        case DataType::ANY:
            new_cnst = targetGraph->CreateInstConstant(GetRawValue(), is_support_int32);
            new_cnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    new_cnst->SetDstReg(GetDstReg());
#endif
    return new_cnst;
}

Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}

Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    clone->caller_inst_ = caller_inst_;
    return clone;
}

Inst *BinaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<BinaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

Inst *UnaryShiftedRegisterOperation::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<UnaryShiftedRegisterOperation *>(FixedInputsInst::Clone(targetGraph));
    clone->SetImm(GetImm());
    clone->SetShiftType(GetShiftType());
    return clone;
}

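// The immediates of a SaveState live in an arena vector that is allocated lazily on the first append.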
void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, bool is_acc)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, is_acc});
}

void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    immediates_->resize(size);
}

void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catch_edge_index)
{
    if (catch_type_ids_ == nullptr) {
        ASSERT(catch_edge_indexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
    }
    catch_type_ids_->push_back(id);
    catch_edge_indexes_->push_back(catch_edge_index);
}

void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throw_insts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throw_insts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
    }
    throw_insts_->push_back(inst);
}

void CatchPhiInst::ReplaceThrowableInst(const Inst *old_inst, const Inst *new_inst)
{
    auto index = GetThrowableInstIndex(old_inst);
    throw_insts_->at(index) = new_inst;
}

void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throw_insts_ != nullptr) {
        throw_insts_->at(index) = throw_insts_->back();
        throw_insts_->pop_back();
    }
}

Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto ids_count = this->GetCatchTypeIdsCount(); ids_count > 0) {
        if (clone->catch_type_ids_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        }
        clone->catch_type_ids_->resize(ids_count);
        clone->catch_edge_indexes_->resize(ids_count);
        std::copy(this->catch_type_ids_->begin(), this->catch_type_ids_->end(), clone->catch_type_ids_->begin());
        std::copy(this->catch_edge_indexes_->begin(), this->catch_edge_indexes_->end(),
                  clone->catch_edge_indexes_->begin());
    }
    return clone;
}

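// Successor selection for IfImm: which of the two successor edges corresponds to a "true" input is derived
// from the condition code, see GetTrueInputEdgeIdx below.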
BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}

BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}

/**
 * NB! Can be called before the Lowering pass only.
 * Returns the successor index of if_imm's block that is taken when the input is true.
 */
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    return GetCc() == CC_NE ? 0 : 1;
}

bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}

bool Inst::RequireRegMap() const
{
    if (GetOpcode() == Opcode::SafePoint) {
        return false;
    }
    return (CanThrow() && GetBasicBlock()->IsTry()) || GetOpcode() == Opcode::SaveStateOsr || CanDeoptimize();
}

bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return GetBasicBlock()->GetGraph()->GetZeroReg() != INVALID_REG && IsZeroConstantOrNullPtr(this);
}

bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}

bool Inst::IsAccWrite() const
{
    if (GetBasicBlock()->GetGraph()->IsDynamicMethod() && IsConst()) {
        return true;
    }
    return GetFlag(inst_flags::ACC_WRITE);
}

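// Finds the Try instruction inside a try-begin block; such a block is expected to contain one.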
TryInst *GetTryBeginInst(const BasicBlock *try_begin_bb)
{
    ASSERT(try_begin_bb != nullptr && try_begin_bb->IsTryBegin());
    for (auto inst : try_begin_bb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}

/**
 * Regalloc's helper to check whether the intrinsic's arguments should be located in registers according to the
 * calling convention.
 */
bool IntrinsicInst::IsNativeCall() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    if (IsIrtocIntrinsic(intrinsic_id_)) {
        return intrinsic_id_ == RuntimeInterface::IntrinsicId::INTRINSIC_SLOW_PATH_ENTRY;
    }
    auto graph = GetBasicBlock()->GetGraph();
    auto arch = graph->GetArch();
    auto runtime = graph->GetRuntime();
    return !EncodesBuiltin(runtime, intrinsic_id_, arch) || IsRuntimeCall();
}

} // namespace panda::compiler