/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inst.h"
#include "graph.h"
#include "basicblock.h"
#include "graph_visitor.h"
#include "optimizer/optimizations/vn.h"

namespace panda::compiler {

void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}

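// Inputs, users, and the instruction itself share a single allocation. For fixed-arity
// instructions the layout is [User_{n-1} ... User_0][Input_0 ... Input_{n-1} + padding][Inst],
// so a User reaches its owner by stepping over the remaining users and the whole inputs array.
// For dynamic operands the owner Inst* is stored right past the users array (see
// DynamicOperands::Reallocate below).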
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    auto inputs_count {SizeField::Decode(properties_)};
    p += (inputs_count + Input::GetPadding(RUNTIME_ARCH, inputs_count)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}

void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}

void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}

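// Grows the combined users/inputs storage. When called without an explicit capacity the
// capacity roughly doubles. Layout of the allocation:
//   [User_{cap-1} ... User_0][Inst* owner][Input_0 ... Input_{cap-1}]
// Users are stored in reverse order so that User::GetInst() can reach the owner slot with
// forward pointer arithmetic.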
void DynamicOperands::Reallocate(size_t new_capacity /* =0 */)
{
    if (new_capacity == 0) {
        constexpr auto IMM_2 = 2;
        new_capacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
    } else if (new_capacity <= capacity_) {
        return;
    }
    auto size = new_capacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
    auto new_stor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));

    auto owner_inst {GetOwnerInst()};
    // Set the pointer to the owner instruction in the new storage
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    *reinterpret_cast<Inst **>(reinterpret_cast<User *>(new_stor) + new_capacity) = owner_inst;

    if (users_ == nullptr) {
        users_ = reinterpret_cast<User *>(new_stor);
        capacity_ = new_capacity;
        return;
    }
    Input *old_inputs = Inputs();
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    auto *new_inputs = reinterpret_cast<Input *>(new_stor + sizeof(User) * new_capacity) + 1;

    for (size_t i = 0; i < size_; i++) {
        Inst *old_input = old_inputs[i].GetInst();  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        ASSERT(old_input);
        // Initialize the new User in the container. Since users are placed from the end of the
        // array (the zero-index element is at the end), we add the capacity and subtract the index.
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        User *new_user = new (reinterpret_cast<User *>(new_stor) + new_capacity - i - 1) User(false, i, new_capacity);
        auto old_user {GetUser(i)};
        if (owner_inst->IsSaveState()) {
            new_user->SetVirtualRegister(old_user->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            new_user->SetBbNum(old_user->GetBbNum());
        }
        old_input->RemoveUser(old_user);
        old_input->AddUser(new_user);
        new_inputs[i] = Input(old_input);  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    }
    capacity_ = new_capacity;
    users_ = reinterpret_cast<User *>(new_stor);
}

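// Appends a new input and mirrors it with a User slot filled from the end of the users array.
// Returns the index of the appended input.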
unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    SetInput(size_, Input(inst));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        user->SetBbNum(size_);
    }
    inst->AddUser(user);
    return size_++;
}

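// Removes the input at `index` by moving the last input into its place (swap-with-last),
// so removal is O(1) but does not preserve input order. For phis, the basic-block number
// of the user that referred to the moved slot is patched up afterwards.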
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *curr_input = GetInput(index)->GetInst();
    if (curr_input->GetBasicBlock() != nullptr && curr_input->HasUsers()) {
        curr_input->RemoveUser(GetUser(index));
    }

    auto bb_num {GetUser(index)->GetBbNum()};
    auto owner_inst {GetOwnerInst()};

    if (index != size_) {
        auto *last_input = GetInput(size_)->GetInst();
        if (last_input->HasUsers()) {
            last_input->RemoveUser(GetUser(size_));
            last_input->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        if (owner_inst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (owner_inst->IsPhi()) {
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bb_num);
                break;
            }
        }
    }
}

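// The SetVnObject overrides below feed instruction-specific state (condition codes, operand
// types) into the value-numbering object so that GVN can distinguish otherwise-identical
// operations.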
void CompareInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfImmInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void CmpInst::SetVnObject(VnObject *vn_obj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vn_obj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vn_obj->Add(static_cast<uint32_t>(GetInputType(0)));
}

BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    auto bb_num {GetPhiInputBbNum(index)};
    ASSERT(bb_num < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bb_num];
}

Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}

Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}

size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}

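// Walks past a chain of instructions with opcode `opc`, repeatedly following the input at
// `input_idx`, and returns the first instruction with a different opcode. Intended for
// looking through check instructions (NullCheck, BoundsCheck, ZeroCheck) to the real operand.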
template <Opcode opc, size_t input_idx>
Inst *SkipInstructions(Inst *input_inst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = input_inst->GetOpcode(); opcode == opc; opcode = input_inst->GetOpcode()) {
        input_inst = input_inst->GetInput(input_idx).GetInst();
    }
    return input_inst;
}

/*
 * For the instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI, LoadArrayPairI,
 * StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual and Monitor with a NullCheck input, the dataflow
 * input is the object, which is the first input of the NullCheck instruction.
 * For the instructions LoadArray, StoreArray, LoadArrayPair and StoreArrayPair with a BoundsCheck input, the dataflow
 * input is the array index, which is the second input of the BoundsCheck instruction.
 * For the instructions Div and Mod with a ZeroCheck input, the dataflow input is the first input of the ZeroCheck.
 */
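// Note: in this version the function below returns its argument unchanged; the
// SkipInstructions helper above is available for looking through chains of such check
// instructions where they occur in the IR.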
Inst *Inst::GetDataFlowInput(Inst *input_inst) const
{
    return input_inst;
}

bool Inst::IsPrecedingInSameBlock(const Inst *other) const
{
    ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
    if (this == other) {
        return true;
    }
    auto next = GetNext();
    while (next != nullptr) {
        if (next == other) {
            return true;
        }
        next = next->GetNext();
    }
    return false;
}

bool Inst::IsDominate(const Inst *other) const
{
    ASSERT(other != nullptr);
    if (this == other) {
        return true;
    }
    auto this_bb = GetBasicBlock();
    auto other_bb = other->GetBasicBlock();
    return this_bb == other_bb ? IsPrecedingInSameBlock(other) : this_bb->IsDominate(other_bb);
}

bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}

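// The Clone family copies an instruction into a (possibly different) graph. The base
// implementation duplicates the opcode, bit fields, and bytecode PC; subclasses then copy
// their extra state (condition codes, operand types, immediates, spill-fills, etc.) on top.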
Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    clone->bit_fields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        clone->ReserveInputs(GetInputsCount());
    }
    return clone;
}

#if PANDA_TARGET_MACOS
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
#endif

Inst *SpillFillInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToSpillFill();
    for (auto spill_fill : spill_fills_) {
        clone->AddSpillFill(spill_fill);
    }
    return clone;
}

Inst *CompareInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    clone->CastToCompare()->SetCc(GetCc());
    clone->CastToCompare()->SetOperandsType(GetOperandsType());
    return clone;
}

Inst *CmpInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    clone->CastToCmp()->SetOperandsType(GetOperandsType());
    return clone;
}

Inst *IfInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    static_cast<IfInst *>(clone)->SetCc(GetCc());
    static_cast<IfInst *>(clone)->SetOperandsType(GetOperandsType());
    static_cast<IfInst *>(clone)->SetMethod(GetMethod());
    return clone;
}

Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto intrinsicClone = Inst::Clone(targetGraph)->CastToIntrinsic();
    intrinsicClone->SetIntrinsicId(GetIntrinsicId());
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    return intrinsicClone;
}

Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *new_cnst = nullptr;
    bool is_support_int32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            new_cnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), is_support_int32);
            break;
        case DataType::INT64:
            new_cnst = targetGraph->CreateInstConstant(GetIntValue(), is_support_int32);
            break;
        case DataType::FLOAT32:
            new_cnst = targetGraph->CreateInstConstant(GetFloatValue(), is_support_int32);
            break;
        case DataType::FLOAT64:
            new_cnst = targetGraph->CreateInstConstant(GetDoubleValue(), is_support_int32);
            break;
        case DataType::ANY:
            new_cnst = targetGraph->CreateInstConstant(GetRawValue(), is_support_int32);
            new_cnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    new_cnst->SetDstReg(GetDstReg());
#endif
    return new_cnst;
}

Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}

Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    return clone;
}

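// The immediates vector is allocated lazily: it is created in the graph's arena allocator on
// the first AppendImmediate call.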
void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, bool is_acc)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, is_acc});
}

void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    immediates_->resize(size);
}

void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catch_edge_index)
{
    if (catch_type_ids_ == nullptr) {
        ASSERT(catch_edge_indexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
    }
    catch_type_ids_->push_back(id);
    catch_edge_indexes_->push_back(catch_edge_index);
}

void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throw_insts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throw_insts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
    }
    throw_insts_->push_back(inst);
}

void CatchPhiInst::ReplaceThrowableInst(const Inst *old_inst, const Inst *new_inst)
{
    auto index = GetThrowableInstIndex(old_inst);
    throw_insts_->at(index) = new_inst;
}

void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throw_insts_ != nullptr) {
        // Mirror the swap-with-last removal performed on the inputs
        throw_insts_->at(index) = throw_insts_->back();
        throw_insts_->pop_back();
    }
}

Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto ids_count = this->GetCatchTypeIdsCount(); ids_count > 0) {
        if (clone->catch_type_ids_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        }
        clone->catch_type_ids_->resize(ids_count);
        clone->catch_edge_indexes_->resize(ids_count);
        std::copy(this->catch_type_ids_->begin(), this->catch_type_ids_->end(), clone->catch_type_ids_->begin());
        std::copy(this->catch_edge_indexes_->begin(), this->catch_edge_indexes_->end(),
                  clone->catch_edge_indexes_->begin());
    }
    return clone;
}

BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}

BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}

/**
 * NB! Can be called only before the Lowering pass.
 * Returns the successor index of if_imm's block that is taken when the input is true.
 */
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    return GetCc() == CC_NE ? 0 : 1;
}

bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}

bool Inst::RequireRegMap() const
{
    return CanThrow() && GetBasicBlock()->IsTry();
}

bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return false;
}

bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}

bool Inst::IsAccWrite() const
{
    if (IsConst()) {
        return true;
    }
    return GetFlag(inst_flags::ACC_WRITE);
}

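// Finds the Try instruction inside a try-begin block. Every try-begin block is expected to
// contain exactly one such instruction, so falling through the loop is unreachable.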
TryInst *GetTryBeginInst(const BasicBlock *try_begin_bb)
{
    ASSERT(try_begin_bb != nullptr && try_begin_bb->IsTryBegin());
    for (auto inst : try_begin_bb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}

/**
 * Regalloc helper that checks whether the intrinsic's arguments should be located in
 * registers according to the calling convention.
 */
bool IntrinsicInst::IsNativeCall() const
{
    return false;
}

}  // namespace panda::compiler