/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
#define COMPILER_OPTIMIZER_IR_GRAPH_H

#include <algorithm>
#include <optional>
#include "aot_data.h"
#include "compiler_events_gen.h"
#include "inst.h"
#include "marker.h"
#include "optimizer/code_generator/method_properties.h"
#include "optimizer/pass_manager.h"
#include "utils/arena_containers.h"

namespace panda {
class Method;
class CodeAllocator;
}  // namespace panda

namespace panda::compiler {
class BasicBlock;
class Graph;
class RuntimeInfo;
class PassManager;
class LivenessAnalyzer;
class DominatorsTree;
class Rpo;
class BoundsRangeInfo;
class Loop;
class CodeInfoBuilder;

class Encoder;
class CallingConvention;
class ParameterInfo;
class RegistersDescription;
class RelocationHandler;

enum AliasType : uint8_t;

/**
 * Specifies graph compilation mode.
 */
class GraphMode {
public:
    explicit GraphMode(uint32_t value) : value_(value) {}

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECLARE_GRAPH_MODE(name)                    \
    static GraphMode name(bool set = true)          \
    {                                               \
        return GraphMode(Flag##name ::Encode(set)); \
    }                                               \
    void Set##name(bool v)                          \
    {                                               \
        Flag##name ::Set(v, &value_);               \
    }                                               \
    bool Is##name() const                           \
    {                                               \
        return Flag##name ::Get(value_);            \
    }

    DECLARE_GRAPH_MODE(Osr);
    // The graph is used in BytecodeOptimizer mode
    DECLARE_GRAPH_MODE(BytecodeOpt);
    // The method is from a dynamic language
    DECLARE_GRAPH_MODE(DynamicMethod);
    // Graph will be compiled with the native calling convention
    DECLARE_GRAPH_MODE(Native);
    // FastPath from compiled code to the runtime
    DECLARE_GRAPH_MODE(FastPath);
    // Boundary frame is used for compiled code
    DECLARE_GRAPH_MODE(Boundary);
    // Graph will be compiled for calls inside the interpreter
    DECLARE_GRAPH_MODE(Interpreter);
    // Graph will be compiled for the interpreter's main loop
    DECLARE_GRAPH_MODE(InterpreterEntry);

#undef DECLARE_GRAPH_MODE

    bool SupportManagedCode() const
    {
        return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
    }

    void Dump(std::ostream &stm);

private:
    using FlagOsr = BitField<bool, 0, 1>;
    using FlagBytecodeOpt = FlagOsr::NextFlag;
    using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
    using FlagNative = FlagDynamicMethod::NextFlag;
    using FlagFastPath = FlagNative::NextFlag;
    using FlagBoundary = FlagFastPath::NextFlag;
    using FlagInterpreter = FlagBoundary::NextFlag;
    using FlagInterpreterEntry = FlagInterpreter::NextFlag;

    uint32_t value_ {0};

    friend GraphMode operator|(GraphMode a, GraphMode b);
};

inline GraphMode operator|(GraphMode a, GraphMode b)
{
    return GraphMode(a.value_ | b.value_);
}
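
/*
 * Usage sketch (illustrative only): modes are bit flags, so the named factory
 * methods generated by DECLARE_GRAPH_MODE can be combined with operator| and
 * then queried or toggled through the generated Is<name>() / Set<name>()
 * accessors:
 *
 *   GraphMode mode = GraphMode::Osr(true) | GraphMode::DynamicMethod(true);
 *   ASSERT(mode.IsOsr() && mode.IsDynamicMethod() && !mode.IsNative());
 *   mode.SetOsr(false);  // flags can also be changed after construction
 */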

using EncodeDataType = Span<uint8_t>;

class Graph final : public MarkerMgr {
public:
    explicit Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch)
        : Graph(allocator, local_allocator, arch, false)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool osr_mode)
        : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), osr_mode)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool dynamic_method, bool bytecode_opt)
        : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), false, nullptr, dynamic_method,
                bytecode_opt)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osr_mode)
        : Graph(allocator, local_allocator, arch, method, runtime, osr_mode, nullptr)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osr_mode, Graph *parent, bool dynamic_method = false,
          bool bytecode_opt = false)
        : Graph(allocator, local_allocator, arch, method, runtime, parent,
                GraphMode::Osr(osr_mode) | GraphMode::BytecodeOpt(bytecode_opt) |
                    GraphMode::DynamicMethod(dynamic_method))
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, Graph *parent, GraphMode mode)
        : ALLOCATOR(allocator),
          LOCAL_ALLOCATOR(local_allocator),
          arch_(arch),
          vector_bb_(allocator->Adapter()),
          throwable_insts_(allocator->Adapter()),
          runtime_(runtime),
          method_(method),
          pass_manager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
          event_writer_(runtime->GetClassNameFromMethod(method), runtime->GetMethodName(method)),
          mode_(mode),
          single_implementation_list_(allocator->Adapter()),
          try_begin_blocks_(allocator->Adapter()),
          spilled_constants_(allocator->Adapter()),
          parent_graph_(parent)
    {
        SetNeedCleanup(true);
    }

    ~Graph() override;

    Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
    {
        auto graph = GetAllocator()->New<Graph>(GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime(),
                                                this, mode_);
        graph->SetAotData(GetAotData());
        return graph;
    }

    /// Get default runtime interface object
    static RuntimeInterface *GetDefaultRuntime()
    {
        static RuntimeInterface runtime_interface;
        return &runtime_interface;
    }

    Arch GetArch() const
    {
        return arch_;
    }

    void AddBlock(BasicBlock *block);
#ifndef NDEBUG
    void AddBlock(BasicBlock *block, uint32_t id);
#endif
    void DisconnectBlock(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);
    void DisconnectBlockRec(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);

    void EraseBlock(BasicBlock *block);
    void RestoreBlock(BasicBlock *block);
    // Remove empty block. Block must have one successor and no Phis.
    void RemoveEmptyBlock(BasicBlock *block);

    // Remove empty block. Block may have Phis and can't be a loop pre-header.
    void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irr_loop = false);

    // Remove block predecessors.
    void RemovePredecessors(BasicBlock *block, bool remove_last_inst = true);

    // Remove block successors.
    void RemoveSuccessors(BasicBlock *block);

    // Remove unreachable blocks.
    void RemoveUnreachableBlocks();

    // get end block
    BasicBlock *GetEndBlock()
    {
        return end_block_;
    }

    BasicBlock *GetEndBlock() const
    {
        return end_block_;
    }
    // set end block
    void SetEndBlock(BasicBlock *end_block)
    {
        end_block_ = end_block;
    }
    bool HasEndBlock()
    {
        return end_block_ != nullptr;
    }
    // get start block
    BasicBlock *GetStartBlock()
    {
        return start_block_;
    }
    BasicBlock *GetStartBlock() const
    {
        return start_block_;
    }
    // set start block
    void SetStartBlock(BasicBlock *start_block)
    {
        start_block_ = start_block;
    }
    // get vector_bb_
    const ArenaVector<BasicBlock *> &GetVectorBlocks() const
    {
        return vector_bb_;
    }

    size_t GetAliveBlocksCount() const
    {
        return std::count_if(vector_bb_.begin(), vector_bb_.end(), [](BasicBlock *block) { return block != nullptr; });
    }

    PassManager *GetPassManager()
    {
        return &pass_manager_;
    }
    const PassManager *GetPassManager() const
    {
        return &pass_manager_;
    }

    const BoundsRangeInfo *GetBoundsRangeInfo() const;

    const ArenaVector<BasicBlock *> &GetBlocksRPO() const;

    const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;

    template <class Callback>
    void VisitAllInstructions(Callback callback);

    AliasType CheckInstAlias(Inst *mem1, Inst *mem2);

    /// Main allocator for the graph; all data related to the Graph should be allocated via this allocator.
    ArenaAllocator *GetAllocator() const
    {
        return ALLOCATOR;
    }
    /// Allocator for temporary usage, when allocated data is no longer needed after an optimization/analysis finishes.
    ArenaAllocator *GetLocalAllocator() const
    {
        return LOCAL_ALLOCATOR;
    }
    bool IsDFConstruct() const
    {
        return FlagDFConstruct::Get(bit_fields_);
    }
    void SetDFConstruct()
    {
        FlagDFConstruct::Set(true, &bit_fields_);
    }

    void SetAotData(AotData *data)
    {
        aot_data_ = data;
    }
    AotData *GetAotData()
    {
        return aot_data_;
    }
    const AotData *GetAotData() const
    {
        return aot_data_;
    }

    bool IsAotMode() const
    {
        return aot_data_ != nullptr;
    }

    bool IsOfflineCompilationMode() const
    {
        return IsAotMode() || GetMode().IsInterpreter();
    }

    bool IsDefaultLocationsInit() const
    {
        return FlagDefaultLocationsInit::Get(bit_fields_);
    }
    void SetDefaultLocationsInit()
    {
        FlagDefaultLocationsInit::Set(true, &bit_fields_);
    }
#ifndef NDEBUG
    bool IsRegAllocApplied() const
    {
        return FlagRegallocApplied::Get(bit_fields_);
    }
    void SetRegAllocApplied()
    {
        FlagRegallocApplied::Set(true, &bit_fields_);
    }
    bool IsRegAccAllocApplied() const
    {
        return FlagRegaccallocApplied::Get(bit_fields_);
    }
    void SetRegAccAllocApplied()
    {
        FlagRegaccallocApplied::Set(true, &bit_fields_);
    }
    bool IsInliningComplete() const
    {
        return FlagInliningComplete::Get(bit_fields_);
    }
    void SetInliningComplete()
    {
        FlagInliningComplete::Set(true, &bit_fields_);
    }
    bool IsSchedulerComplete() const
    {
        return FlagSchedulerComplete::Get(bit_fields_);
    }
    void SetSchedulerComplete()
    {
        FlagSchedulerComplete::Set(true, &bit_fields_);
    }
    bool IsLowLevelInstructionsEnabled() const
    {
        return FlagLowLevelInstnsEnabled::Get(bit_fields_);
    }
    void SetLowLevelInstructionsEnabled()
    {
        FlagLowLevelInstnsEnabled::Set(true, &bit_fields_);
    }
#else
    bool IsRegAllocApplied() const
    {
        return false;
    }
#endif  // NDEBUG

    void SetData(EncodeDataType data)
    {
        data_ = data;
    }

    EncodeDataType GetData() const
    {
        return data_;
    }

    EncodeDataType GetData()
    {
        return data_;
    }

    void SetCodeInfo(Span<uint8_t> data)
    {
        code_info_data_ = data.SubSpan<const uint8_t>(0, data.size());
    }

    Span<const uint8_t> GetCodeInfoData() const
    {
        return code_info_data_;
    }

    void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
    {
        if (prefix != nullptr) {
            out << prefix;
        }
        out << "'\n used scalar regs: ";
        if (used_regs_ != nullptr) {
            for (unsigned i = 0; i < used_regs_->size(); ++i) {
                if (used_regs_->at(i)) {
                    out << i << " ";
                }
            }
        }
        out << "\n used float regs: ";
        if (used_vregs_ != nullptr) {
            for (unsigned i = 0; i < used_vregs_->size(); ++i) {
                if (used_vregs_->at(i)) {
                    out << i << " ";
                }
            }
        }
        out << std::endl;
    }

    // Get mask of registers used in the graph
    template <DataType::Type reg_type>
    ArenaVector<bool> *GetUsedRegs() const
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            return used_regs_;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::FLOAT64) {
            return used_vregs_;
        }
        UNREACHABLE();
        return nullptr;
    }

    void SetRegUsage(Register reg, DataType::Type type)
    {
        ASSERT(reg != INVALID_REG);
        if (DataType::IsFloatType(type)) {
            SetUsedReg<DataType::FLOAT64>(reg);
        } else {
            SetUsedReg<DataType::INT64>(reg);
        }
    }

    template <DataType::Type reg_type>
    void SetUsedReg(Register reg)
    {
        ArenaVector<bool> *graph_regs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            graph_regs = used_regs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (reg_type == DataType::FLOAT64) {
            graph_regs = used_vregs_;
        } else {
            UNREACHABLE();
        }
        ASSERT(reg < graph_regs->size());
        (*graph_regs)[reg] = true;
    }

    template <DataType::Type reg_type>
    void InitUsedRegs(const ArenaVector<bool> *used_regs)
    {
        if (used_regs == nullptr) {
            return;
        }
        ArenaVector<bool> *graph_regs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            used_regs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graph_regs = used_regs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (reg_type == DataType::FLOAT64) {
            used_vregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graph_regs = used_vregs_;
        } else {
            UNREACHABLE();
        }
        graph_regs->resize(used_regs->size());
        std::copy(used_regs->begin(), used_regs->end(), graph_regs->begin());
    }
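
    /*
     * Illustrative sketch (an assumed call pattern, not mandated by this
     * header): a register allocator seeds the masks once via InitUsedRegs
     * and then marks individual registers as they get assigned:
     *
     *   ArenaVector<bool> scalars(32U, false, allocator->Adapter());
     *   graph->InitUsedRegs<DataType::INT64>(&scalars);
     *   graph->SetRegUsage(5, DataType::INT64);  // routed to the scalar mask
     *   ArenaVector<bool> *used = graph->GetUsedRegs<DataType::INT64>();
     */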

    Register GetZeroReg() const;
    Register GetArchTempReg() const;
    Register GetArchTempVReg() const;
    // Get mask of registers used in codegen, runtime, etc.
    RegMask GetArchUsedRegs();
    void SetArchUsedRegs(RegMask mask);

    // Get mask of vector registers used in codegen, runtime, etc.
    VRegMask GetArchUsedVRegs();

    // Return true if one 64-bit scalar register can be split into two 32-bit registers
    bool IsRegScalarMapped() const;

    uint32_t GetStackSlotsCount() const
    {
        return stack_slot_count_;
    }

    void SetStackSlotsCount(uint32_t stack_slot_count)
    {
        stack_slot_count_ = stack_slot_count;
    }

    void UpdateStackSlotsCount(uint32_t stack_slot_count)
    {
        stack_slot_count_ = std::max(stack_slot_count_, stack_slot_count);
    }

    uint32_t GetParametersSlotsCount() const;

    uint32_t GetExtSlotsStart() const
    {
        return ext_stack_slot_;
    }

    void SetExtSlotsStart(uint32_t ext_stack_slot)
    {
        ext_stack_slot_ = ext_stack_slot;
    }

    BasicBlock *CreateEmptyBlock(uint32_t guest_pc = INVALID_PC);
    BasicBlock *CreateEmptyBlock(BasicBlock *base_block);
#ifndef NDEBUG
    BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guest_pc);
#endif
    BasicBlock *CreateStartBlock();
    BasicBlock *CreateEndBlock(uint32_t guest_pc = INVALID_PC);
    ConstantInst *GetFirstConstInst()
    {
        return first_const_inst_;
    }
    void SetFirstConstInst(ConstantInst *const_inst)
    {
        first_const_inst_ = const_inst;
    }

    Inst *GetNullPtrInst() const
    {
        return nullptr_inst_;
    }
    bool HasNullPtrInst() const
    {
        return nullptr_inst_ != nullptr;
    }
    void UnsetNullPtrInst()
    {
        ASSERT(HasNullPtrInst());
        nullptr_inst_ = nullptr;
    }
    Inst *GetOrCreateNullPtr();

    /// Find constant in the list, return nullptr if not found
    ConstantInst *FindConstant(DataType::Type type, uint64_t value);
    /// Find constant in the list or create a new one and insert it at the end
    template <typename T>
    ConstantInst *FindOrCreateConstant(T value);

    /**
     * Find a constant that is equal to the given one specified by inst. If not found, add inst to the graph.
     * @param inst Constant instruction to be added
     * @return Found instruction, or inst if not found
     */
    ConstantInst *FindOrAddConstant(ConstantInst *inst);

    ParameterInst *AddNewParameter(uint16_t arg_number);

    ParameterInst *AddNewParameter(uint16_t arg_number, DataType::Type type)
    {
        ParameterInst *param = AddNewParameter(arg_number);
        param->SetType(type);
        return param;
    }

    /*
     * The function removes the ConstantInst from the graph's constant list.
     * NOTE: the ConstantInst isn't removed from the BasicBlock's instruction list.
     */
    void RemoveConstFromList(ConstantInst *const_inst);

    ConstantInst *GetSpilledConstant(ImmTableSlot slot)
    {
        ASSERT(static_cast<size_t>(slot) < spilled_constants_.size());
        return spilled_constants_[slot];
    }

    ImmTableSlot AddSpilledConstant(ConstantInst *const_inst)
    {
        // Constant is already in the table
        auto current_slot = const_inst->GetImmTableSlot();
        if (current_slot != INVALID_IMM_TABLE_SLOT) {
            ASSERT(spilled_constants_[current_slot] == const_inst);
            return current_slot;
        }

        auto count = spilled_constants_.size();
        if (count >= MAX_NUM_IMM_SLOTS) {
            return INVALID_IMM_TABLE_SLOT;
        }
        spilled_constants_.push_back(const_inst);
        const_inst->SetImmTableSlot(count);
        return ImmTableSlot(count);
    }

    ImmTableSlot FindSpilledConstantSlot(ConstantInst *const_inst) const
    {
        auto slot = std::find(spilled_constants_.begin(), spilled_constants_.end(), const_inst);
        if (slot == spilled_constants_.end()) {
            return INVALID_IMM_TABLE_SLOT;
        }
        return std::distance(spilled_constants_.begin(), slot);
    }

    size_t GetSpilledConstantsCount() const
    {
        return spilled_constants_.size();
    }

    bool HasAvailableConstantSpillSlots() const
    {
        return GetSpilledConstantsCount() < MAX_NUM_IMM_SLOTS;
    }
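
    /*
     * Illustrative sketch (an assumed call pattern): spilling a constant to
     * an immediate-table slot and looking it up again later:
     *
     *   if (graph->HasAvailableConstantSpillSlots()) {
     *       ImmTableSlot slot = graph->AddSpilledConstant(const_inst);
     *       ASSERT(slot != INVALID_IMM_TABLE_SLOT);
     *       ASSERT(graph->GetSpilledConstant(slot) == const_inst);
     *       ASSERT(graph->FindSpilledConstantSlot(const_inst) == slot);
     *   }
     */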

    auto begin()  // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.begin();
    }
    auto begin() const  // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.begin();
    }
    auto end()  // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.end();
    }
    auto end() const  // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.end();
    }

    void Dump(std::ostream *out) const;

    Loop *GetRootLoop()
    {
        return root_loop_;
    }
    const Loop *GetRootLoop() const
    {
        return root_loop_;
    }

    void SetRootLoop(Loop *root_loop)
    {
        root_loop_ = root_loop;
    }

    void SetHasIrreducibleLoop(bool has_irr_loop)
    {
        FlagIrredicibleLoop::Set(has_irr_loop, &bit_fields_);
    }

    void SetHasInfiniteLoop(bool has_inf_loop)
    {
        FlagInfiniteLoop::Set(has_inf_loop, &bit_fields_);
    }

    void SetHasFloatRegs()
    {
        FlagFloatRegs::Set(true, &bit_fields_);
    }

    bool HasLoop() const;
    bool HasIrreducibleLoop() const;
    bool HasInfiniteLoop() const;
    bool HasFloatRegs() const;

    /**
     * Try-catch info
     * Vector of try-begin blocks in the order they were declared in the bytecode
     */
    void AppendTryBeginBlock(const BasicBlock *block)
    {
        try_begin_blocks_.push_back(block);
    }

    void EraseTryBeginBlock(const BasicBlock *block)
    {
        auto it = std::find(try_begin_blocks_.begin(), try_begin_blocks_.end(), block);
        if (it == try_begin_blocks_.end()) {
            ASSERT(false && "Trying to remove a non-try_begin block");
            return;
        }
        try_begin_blocks_.erase(it);
    }

    const auto &GetTryBeginBlocks() const
    {
        return try_begin_blocks_;
    }

    void AppendThrowableInst(const Inst *inst, BasicBlock *catch_handler)
    {
        auto it = throwable_insts_.emplace(inst, GetAllocator()->Adapter()).first;
        it->second.push_back(catch_handler);
    }

    bool IsInstThrowable(const Inst *inst) const
    {
        return throwable_insts_.count(inst) > 0;
    }

    void RemoveThrowableInst(const Inst *inst);
    void ReplaceThrowableInst(Inst *old_inst, Inst *new_inst);

    const auto &GetThrowableInstHandlers(const Inst *inst) const
    {
        ASSERT(IsInstThrowable(inst));
        return throwable_insts_.at(inst);
    }

    void ClearTryCatchInfo()
    {
        throwable_insts_.clear();
        try_begin_blocks_.clear();
    }

    void DumpThrowableInsts(std::ostream *out) const;
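
    /*
     * Illustrative sketch (an assumed call pattern): registering a potentially
     * throwing instruction and walking its catch handlers:
     *
     *   graph->AppendThrowableInst(inst, catch_block);
     *   if (graph->IsInstThrowable(inst)) {
     *       for (auto *handler : graph->GetThrowableInstHandlers(inst)) {
     *           // handler is the BasicBlock of a catch handler for inst
     *       }
     *   }
     */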

    /**
     * Run the pass specified by the template argument T.
     * Optimization passes may take additional arguments that will be passed to the optimization's constructor.
     * Analyses can't take additional arguments.
     * @tparam T Type of pass
     * @param args Additional arguments for optimization passes
     * @return true if the pass was successful
     */
    template <typename T, typename... Args>
    bool RunPass(Args... args)
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
    }
    template <typename T, typename... Args>
    bool RunPass(Args... args) const
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
    }

    template <typename T>
    bool RunPass(T *pass)
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
    }

    /**
     * Get an analysis instance.
     * All analyses reside in the Graph object in a composition relationship.
     * @tparam T Type of analysis
     * @return Reference to the analysis instance
     */
    template <typename T>
    T &GetAnalysis()
    {
        ASSERT(GetPassManager());
        return GetPassManager()->GetAnalysis<T>();
    }
    template <typename T>
    const T &GetAnalysis() const
    {
        ASSERT(GetPassManager());
        return pass_manager_.GetAnalysis<T>();
    }

    /**
     * Same as GetAnalysis, but additionally checks that the analysis is in a valid state.
     * @tparam T Type of analysis
     * @return Reference to the analysis instance
     */
    template <typename T>
    T &GetValidAnalysis()
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }
    template <typename T>
    const T &GetValidAnalysis() const
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }

    /**
     * Return true if the analysis is valid, false otherwise
     * @tparam T Type of analysis
     */
    template <typename T>
    bool IsAnalysisValid() const
    {
        return GetAnalysis<T>().IsValid();
    }

    /**
     * Reset the valid state of the specified analysis
     * @tparam T Type of analysis
     */
    template <typename T>
    void InvalidateAnalysis()
    {
        ASSERT(GetPassManager());
        GetPassManager()->GetAnalysis<T>().SetValid(false);
    }
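
    /*
     * Illustrative sketch of the pass/analysis API, using analyses that are
     * forward-declared in this header:
     *
     *   auto &la = graph->GetValidAnalysis<LivenessAnalyzer>();  // (re)runs if stale
     *   graph->InvalidateAnalysis<DominatorsTree>();             // forces a later re-run
     *   bool ok = graph->RunPass<Rpo>();                         // explicit run
     */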

    /// Accessors for the current instruction id counter.
    auto GetCurrentInstructionId() const
    {
        return instr_current_id_;
    }
    auto SetCurrentInstructionId(size_t v)
    {
        instr_current_id_ = v;
    }

    /// RuntimeInterface accessors
    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    void SetRuntime(RuntimeInterface *runtime)
    {
        runtime_ = runtime;
    }
    auto GetMethod() const
    {
        return method_;
    }
    auto SetMethod(RuntimeInterface::MethodPtr method)
    {
        method_ = method;
    }

    Encoder *GetEncoder();
    RegistersDescription *GetRegisters() const;
    CallingConvention *GetCallingConvention();
    const MethodProperties &GetMethodProperties();
    void ResetParameterInfo();
    SpillFillData GetDataForNativeParam(DataType::Type type);

    EventWriter &GetEventWriter()
    {
        return event_writer_;
    }

    void SetCodeBuilder(CodeInfoBuilder *builder)
    {
        ci_builder_ = builder;
    }

    // clang-format off

    /**
     * Create an instruction by opcode
     */
    [[nodiscard]] Inst* CreateInst(Opcode opc) const
    {
        switch (opc) {
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, BASE, ...)                                     \
            case Opcode::OPCODE: {                                      \
                auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE); \
                inst->SetId(instr_current_id_++);                       \
                return inst;                                            \
            }
            OPCODE_LIST(INST_DEF)

#undef INST_DEF
            default:
                return nullptr;
        }
    }
    /**
     * Define creation methods for all opcodes
     */
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, BASE, ...)                                                          \
    template <typename... Args>                                                              \
    [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const {                           \
        auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE, std::forward<Args>(args)...); \
        inst->SetId(instr_current_id_++);                                                    \
        return inst;                                                                         \
    }
    OPCODE_LIST(INST_DEF)

#undef INST_DEF
    // clang-format on

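    /*
     * Illustrative sketch: instructions can be created generically by opcode
     * or through the generated per-opcode factories. The exact factory set
     * comes from OPCODE_LIST; the opcode used here is an assumption:
     *
     *   Inst *add = graph->CreateInst(Opcode::Add);            // generic path
     *   auto *cnst = graph->CreateInstConstant(value, false);  // generated factory
     */
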
    uint32_t GetBitFields()
    {
        return bit_fields_;
    }

    void SetBitFields(uint32_t bit_fields)
    {
        bit_fields_ = bit_fields;
    }

    bool NeedCleanup() const
    {
        return FlagNeedCleanup::Get(bit_fields_);
    }

    void SetNeedCleanup(bool v)
    {
        FlagNeedCleanup::Set(v, &bit_fields_);
    }

    bool IsOsrMode() const
    {
        return mode_.IsOsr();
    }

    bool IsBytecodeOptimizer() const
    {
        return mode_.IsBytecodeOpt();
    }

    bool IsDynamicMethod() const
    {
        return mode_.IsDynamicMethod();
    }

    bool SupportManagedCode() const
    {
        return mode_.SupportManagedCode();
    }

    GraphMode GetMode() const
    {
        return mode_;
    }

    void SetMode(GraphMode mode)
    {
        mode_ = mode;
    }

#ifndef NDEBUG
    compiler::inst_modes::Mode GetCompilerMode()
    {
        if (IsBytecodeOptimizer()) {
            return compiler::inst_modes::BYTECODE_OPT;
        }
        if (SupportManagedCode()) {
            return compiler::inst_modes::JIT_AOT;
        }
        return compiler::inst_modes::IRTOC;
    }
#endif

    void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
    {
        single_implementation_list_.push_back(method);
    }

    void SetDynamicMethod()
    {
        mode_.SetDynamicMethod(true);
    }

    auto &GetSingleImplementationList()
    {
        return single_implementation_list_;
    }

    Graph *GetParentGraph()
    {
        return parent_graph_;
    }

    Graph *GetOutermostParentGraph()
    {
        auto graph = this;
        while (graph->GetParentGraph() != nullptr) {
            graph = graph->GetParentGraph();
        }
        return graph;
    }

    void SetVRegsCount(size_t count)
    {
        vregs_count_ = count;
    }

    size_t GetVRegsCount() const
    {
        return vregs_count_;
    }

    RelocationHandler *GetRelocationHandler()
    {
        return relocation_handler_;
    }

    void SetRelocationHandler(RelocationHandler *handler)
    {
        relocation_handler_ = handler;
    }

    int64_t GetBranchCounter(const BasicBlock *block, bool true_succ);

    /**
     * This class provides methods for a range-based `for` loop over all parameters in the graph.
     */
    class ParameterList {
    public:
        class Iterator {
        public:
            explicit Iterator(Inst *inst) : inst_(inst) {}

            Iterator &operator++()
            {
                for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
                     inst_ = inst_->GetNext()) {
                }
                return *this;
            }
            bool operator!=(const Iterator &other)
            {
                return inst_ != other.inst_;
            }
            Inst *operator*()
            {
                return inst_;
            }
            Inst *operator->()
            {
                return inst_;
            }

        private:
            Inst *inst_ {nullptr};
        };

        explicit ParameterList(const Graph *graph) : graph_(graph) {}

        // NOLINTNEXTLINE(readability-identifier-naming)
        Iterator begin();
        // NOLINTNEXTLINE(readability-identifier-naming)
        static Iterator end()
        {
            return Iterator(nullptr);
        }

    private:
        const Graph *graph_ {nullptr};
    };

    /**
     * Get the list of all parameters
     * @return instance of the ParameterList class
     */
    ParameterList GetParameters() const
    {
        return ParameterList(this);
    }
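
    /*
     * Illustrative sketch: iterating over all Parameter instructions with a
     * range-based for loop (the iterator skips non-Parameter instructions):
     *
     *   for (auto *param : graph->GetParameters()) {
     *       ASSERT(param->GetOpcode() == Opcode::Parameter);
     *   }
     */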

    void InitDefaultLocations();

private:
    void AddConstInStartBlock(ConstantInst *const_inst);

    NO_MOVE_SEMANTIC(Graph);
    NO_COPY_SEMANTIC(Graph);

private:
    ArenaAllocator *const ALLOCATOR;
    ArenaAllocator *const LOCAL_ALLOCATOR;

    Arch arch_ {RUNTIME_ARCH};

    // List of blocks in insertion order.
    ArenaVector<BasicBlock *> vector_bb_;
    BasicBlock *start_block_ {nullptr};
    BasicBlock *end_block_ {nullptr};

    Loop *root_loop_ {nullptr};

    AotData *aot_data_ {nullptr};

    uint32_t bit_fields_ {0};
    using FlagDFConstruct = BitField<bool, 0, 1>;
    using FlagNeedCleanup = FlagDFConstruct::NextFlag;
    using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
    using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
    using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
    using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
#ifndef NDEBUG
    using FlagRegallocApplied = FlagDefaultLocationsInit::NextFlag;
    using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
    using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
    using FlagSchedulerComplete = FlagInliningComplete::NextFlag;
    using FlagLowLevelInstnsEnabled = FlagSchedulerComplete::NextFlag;
#endif  // NDEBUG

    // codegen data
    EncodeDataType data_;
    Span<const uint8_t> code_info_data_;
    ArenaVector<bool> *used_regs_ {nullptr};
    ArenaVector<bool> *used_vregs_ {nullptr};

    // TODO (a.popov) Replace by an ArenaMap from throwable_inst* to try_inst*
    ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwable_insts_;

    RegMask arch_used_regs_ {0};

    mutable size_t instr_current_id_ {0};
    // first constant instruction in the graph. TODO: rewrite it to a hash-map
    ConstantInst *first_const_inst_ {nullptr};
    Inst *nullptr_inst_ {nullptr};

    RuntimeInterface *runtime_ {nullptr};
    RuntimeInterface::MethodPtr method_ {nullptr};

    Encoder *encoder_ {nullptr};

    mutable RegistersDescription *registers_ {nullptr};

    CallingConvention *callconv_ {nullptr};

    std::optional<MethodProperties> method_properties_ {std::nullopt};

    ParameterInfo *param_info_ {nullptr};

    RelocationHandler *relocation_handler_ {nullptr};

    mutable PassManager pass_manager_;
    EventWriter event_writer_;

    GraphMode mode_;

    CodeInfoBuilder *ci_builder_ {nullptr};

    ArenaVector<RuntimeInterface::MethodPtr> single_implementation_list_;
    ArenaVector<const BasicBlock *> try_begin_blocks_;
    ArenaVector<ConstantInst *> spilled_constants_;
    // Graph that inlines this graph
    Graph *parent_graph_ {nullptr};
    // Number of used stack slots
    uint32_t stack_slot_count_ {0};
    // Number of used stack slots for parameters
    uint32_t param_slots_count_ {0};
    // First language extension slot
    uint32_t ext_stack_slot_ {0};
    // Number of virtual registers used in the compiled method (inlined methods aren't included).
    uint32_t vregs_count_ {0};
};

class MarkerHolder {
public:
    NO_COPY_SEMANTIC(MarkerHolder);
    NO_MOVE_SEMANTIC(MarkerHolder);

    explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
    {
        ASSERT(marker_ != UNDEF_MARKER);
    }

    ~MarkerHolder()
    {
        graph_->EraseMarker(marker_);
    }

    Marker GetMarker()
    {
        return marker_;
    }

private:
    const Graph *graph_;
    Marker marker_ {UNDEF_MARKER};
};
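
/*
 * Illustrative sketch: MarkerHolder is the RAII way to obtain a marker; the
 * marker is released automatically when the holder goes out of scope:
 *
 *   {
 *       MarkerHolder holder(graph);
 *       Marker visited = holder.GetMarker();
 *       // ... mark visited blocks/instructions with `visited` ...
 *   }  // graph->EraseMarker(visited) runs in ~MarkerHolder
 */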

template <typename T>
ConstantInst *Graph::FindOrCreateConstant(T value)
{
    bool is_support_int32 = IsBytecodeOptimizer();
    if (first_const_inst_ == nullptr) {
        first_const_inst_ = CreateInstConstant(value, is_support_int32);
        AddConstInStartBlock(first_const_inst_);
        return first_const_inst_;
    }
    ConstantInst *current_const = first_const_inst_;
    ConstantInst *prev_const = nullptr;
    while (current_const != nullptr) {
        if (current_const->IsEqualConst(value, is_support_int32)) {
            return current_const;
        }
        prev_const = current_const;
        current_const = current_const->GetNextConst();
    }
    ASSERT(prev_const != nullptr);
    auto *new_const = CreateInstConstant(value, is_support_int32);
    AddConstInStartBlock(new_const);

    prev_const->SetNextConst(new_const);
    return new_const;
}
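
/*
 * Illustrative sketch: constants are deduplicated through the linked list of
 * ConstantInst, so equal values share a single instruction:
 *
 *   ConstantInst *c1 = graph->FindOrCreateConstant<uint64_t>(42);
 *   ConstantInst *c2 = graph->FindOrCreateConstant<uint64_t>(42);
 *   ASSERT(c1 == c2);  // the second lookup hits the existing constant
 */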

void InvalidateBlocksOrderAnalyzes(Graph *graph);
void MarkLoopExits(const Graph *graph, Marker marker);
void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rm_pred);
std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
}  // namespace panda::compiler
#endif  // COMPILER_OPTIMIZER_IR_GRAPH_H