1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
17 #define COMPILER_OPTIMIZER_IR_GRAPH_H
18
19 #include <algorithm>
20 #include <optional>
21 #include "compiler_events_gen.h"
22 #include "inst.h"
23 #include "marker.h"
24 #include "optimizer/pass_manager.h"
25 #include "utils/arena_containers.h"
26
27 namespace panda {
28 class Method;
29 class CodeAllocator;
30 } // namespace panda
31
32 namespace panda::compiler {
33 class BasicBlock;
34 class Graph;
35 class RuntimeInfo;
36 class PassManager;
37 class LivenessAnalyzer;
38 class DominatorsTree;
39 class Rpo;
40 class Loop;
41 class ParameterInfo;
42
/**
 * Specifies graph compilation mode.
 *
 * The mode is a set of independent boolean flags packed into a single
 * uint32_t. For every flag, DECLARE_GRAPH_MODE generates:
 *   - a static factory `GraphMode Name(bool set = true)`,
 *   - a setter `void SetName(bool v)`,
 *   - a getter `bool IsName() const`.
 * Modes can be combined with operator| (see below).
 */
class GraphMode {
public:
    explicit GraphMode(uint32_t value) : value_(value) {}

    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECLARE_GRAPH_MODE(name) \
    static GraphMode name(bool set = true) \
    { \
        return GraphMode(Flag##name ::Encode(set)); \
    } \
    void Set##name(bool v) \
    { \
        Flag##name ::Set(v, &value_); \
    } \
    bool Is##name() const \
    { \
        return Flag##name ::Get(value_); \
    }

    // The graph is compiled for on-stack replacement (OSR)
    DECLARE_GRAPH_MODE(Osr);
    // The graph is used in BytecodeOptimizer mode
    DECLARE_GRAPH_MODE(BytecodeOpt);
    // The method from dynamic language
    DECLARE_GRAPH_MODE(DynamicMethod);
    // Graph will be compiled with native calling convention
    DECLARE_GRAPH_MODE(Native);
    // FastPath from compiled code to runtime
    DECLARE_GRAPH_MODE(FastPath);
    // Boundary frame is used for compiled code
    DECLARE_GRAPH_MODE(Boundary);
    // Graph will be compiled for calling inside interpreter
    DECLARE_GRAPH_MODE(Interpreter);
    // Graph will be compiled for interpreter main loop
    DECLARE_GRAPH_MODE(InterpreterEntry);

#undef DECLARE_GRAPH_MODE

    // True for ordinary managed code, i.e. when none of the special
    // code-generation modes (native/fast-path/boundary/interpreter) is set.
    bool SupportManagedCode() const
    {
        return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
    }

    void Dump(std::ostream &stm);

private:
    // Each flag occupies the next single bit of value_.
    using FlagOsr = BitField<bool, 0, 1>;
    using FlagBytecodeOpt = FlagOsr::NextFlag;
    using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
    using FlagNative = FlagDynamicMethod::NextFlag;
    using FlagFastPath = FlagNative::NextFlag;
    using FlagBoundary = FlagFastPath::NextFlag;
    using FlagInterpreter = FlagBoundary::NextFlag;
    using FlagInterpreterEntry = FlagInterpreter::NextFlag;

    uint32_t value_ {0};

    // operator| needs direct access to value_ to merge flag sets.
    friend GraphMode operator|(GraphMode a, GraphMode b);
};
104
105 inline GraphMode operator|(GraphMode a, GraphMode b)
106 {
107 return GraphMode(a.value_ | b.value_);
108 }
109
110 using EncodeDataType = Span<uint8_t>;
111
112 class Graph final : public MarkerMgr {
113 public:
Graph(ArenaAllocator * allocator,ArenaAllocator * local_allocator,Arch arch)114 explicit Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch)
115 : Graph(allocator, local_allocator, arch, false)
116 {
117 }
118
Graph(ArenaAllocator * allocator,ArenaAllocator * local_allocator,Arch arch,bool osr_mode)119 Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool osr_mode)
120 : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), osr_mode)
121 {
122 }
123
Graph(ArenaAllocator * allocator,ArenaAllocator * local_allocator,Arch arch,bool dynamic_method,bool bytecode_opt)124 Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool dynamic_method, bool bytecode_opt)
125 : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), false, nullptr, dynamic_method,
126 bytecode_opt)
127 {
128 }
129
Graph(ArenaAllocator * allocator,ArenaAllocator * local_allocator,Arch arch,RuntimeInterface::MethodPtr method,RuntimeInterface * runtime,bool osr_mode)130 Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
131 RuntimeInterface *runtime, bool osr_mode)
132 : Graph(allocator, local_allocator, arch, method, runtime, osr_mode, nullptr)
133 {
134 }
135
136 Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
137 RuntimeInterface *runtime, bool osr_mode, Graph *parent, bool dynamic_method = false,
138 bool bytecode_opt = false)
139 : Graph(allocator, local_allocator, arch, method, runtime, parent,
140 GraphMode::Osr(osr_mode) | GraphMode::BytecodeOpt(bytecode_opt) |
141 GraphMode::DynamicMethod(dynamic_method))
142 {
143 }
144
Graph(ArenaAllocator * allocator,ArenaAllocator * local_allocator,Arch arch,RuntimeInterface::MethodPtr method,RuntimeInterface * runtime,Graph * parent,GraphMode mode)145 Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
146 RuntimeInterface *runtime, Graph *parent, GraphMode mode)
147 : ALLOCATOR(allocator),
148 LOCAL_ALLOCATOR(local_allocator),
149 arch_(arch),
150 vector_bb_(allocator->Adapter()),
151 throwable_insts_(allocator->Adapter()),
152 runtime_(runtime),
153 method_(method),
154 pass_manager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
155 event_writer_(runtime->GetClassNameFromMethod(method), runtime->GetMethodName(method)),
156 mode_(mode),
157 single_implementation_list_(allocator->Adapter()),
158 try_begin_blocks_(allocator->Adapter()),
159 spilled_constants_(allocator->Adapter()),
160 parent_graph_(parent)
161 {
162 SetNeedCleanup(true);
163 }
164
165 ~Graph() override;
166
CreateChildGraph(RuntimeInterface::MethodPtr method)167 Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
168 {
169 auto graph = GetAllocator()->New<Graph>(GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime(),
170 this, mode_);
171 return graph;
172 }
173
174 /// Get default runtime interface object
GetDefaultRuntime()175 static RuntimeInterface *GetDefaultRuntime()
176 {
177 static RuntimeInterface runtime_interface;
178 return &runtime_interface;
179 }
180
GetArch()181 Arch GetArch() const
182 {
183 return arch_;
184 }
185
186 void AddBlock(BasicBlock *block);
187 #ifndef NDEBUG
188 void AddBlock(BasicBlock *block, uint32_t id);
189 #endif
190 void DisconnectBlock(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);
191 void DisconnectBlockRec(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);
192
193 void EraseBlock(BasicBlock *block);
194 void RestoreBlock(BasicBlock *block);
195 // Remove empty block. Block must have one successor and no Phis.
196 void RemoveEmptyBlock(BasicBlock *block);
197
198 // Remove empty block. Block may have Phis and can't be a loop pre-header.
199 void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irr_loop = false);
200
201 // Remove block predecessors.
202 void RemovePredecessors(BasicBlock *block, bool remove_last_inst = true);
203
204 // Remove block successors.
205 void RemoveSuccessors(BasicBlock *block);
206
207 // Remove unreachable blocks.
208 void RemoveUnreachableBlocks();
209
210 // get end block
GetEndBlock()211 BasicBlock *GetEndBlock()
212 {
213 return end_block_;
214 }
215
GetEndBlock()216 BasicBlock *GetEndBlock() const
217 {
218 return end_block_;
219 }
220 // set end block
SetEndBlock(BasicBlock * end_block)221 void SetEndBlock(BasicBlock *end_block)
222 {
223 end_block_ = end_block;
224 }
HasEndBlock()225 bool HasEndBlock()
226 {
227 return end_block_ != nullptr;
228 }
229 // get start block
GetStartBlock()230 BasicBlock *GetStartBlock()
231 {
232 return start_block_;
233 }
GetStartBlock()234 BasicBlock *GetStartBlock() const
235 {
236 return start_block_;
237 }
238 // set start block
SetStartBlock(BasicBlock * start_block)239 void SetStartBlock(BasicBlock *start_block)
240 {
241 start_block_ = start_block;
242 }
243 // get vector_bb_
GetVectorBlocks()244 const ArenaVector<BasicBlock *> &GetVectorBlocks() const
245 {
246 return vector_bb_;
247 }
248
GetAliveBlocksCount()249 size_t GetAliveBlocksCount() const
250 {
251 return std::count_if(vector_bb_.begin(), vector_bb_.end(), [](BasicBlock *block) { return block != nullptr; });
252 }
253
GetPassManager()254 PassManager *GetPassManager()
255 {
256 return &pass_manager_;
257 }
GetPassManager()258 const PassManager *GetPassManager() const
259 {
260 return &pass_manager_;
261 }
262
263 const ArenaVector<BasicBlock *> &GetBlocksRPO() const;
264
265 const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;
266
267 template <class Callback>
268 void VisitAllInstructions(Callback callback);
269
270 /// Main allocator for graph, all related to Graph data should be allocated via this allocator.
GetAllocator()271 ArenaAllocator *GetAllocator() const
272 {
273 return ALLOCATOR;
274 }
275 /// Allocator for temproray usage, when allocated data is no longer needed after optimization/analysis finished.
GetLocalAllocator()276 ArenaAllocator *GetLocalAllocator() const
277 {
278 return LOCAL_ALLOCATOR;
279 }
IsDFConstruct()280 bool IsDFConstruct() const
281 {
282 return FlagDFConstruct::Get(bit_fields_);
283 }
SetDFConstruct()284 void SetDFConstruct()
285 {
286 FlagDFConstruct::Set(true, &bit_fields_);
287 }
288
IsAotMode()289 bool IsAotMode() const
290 {
291 return false;
292 }
293
IsOfflineCompilationMode()294 bool IsOfflineCompilationMode() const
295 {
296 return IsAotMode() || GetMode().IsInterpreter();
297 }
298
IsDefaultLocationsInit()299 bool IsDefaultLocationsInit() const
300 {
301 return FlagDefaultLocationsInit::Get(bit_fields_);
302 }
SetDefaultLocationsInit()303 void SetDefaultLocationsInit()
304 {
305 FlagDefaultLocationsInit::Set(true, &bit_fields_);
306 }
307 #ifndef NDEBUG
IsRegAllocApplied()308 bool IsRegAllocApplied() const
309 {
310 return FlagRegallocApplied::Get(bit_fields_);
311 }
SetRegAllocApplied()312 void SetRegAllocApplied()
313 {
314 FlagRegallocApplied::Set(true, &bit_fields_);
315 }
IsRegAccAllocApplied()316 bool IsRegAccAllocApplied() const
317 {
318 return FlagRegaccallocApplied::Get(bit_fields_);
319 }
SetRegAccAllocApplied()320 void SetRegAccAllocApplied()
321 {
322 FlagRegaccallocApplied::Set(true, &bit_fields_);
323 }
IsInliningComplete()324 bool IsInliningComplete() const
325 {
326 return FlagInliningComplete::Get(bit_fields_);
327 }
SetInliningComplete()328 void SetInliningComplete()
329 {
330 FlagInliningComplete::Set(true, &bit_fields_);
331 }
IsSchedulerComplete()332 bool IsSchedulerComplete() const
333 {
334 return FlagSchedulerComplete::Get(bit_fields_);
335 }
SetSchedulerComplete()336 void SetSchedulerComplete()
337 {
338 FlagSchedulerComplete::Set(true, &bit_fields_);
339 }
IsLowLevelInstructionsEnabled()340 bool IsLowLevelInstructionsEnabled() const
341 {
342 return FlagLowLevelInstnsEnabled::Get(bit_fields_);
343 }
SetLowLevelInstructionsEnabled()344 void SetLowLevelInstructionsEnabled()
345 {
346 FlagLowLevelInstnsEnabled::Set(true, &bit_fields_);
347 }
348 #else
IsRegAllocApplied()349 bool IsRegAllocApplied() const
350 {
351 return false;
352 }
353 #endif // NDEBUG
354
SetData(EncodeDataType data)355 void SetData(EncodeDataType data)
356 {
357 data_ = data;
358 }
359
GetData()360 EncodeDataType GetData() const
361 {
362 return data_;
363 }
364
GetData()365 EncodeDataType GetData()
366 {
367 return data_;
368 }
369
SetCodeInfo(Span<uint8_t> data)370 void SetCodeInfo(Span<uint8_t> data)
371 {
372 code_info_data_ = data.SubSpan<const uint8_t>(0, data.size());
373 }
374
GetCodeInfoData()375 Span<const uint8_t> GetCodeInfoData() const
376 {
377 return code_info_data_;
378 }
379
380 void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
381 {
382 if (prefix != nullptr) {
383 out << prefix;
384 }
385 out << "'\n used scalar regs: ";
386 if (used_regs_ != nullptr) {
387 for (unsigned i = 0; i < used_regs_->size(); ++i) {
388 if (used_regs_->at(i)) {
389 out << i << " ";
390 }
391 }
392 }
393 out << "\n used float regs: ";
394 if (used_regs_ != nullptr) {
395 for (unsigned i = 0; i < used_vregs_->size(); ++i) {
396 if (used_vregs_->at(i)) {
397 out << i << " ";
398 }
399 }
400 }
401 out << std::endl;
402 }
403
404 // Get registers mask which used in graph
405 template <DataType::Type reg_type>
GetUsedRegs()406 ArenaVector<bool> *GetUsedRegs() const
407 {
408 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
409 if constexpr (reg_type == DataType::INT64) {
410 return used_regs_;
411 }
412 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
413 if constexpr (reg_type == DataType::FLOAT64) {
414 return used_vregs_;
415 }
416 UNREACHABLE();
417 return nullptr;
418 }
419
SetRegUsage(Register reg,DataType::Type type)420 void SetRegUsage(Register reg, DataType::Type type)
421 {
422 ASSERT(reg != INVALID_REG);
423 if (DataType::IsFloatType(type)) {
424 SetUsedReg<DataType::FLOAT64>(reg);
425 } else {
426 SetUsedReg<DataType::INT64>(reg);
427 }
428 }
429
430 template <DataType::Type reg_type>
SetUsedReg(Register reg)431 void SetUsedReg(Register reg)
432 {
433 ArenaVector<bool> *graph_regs = nullptr;
434 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
435 if constexpr (reg_type == DataType::INT64) {
436 graph_regs = used_regs_;
437 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
438 } else if constexpr (reg_type == DataType::FLOAT64) {
439 graph_regs = used_vregs_;
440 } else {
441 UNREACHABLE();
442 }
443 ASSERT(reg < graph_regs->size());
444 (*graph_regs)[reg] = true;
445 }
446
447 template <DataType::Type reg_type>
InitUsedRegs(const ArenaVector<bool> * used_regs)448 void InitUsedRegs(const ArenaVector<bool> *used_regs)
449 {
450 if (used_regs == nullptr) {
451 return;
452 }
453 ArenaVector<bool> *graph_regs = nullptr;
454 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
455 if constexpr (reg_type == DataType::INT64) {
456 used_regs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
457 graph_regs = used_regs_;
458 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
459 } else if constexpr (reg_type == DataType::FLOAT64) {
460 used_vregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
461 graph_regs = used_vregs_;
462 } else {
463 UNREACHABLE();
464 }
465 graph_regs->resize(used_regs->size());
466 std::copy(used_regs->begin(), used_regs->end(), graph_regs->begin());
467 }
468
GetStackSlotsCount()469 uint32_t GetStackSlotsCount() const
470 {
471 return stack_slot_count_;
472 }
473
SetStackSlotsCount(uint32_t stack_slot_count)474 void SetStackSlotsCount(uint32_t stack_slot_count)
475 {
476 stack_slot_count_ = stack_slot_count;
477 }
478
UpdateStackSlotsCount(uint32_t stack_slot_count)479 void UpdateStackSlotsCount(uint32_t stack_slot_count)
480 {
481 stack_slot_count_ = std::max(stack_slot_count_, stack_slot_count);
482 }
483
484 uint32_t GetParametersSlotsCount() const;
485
GetExtSlotsStart()486 uint32_t GetExtSlotsStart() const
487 {
488 return ext_stack_slot_;
489 }
490
SetExtSlotsStart(uint32_t ext_stack_slot)491 void SetExtSlotsStart(uint32_t ext_stack_slot)
492 {
493 ext_stack_slot_ = ext_stack_slot;
494 }
495
496 BasicBlock *CreateEmptyBlock(uint32_t guest_pc = INVALID_PC);
497 BasicBlock *CreateEmptyBlock(BasicBlock *base_block);
498 #ifndef NDEBUG
499 BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guest_pc);
500 #endif
501 BasicBlock *CreateStartBlock();
502 BasicBlock *CreateEndBlock(uint32_t guest_pc = INVALID_PC);
GetFirstConstInst()503 ConstantInst *GetFirstConstInst()
504 {
505 return first_const_inst_;
506 }
SetFirstConstInst(ConstantInst * const_inst)507 void SetFirstConstInst(ConstantInst *const_inst)
508 {
509 first_const_inst_ = const_inst;
510 }
511
GetNullPtrInst()512 Inst *GetNullPtrInst() const
513 {
514 return nullptr_inst_;
515 }
HasNullPtrInst()516 bool HasNullPtrInst() const
517 {
518 return nullptr_inst_ != nullptr;
519 }
UnsetNullPtrInst()520 void UnsetNullPtrInst()
521 {
522 ASSERT(HasNullPtrInst());
523 nullptr_inst_ = nullptr;
524 }
525
526 /// Find constant in the list, return nullptr if not found
527 ConstantInst *FindConstant(DataType::Type type, uint64_t value);
528 /// Find constant in the list or create new one and insert at the end
529 template <typename T>
530 ConstantInst *FindOrCreateConstant(T value);
531
532 /**
533 * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
534 * @param inst Constant instruction to be added
535 * @return Found instruction or inst if not found
536 */
537 ConstantInst *FindOrAddConstant(ConstantInst *inst);
538
539 ParameterInst *AddNewParameter(uint16_t arg_number);
540
AddNewParameter(uint16_t arg_number,DataType::Type type)541 ParameterInst *AddNewParameter(uint16_t arg_number, DataType::Type type)
542 {
543 ParameterInst *param = AddNewParameter(arg_number);
544 param->SetType(type);
545 return param;
546 }
547
548 /*
549 * The function remove the ConstantInst from the graph list
550 * !NOTE ConstantInst isn't removed from BasicBlock list
551 */
552 void RemoveConstFromList(ConstantInst *const_inst);
553
GetSpilledConstant(ImmTableSlot slot)554 ConstantInst *GetSpilledConstant(ImmTableSlot slot)
555 {
556 ASSERT(static_cast<size_t>(slot) < spilled_constants_.size());
557 return spilled_constants_[slot];
558 }
559
AddSpilledConstant(ConstantInst * const_inst)560 ImmTableSlot AddSpilledConstant(ConstantInst *const_inst)
561 {
562 // Constant already in the table
563 auto current_slot = const_inst->GetImmTableSlot();
564 if (current_slot != INVALID_IMM_TABLE_SLOT) {
565 ASSERT(spilled_constants_[current_slot] == const_inst);
566 return current_slot;
567 }
568
569 auto count = spilled_constants_.size();
570 if (count >= MAX_NUM_IMM_SLOTS) {
571 return INVALID_IMM_TABLE_SLOT;
572 }
573 spilled_constants_.push_back(const_inst);
574 const_inst->SetImmTableSlot(count);
575 return ImmTableSlot(count);
576 }
577
FindSpilledConstantSlot(ConstantInst * const_inst)578 ImmTableSlot FindSpilledConstantSlot(ConstantInst *const_inst) const
579 {
580 auto slot = std::find(spilled_constants_.begin(), spilled_constants_.end(), const_inst);
581 if (slot == spilled_constants_.end()) {
582 return INVALID_IMM_TABLE_SLOT;
583 }
584 return std::distance(spilled_constants_.begin(), slot);
585 }
586
GetSpilledConstantsCount()587 size_t GetSpilledConstantsCount() const
588 {
589 return spilled_constants_.size();
590 }
591
HasAvailableConstantSpillSlots()592 bool HasAvailableConstantSpillSlots() const
593 {
594 return GetSpilledConstantsCount() < MAX_NUM_IMM_SLOTS;
595 }
596
begin()597 auto begin() // NOLINT(readability-identifier-naming)
598 {
599 return vector_bb_.begin();
600 }
begin()601 auto begin() const // NOLINT(readability-identifier-naming)
602 {
603 return vector_bb_.begin();
604 }
end()605 auto end() // NOLINT(readability-identifier-naming)
606 {
607 return vector_bb_.end();
608 }
end()609 auto end() const // NOLINT(readability-identifier-naming)
610 {
611 return vector_bb_.end();
612 }
613
614 void Dump(std::ostream *out) const;
615
GetRootLoop()616 Loop *GetRootLoop()
617 {
618 return root_loop_;
619 }
GetRootLoop()620 const Loop *GetRootLoop() const
621 {
622 return root_loop_;
623 }
624
SetRootLoop(Loop * root_loop)625 void SetRootLoop(Loop *root_loop)
626 {
627 root_loop_ = root_loop;
628 }
629
SetHasIrreducibleLoop(bool has_irr_loop)630 void SetHasIrreducibleLoop(bool has_irr_loop)
631 {
632 FlagIrredicibleLoop::Set(has_irr_loop, &bit_fields_);
633 }
634
SetHasInfiniteLoop(bool has_inf_loop)635 void SetHasInfiniteLoop(bool has_inf_loop)
636 {
637 FlagInfiniteLoop::Set(has_inf_loop, &bit_fields_);
638 }
639
SetHasFloatRegs()640 void SetHasFloatRegs()
641 {
642 FlagFloatRegs::Set(true, &bit_fields_);
643 }
644
645 bool HasLoop() const;
646 bool HasIrreducibleLoop() const;
647 bool HasInfiniteLoop() const;
648 bool HasFloatRegs() const;
649
650 /**
651 * Try-catch info
652 * Vector of begin try-blocks in order they were declared in the bytecode
653 */
AppendTryBeginBlock(const BasicBlock * block)654 void AppendTryBeginBlock(const BasicBlock *block)
655 {
656 try_begin_blocks_.push_back(block);
657 }
658
EraseTryBeginBlock(const BasicBlock * block)659 void EraseTryBeginBlock(const BasicBlock *block)
660 {
661 auto it = std::find(try_begin_blocks_.begin(), try_begin_blocks_.end(), block);
662 if (it == try_begin_blocks_.end()) {
663 ASSERT(false && "Trying to remove non try_begin block");
664 return;
665 }
666 try_begin_blocks_.erase(it);
667 }
668
GetTryBeginBlocks()669 const auto &GetTryBeginBlocks() const
670 {
671 return try_begin_blocks_;
672 }
673
AppendThrowableInst(const Inst * inst,BasicBlock * catch_handler)674 void AppendThrowableInst(const Inst *inst, BasicBlock *catch_handler)
675 {
676 auto it = throwable_insts_.emplace(inst, GetAllocator()->Adapter()).first;
677 it->second.push_back(catch_handler);
678 }
679
IsInstThrowable(const Inst * inst)680 bool IsInstThrowable(const Inst *inst) const
681 {
682 return throwable_insts_.count(inst) > 0;
683 }
684
685 void RemoveThrowableInst(const Inst *inst);
686 void ReplaceThrowableInst(Inst *old_inst, Inst *new_inst);
687
GetThrowableInstHandlers(const Inst * inst)688 const auto &GetThrowableInstHandlers(const Inst *inst) const
689 {
690 ASSERT(IsInstThrowable(inst));
691 return throwable_insts_.at(inst);
692 }
693
ClearTryCatchInfo()694 void ClearTryCatchInfo()
695 {
696 throwable_insts_.clear();
697 try_begin_blocks_.clear();
698 }
699
700 void DumpThrowableInsts(std::ostream *out) const;
701
702 /**
703 * Run pass specified by template argument T.
704 * Optimization passes might take additional arguments that will passed to Optimization's constructor.
705 * Analyses can't take additional arguments.
706 * @tparam T Type of pass
707 * @param args Additional arguments for optimizations passes
708 * @return true if pass was successful
709 */
710 template <typename T, typename... Args>
RunPass(Args...args)711 bool RunPass(Args... args)
712 {
713 ASSERT(GetPassManager());
714 return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
715 }
716 template <typename T, typename... Args>
RunPass(Args...args)717 bool RunPass(Args... args) const
718 {
719 ASSERT(GetPassManager());
720 return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
721 }
722
723 template <typename T>
RunPass(T * pass)724 bool RunPass(T *pass)
725 {
726 ASSERT(GetPassManager());
727 return pass_manager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
728 }
729
730 /**
731 * Get analysis instance.
732 * All analyses are reside in Graph object in composition relationship.
733 * @tparam T Type of analysis
734 * @return Reference to analysis instance
735 */
736 template <typename T>
GetAnalysis()737 T &GetAnalysis()
738 {
739 ASSERT(GetPassManager());
740 return GetPassManager()->GetAnalysis<T>();
741 }
742 template <typename T>
GetAnalysis()743 const T &GetAnalysis() const
744 {
745 ASSERT(GetPassManager());
746 return pass_manager_.GetAnalysis<T>();
747 }
748
749 /**
750 * Same as GetAnalysis but additionaly checck that analysis in valid state.
751 * @tparam T Type of analysis
752 * @return Reference to analysis instance
753 */
754 template <typename T>
GetValidAnalysis()755 T &GetValidAnalysis()
756 {
757 RunPass<T>();
758 ASSERT(IsAnalysisValid<T>());
759 return GetAnalysis<T>();
760 }
761 template <typename T>
GetValidAnalysis()762 const T &GetValidAnalysis() const
763 {
764 RunPass<T>();
765 ASSERT(IsAnalysisValid<T>());
766 return GetAnalysis<T>();
767 }
768
769 /**
770 * Return true if Analysis valid, false otherwise
771 * @tparam T Type of analysis
772 */
773 template <typename T>
IsAnalysisValid()774 bool IsAnalysisValid() const
775 {
776 return GetAnalysis<T>().IsValid();
777 }
778
779 /**
780 * Reset valid state of specified analysis
781 * @tparam T Type of analysis
782 */
783 template <typename T>
InvalidateAnalysis()784 void InvalidateAnalysis()
785 {
786 ASSERT(GetPassManager());
787 GetPassManager()->GetAnalysis<T>().SetValid(false);
788 }
789
790 /// Accessors to the number of current instruction id.
GetCurrentInstructionId()791 auto GetCurrentInstructionId() const
792 {
793 return instr_current_id_;
794 }
SetCurrentInstructionId(size_t v)795 auto SetCurrentInstructionId(size_t v)
796 {
797 instr_current_id_ = v;
798 }
799
800 /// RuntimeInterface accessors
GetRuntime()801 RuntimeInterface *GetRuntime() const
802 {
803 return runtime_;
804 }
SetRuntime(RuntimeInterface * runtime)805 void SetRuntime(RuntimeInterface *runtime)
806 {
807 runtime_ = runtime;
808 }
GetMethod()809 auto GetMethod() const
810 {
811 return method_;
812 }
SetMethod(RuntimeInterface::MethodPtr method)813 auto SetMethod(RuntimeInterface::MethodPtr method)
814 {
815 method_ = method;
816 }
817
818 void ResetParameterInfo();
819
GetEventWriter()820 EventWriter &GetEventWriter()
821 {
822 return event_writer_;
823 }
824
825 // clang-format off
826
827 /**
828 * Create instruction by opcode
829 */
CreateInst(Opcode opc)830 [[nodiscard]] Inst* CreateInst(Opcode opc) const
831 {
832 switch (opc) {
833 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
834 #define INST_DEF(OPCODE, BASE, ...) \
835 case Opcode::OPCODE: { \
836 auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE); \
837 inst->SetId(instr_current_id_++); \
838 return inst; \
839 }
840 OPCODE_LIST(INST_DEF)
841
842 #undef INST_DEF
843 default:
844 return nullptr;
845 }
846 }
847 /**
848 * Define creation methods for all opcodes
849 */
850 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
851 #define INST_DEF(OPCODE, BASE, ...) \
852 template <typename... Args> \
853 [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const { \
854 auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE, std::forward<Args>(args)...); \
855 inst->SetId(instr_current_id_++); \
856 return inst; \
857 }
OPCODE_LIST(INST_DEF)858 OPCODE_LIST(INST_DEF)
859
860 #undef INST_DEF
861 // clang-format on
862
863 uint32_t GetBitFields()
864 {
865 return bit_fields_;
866 }
867
SetBitFields(uint32_t bit_fields)868 void SetBitFields(uint32_t bit_fields)
869 {
870 bit_fields_ = bit_fields;
871 }
872
NeedCleanup()873 bool NeedCleanup() const
874 {
875 return FlagNeedCleanup::Get(bit_fields_);
876 }
877
SetNeedCleanup(bool v)878 void SetNeedCleanup(bool v)
879 {
880 FlagNeedCleanup::Set(v, &bit_fields_);
881 }
882
IsOsrMode()883 bool IsOsrMode() const
884 {
885 return mode_.IsOsr();
886 }
887
IsBytecodeOptimizer()888 bool IsBytecodeOptimizer() const
889 {
890 return mode_.IsBytecodeOpt();
891 }
892
IsDynamicMethod()893 bool IsDynamicMethod() const
894 {
895 return mode_.IsDynamicMethod();
896 }
897
SupportManagedCode()898 bool SupportManagedCode() const
899 {
900 return mode_.SupportManagedCode();
901 }
902
GetMode()903 GraphMode GetMode() const
904 {
905 return mode_;
906 }
907
SetMode(GraphMode mode)908 void SetMode(GraphMode mode)
909 {
910 mode_ = mode;
911 }
912
913 #ifndef NDEBUG
GetCompilerMode()914 compiler::inst_modes::Mode GetCompilerMode()
915 {
916 if (IsBytecodeOptimizer()) {
917 return compiler::inst_modes::BYTECODE_OPT;
918 }
919 if (SupportManagedCode()) {
920 return compiler::inst_modes::JIT_AOT;
921 }
922 return compiler::inst_modes::IRTOC;
923 }
924 #endif
925
AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)926 void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
927 {
928 single_implementation_list_.push_back(method);
929 }
930
SetDynamicMethod()931 void SetDynamicMethod()
932 {
933 mode_.SetDynamicMethod(true);
934 }
935
GetSingleImplementationList()936 auto &GetSingleImplementationList()
937 {
938 return single_implementation_list_;
939 }
940
GetParentGraph()941 Graph *GetParentGraph()
942 {
943 return parent_graph_;
944 }
945
GetOutermostParentGraph()946 Graph *GetOutermostParentGraph()
947 {
948 auto graph = this;
949 while (graph->GetParentGraph() != nullptr) {
950 graph = graph->GetParentGraph();
951 }
952 return graph;
953 }
954
SetVRegsCount(size_t count)955 void SetVRegsCount(size_t count)
956 {
957 vregs_count_ = count;
958 }
959
GetVRegsCount()960 size_t GetVRegsCount() const
961 {
962 return vregs_count_;
963 }
964
965 int64_t GetBranchCounter(const BasicBlock *block, bool true_succ);
966
967 /**
968 * This class provides methods for ranged-based `for` loop over all parameters in the graph.
969 */
970 class ParameterList {
971 public:
972 class Iterator {
973 public:
Iterator(Inst * inst)974 explicit Iterator(Inst *inst) : inst_(inst) {}
975
976 Iterator &operator++()
977 {
978 for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
979 inst_ = inst_->GetNext()) {
980 }
981 return *this;
982 }
983 bool operator!=(const Iterator &other)
984 {
985 return inst_ != other.inst_;
986 }
987 Inst *operator*()
988 {
989 return inst_;
990 }
991 Inst *operator->()
992 {
993 return inst_;
994 }
995
996 private:
997 Inst *inst_ {nullptr};
998 };
999
ParameterList(const Graph * graph)1000 explicit ParameterList(const Graph *graph) : graph_(graph) {}
1001
1002 // NOLINTNEXTLINE(readability-identifier-naming)
1003 Iterator begin();
1004 // NOLINTNEXTLINE(readability-identifier-naming)
end()1005 static Iterator end()
1006 {
1007 return Iterator(nullptr);
1008 }
1009
1010 private:
1011 const Graph *graph_ {nullptr};
1012 };
1013
1014 /**
1015 * Get list of all parameters
1016 * @return instance of the ParameterList class
1017 */
GetParameters()1018 ParameterList GetParameters() const
1019 {
1020 return ParameterList(this);
1021 }
1022
1023 void InitDefaultLocations();
1024
1025 private:
1026 void AddConstInStartBlock(ConstantInst *const_inst);
1027
1028 NO_MOVE_SEMANTIC(Graph);
1029 NO_COPY_SEMANTIC(Graph);
1030
1031 private:
1032 ArenaAllocator *const ALLOCATOR;
1033 ArenaAllocator *const LOCAL_ALLOCATOR;
1034
1035 Arch arch_ {RUNTIME_ARCH};
1036
1037 // List of blocks in insertion order.
1038 ArenaVector<BasicBlock *> vector_bb_;
1039 BasicBlock *start_block_ {nullptr};
1040 BasicBlock *end_block_ {nullptr};
1041
1042 Loop *root_loop_ {nullptr};
1043
1044 uint32_t bit_fields_ {0};
1045 using FlagDFConstruct = BitField<bool, 0, 1>;
1046 using FlagNeedCleanup = FlagDFConstruct::NextFlag;
1047 using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
1048 using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
1049 using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
1050 using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
1051 #ifndef NDEBUG
1052 using FlagRegallocApplied = FlagDefaultLocationsInit::NextFlag;
1053 using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
1054 using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
1055 using FlagSchedulerComplete = FlagInliningComplete::NextFlag;
1056 using FlagLowLevelInstnsEnabled = FlagSchedulerComplete::NextFlag;
1057 #endif // NDEBUG
1058
1059 // codegen data
1060 EncodeDataType data_;
1061 Span<const uint8_t> code_info_data_;
1062 ArenaVector<bool> *used_regs_ {nullptr};
1063 ArenaVector<bool> *used_vregs_ {nullptr};
1064
1065 // TODO (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
1066 ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwable_insts_;
1067
1068 mutable size_t instr_current_id_ {0};
1069 // first constant instruction in graph !TODO rewrite it to hash-map
1070 ConstantInst *first_const_inst_ {nullptr};
1071 Inst *nullptr_inst_ {nullptr};
1072
1073 RuntimeInterface *runtime_ {nullptr};
1074 RuntimeInterface::MethodPtr method_ {nullptr};
1075
1076 ParameterInfo *param_info_ {nullptr};
1077
1078 mutable PassManager pass_manager_;
1079 EventWriter event_writer_;
1080
1081 GraphMode mode_;
1082
1083 ArenaVector<RuntimeInterface::MethodPtr> single_implementation_list_;
1084 ArenaVector<const BasicBlock *> try_begin_blocks_;
1085 ArenaVector<ConstantInst *> spilled_constants_;
1086 // Graph that inlines this graph
1087 Graph *parent_graph_ {nullptr};
1088 // Number of used stack slots
1089 uint32_t stack_slot_count_ {0};
1090 // Number of used stack slots for parameters
1091 uint32_t param_slots_count_ {0};
1092 // First language extension slot
1093 uint32_t ext_stack_slot_ {0};
1094 // Number of the virtual registers used in the compiled method (inlined methods aren't included).
1095 uint32_t vregs_count_ {0};
1096 };
1097
1098 class MarkerHolder {
1099 public:
1100 NO_COPY_SEMANTIC(MarkerHolder);
1101 NO_MOVE_SEMANTIC(MarkerHolder);
1102
MarkerHolder(const Graph * graph)1103 explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
1104 {
1105 ASSERT(marker_ != UNDEF_MARKER);
1106 }
1107
~MarkerHolder()1108 ~MarkerHolder()
1109 {
1110 graph_->EraseMarker(marker_);
1111 }
1112
GetMarker()1113 Marker GetMarker()
1114 {
1115 return marker_;
1116 }
1117
1118 private:
1119 const Graph *graph_;
1120 Marker marker_ {UNDEF_MARKER};
1121 };
1122
1123 template <typename T>
FindOrCreateConstant(T value)1124 ConstantInst *Graph::FindOrCreateConstant(T value)
1125 {
1126 bool is_support_int32 = IsBytecodeOptimizer();
1127 if (first_const_inst_ == nullptr) {
1128 first_const_inst_ = CreateInstConstant(value, is_support_int32);
1129 AddConstInStartBlock(first_const_inst_);
1130 return first_const_inst_;
1131 }
1132 ConstantInst *current_const = first_const_inst_;
1133 ConstantInst *prev_const = nullptr;
1134 while (current_const != nullptr) {
1135 if (current_const->IsEqualConst(value, is_support_int32)) {
1136 return current_const;
1137 }
1138 prev_const = current_const;
1139 current_const = current_const->GetNextConst();
1140 }
1141 ASSERT(prev_const != nullptr);
1142 auto *new_const = CreateInstConstant(value, is_support_int32);
1143 AddConstInStartBlock(new_const);
1144
1145 prev_const->SetNextConst(new_const);
1146 return new_const;
1147 }
1148
1149 void InvalidateBlocksOrderAnalyzes(Graph *graph);
1150 void MarkLoopExits(const Graph *graph, Marker marker);
1151 void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rm_pred);
1152 std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
1153 } // namespace panda::compiler
1154
1155 #endif // COMPILER_OPTIMIZER_IR_GRAPH_H
1156