1 /*
2  * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
17 #define COMPILER_OPTIMIZER_IR_GRAPH_H
18 
19 #include "aot_data.h"
20 #include "basicblock.h"
21 #include "compiler_events_gen.h"
22 #include "inst.h"
23 #include "marker.h"
24 #include "optimizer/code_generator/method_properties.h"
25 #include "optimizer/pass_manager.h"
26 #include "utils/arena_containers.h"
27 #include <algorithm>
28 #include <optional>
29 
30 // defines required for AbcKit
31 #if !defined(NDEBUG) || defined(ENABLE_LIBABCKIT)
32 #define COMPILER_DEBUG_CHECKS
33 #endif
34 #ifdef ENABLE_LIBABCKIT
35 // CC-OFFNXT(G.PRE.02) should be with define
36 #define ABCKIT_MODE_CHECK(cond, action) \
37     if (cond) {                         \
38         action;                         \
39     }
40 #else
41 // CC-OFFNXT(G.PRE.02) should be with define
42 #define ABCKIT_MODE_CHECK(cond, action)
43 #endif
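// Usage sketch for ABCKIT_MODE_CHECK (illustrative; `graph` and `inst` are hypothetical
// pointers): the action runs only when libabckit support is compiled in and the condition
// holds; otherwise the whole statement compiles away.
//
//     ABCKIT_MODE_CHECK(graph->IsAbcKit(), SetAbcKitFlags(inst));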
44 
45 namespace ark {
46 class Method;
47 class CodeAllocator;
48 }  // namespace ark
49 
50 namespace ark::compiler {
51 class BasicBlock;
52 class Graph;
53 class RuntimeInfo;
54 class PassManager;
55 class LivenessAnalyzer;
56 class DominatorsTree;
57 class Rpo;
58 class BoundsRangeInfo;
59 class Loop;
60 class CodeInfoBuilder;
61 
62 class Encoder;
63 class CallingConvention;
64 class ParameterInfo;
65 class RegistersDescription;
66 class RelocationHandler;
67 
68 enum AliasType : uint8_t;
69 
70 /// Specifies graph compilation mode.
71 class GraphMode {
72 public:
73     explicit GraphMode(uint32_t value) : value_(value) {}
74 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
75 #define DECLARE_GRAPH_MODE_MODIFIERS(name)     \
76     void Set##name(bool v)                     \
77     {                                          \
78         Flag##name ::Set(v, &value_);          \
79     }                                          \
80     bool Is##name() const                      \
81     {                                          \
82         /* CC-OFFNXT(G.PRE.05) function gen */ \
83         return Flag##name ::Get(value_);       \
84     }
85 
86 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
87 #define DECLARE_GRAPH_MODE(name)                    \
88     static GraphMode name(bool set = true)          \
89     {                                               \
90         /* CC-OFFNXT(G.PRE.05) function gen */      \
91         return GraphMode(Flag##name ::Encode(set)); \
92     }                                               \
93     DECLARE_GRAPH_MODE_MODIFIERS(name)
94 
95     DECLARE_GRAPH_MODE(Osr);
96     // The graph is used in BytecodeOptimizer mode
97     DECLARE_GRAPH_MODE(BytecodeOpt);
98     // The method is from a dynamic language
99     DECLARE_GRAPH_MODE(DynamicMethod);
100     // The method is from a dynamic language and uses the common calling convention
101     DECLARE_GRAPH_MODE(DynamicStub);
102     // Graph will be compiled with native calling convention
103     DECLARE_GRAPH_MODE(Native);
104     // FastPath from compiled code to runtime
105     DECLARE_GRAPH_MODE(FastPath);
106     // Boundary frame is used for compiled code
107     DECLARE_GRAPH_MODE(Boundary);
108     // Graph will be compiled for calling inside interpreter
109     DECLARE_GRAPH_MODE(Interpreter);
110     // Graph will be compiled for interpreter main loop
111     DECLARE_GRAPH_MODE(InterpreterEntry);
112     // Graph will be compiled for abckit
113     DECLARE_GRAPH_MODE(AbcKit);
114 
115 #undef DECLARE_GRAPH_MODE
116 #undef DECLARE_GRAPH_MODE_MODIFIERS
117 
118     bool SupportManagedCode() const
119     {
120         return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
121     }
122 
123     void Dump(std::ostream &stm);
124 
125 private:
126     using FlagOsr = BitField<bool, 0, 1>;
127     using FlagBytecodeOpt = FlagOsr::NextFlag;
128     using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
129     using FlagDynamicStub = FlagDynamicMethod::NextFlag;
130     using FlagNative = FlagDynamicStub::NextFlag;
131     using FlagFastPath = FlagNative::NextFlag;
132     using FlagBoundary = FlagFastPath::NextFlag;
133     using FlagInterpreter = FlagBoundary::NextFlag;
134     using FlagInterpreterEntry = FlagInterpreter::NextFlag;
135     using FlagAbcKit = FlagInterpreterEntry::NextFlag;
136 
137     uint32_t value_ {0};
138 
139     friend GraphMode operator|(GraphMode a, GraphMode b);
140 };
141 
142 inline GraphMode operator|(GraphMode a, GraphMode b)
143 {
144     return GraphMode(a.value_ | b.value_);
145 }
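// A minimal sketch of composing and querying a GraphMode (the flag choice is illustrative):
//
//     GraphMode mode = GraphMode::Osr() | GraphMode::DynamicMethod();
//     ASSERT(mode.IsOsr() && mode.IsDynamicMethod() && !mode.IsBytecodeOpt());
//     mode.SetBytecodeOpt(true);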
146 
147 using EncodeDataType = Span<uint8_t>;
148 
149 class PANDA_PUBLIC_API Graph final : public MarkerMgr {
150 public:
151     struct GraphArgs {
152         ArenaAllocator *allocator;
153         ArenaAllocator *localAllocator;
154         Arch arch;
155         RuntimeInterface::MethodPtr method;
156         RuntimeInterface *runtime;
157     };
158 
159     explicit Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch)
160         : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, false)
161     {
162     }
163 
164     Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool osrMode)
165         : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, osrMode)
166     {
167     }
168 
169     Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool dynamicMethod, bool bytecodeOpt)
170         : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, nullptr, false, dynamicMethod,
171                 bytecodeOpt)
172     {
173     }
174 
175     Graph(const GraphArgs &args, bool osrMode) : Graph(args, nullptr, osrMode) {}
176 
177     Graph(const GraphArgs &args, Graph *parent, bool osrMode, bool dynamicMethod = false, bool bytecodeOpt = false)
178         : Graph(args, parent,
179                 GraphMode::Osr(osrMode) | GraphMode::BytecodeOpt(bytecodeOpt) | GraphMode::DynamicMethod(dynamicMethod))
180     {
181     }
182 
183     Graph(const GraphArgs &args, Graph *parent, GraphMode mode)
184         : allocator_(args.allocator),
185           localAllocator_(args.localAllocator),
186           arch_(args.arch),
187           vectorBb_(args.allocator->Adapter()),
188           throwableInsts_(args.allocator->Adapter()),
189           runtime_(args.runtime),
190           method_(args.method),
191           passManager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
192           eventWriter_(args.runtime->GetClassNameFromMethod(args.method), args.runtime->GetMethodName(args.method)),
193           mode_(mode),
194           singleImplementationList_(args.allocator->Adapter()),
195           tryBeginBlocks_(args.allocator->Adapter()),
196           throwBlocks_(args.allocator->Adapter()),
197           spilledConstants_(args.allocator->Adapter()),
198           parentGraph_(parent)
199     {
200         SetNeedCleanup(true);
201         SetCanOptimizeNativeMethods(g_options.IsCompilerOptimizeNativeCalls() && (GetArch() != Arch::AARCH32) &&
202                                     GetRuntime()->IsNativeMethodOptimizationEnabled());
203     }
204 
205     ~Graph() override;
206 
207     Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
208     {
209         auto graph = GetAllocator()->New<Graph>(
210             GraphArgs {GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime()}, this, mode_);
211         ASSERT(graph != nullptr);
212         graph->SetAotData(GetAotData());
213         return graph;
214     }
215 
216     /// Get default runtime interface object
217     static RuntimeInterface *GetDefaultRuntime()
218     {
219         static RuntimeInterface runtimeInterface;
220         return &runtimeInterface;
221     }
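    // Construction sketch (assumes `allocator`/`localAllocator` are live ArenaAllocator
    // instances and `method`/`runtime` were obtained via the runtime interface; the arch
    // value and `calleeMethod` are illustrative):
    //
    //     Graph::GraphArgs args {allocator, localAllocator, Arch::AARCH64, method, runtime};
    //     auto *graph = allocator->New<Graph>(args, /* osrMode */ false);
    //     auto *inlinee = graph->CreateChildGraph(calleeMethod);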
222 
223     Arch GetArch() const
224     {
225         return arch_;
226     }
227 
228     SourceLanguage GetLanguage() const
229     {
230         return lang_;
231     }
232 
233     void SetLanguage(SourceLanguage lang)
234     {
235         lang_ = lang;
236     }
237 
238     void AddBlock(BasicBlock *block);
239 #ifndef NDEBUG
240     void AddBlock(BasicBlock *block, uint32_t id);
241 #endif
242     void DisconnectBlock(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
243     void DisconnectBlockRec(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
244 
245     void EraseBlock(BasicBlock *block);
246     void RestoreBlock(BasicBlock *block);
247     // Remove empty block. Block must have one successor and no Phis.
248     void RemoveEmptyBlock(BasicBlock *block);
249 
250     // Remove empty block. Block may have Phis and can't be a loop pre-header.
251     void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irrLoop = false);
252 
253     // Remove block predecessors.
254     void RemovePredecessors(BasicBlock *block, bool removeLastInst = true);
255 
256     // Remove block successors.
257     void RemoveSuccessors(BasicBlock *block);
258 
259     // Remove unreachable blocks.
260     void RemoveUnreachableBlocks();
261 
262     // get end block
263     BasicBlock *GetEndBlock()
264     {
265         return endBlock_;
266     }
267     // set end block
268     void SetEndBlock(BasicBlock *endBlock)
269     {
270         endBlock_ = endBlock;
271     }
272     bool HasEndBlock()
273     {
274         return endBlock_ != nullptr;
275     }
276     // get start block
277     BasicBlock *GetStartBlock()
278     {
279         return startBlock_;
280     }
281     BasicBlock *GetStartBlock() const
282     {
283         return startBlock_;
284     }
285     // set start block
286     void SetStartBlock(BasicBlock *startBlock)
287     {
288         startBlock_ = startBlock;
289     }
290     // get vector_bb_
291     const ArenaVector<BasicBlock *> &GetVectorBlocks() const
292     {
293         return vectorBb_;
294     }
295 
296     size_t GetAliveBlocksCount() const
297     {
298         return std::count_if(vectorBb_.begin(), vectorBb_.end(), [](BasicBlock *block) { return block != nullptr; });
299     }
300 
301     PassManager *GetPassManager()
302     {
303         return &passManager_;
304     }
305     const PassManager *GetPassManager() const
306     {
307         return &passManager_;
308     }
309 
310     const BoundsRangeInfo *GetBoundsRangeInfo() const;
311 
312     PANDA_PUBLIC_API const ArenaVector<BasicBlock *> &GetBlocksRPO() const;
313 
314     PANDA_PUBLIC_API const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;
315 
316     template <class Callback>
317     void VisitAllInstructions(Callback callback);
318 
319     AliasType CheckInstAlias(Inst *mem1, Inst *mem2);
320 
321     /// Main allocator for the graph; all data related to the Graph should be allocated via this allocator.
322     ArenaAllocator *GetAllocator() const
323     {
324         return allocator_;
325     }
326     /// Allocator for temporary data that is no longer needed after an optimization/analysis has finished.
327     ArenaAllocator *GetLocalAllocator() const
328     {
329         return localAllocator_;
330     }
331     bool IsDFConstruct() const
332     {
333         return FlagDFConstruct::Get(bitFields_);
334     }
335     void SetDFConstruct()
336     {
337         FlagDFConstruct::Set(true, &bitFields_);
338     }
339 
340     void SetAotData(AotData *data)
341     {
342         aotData_ = data;
343     }
344     AotData *GetAotData()
345     {
346         return aotData_;
347     }
348     const AotData *GetAotData() const
349     {
350         return aotData_;
351     }
352 
353     bool IsAotMode() const
354     {
355         return aotData_ != nullptr;
356     }
357 
358     bool IsAotNoChaMode() const
359     {
360         return aotData_ != nullptr && !aotData_->GetUseCha();
361     }
362 
363     bool IsOfflineCompilationMode() const
364     {
365         return IsAotMode() || GetMode().IsInterpreter() || GetMode().IsFastPath() || GetMode().IsInterpreterEntry();
366     }
367 
368     bool IsDefaultLocationsInit() const
369     {
370         return FlagDefaultLocationsInit::Get(bitFields_);
371     }
372     void SetDefaultLocationsInit()
373     {
374         FlagDefaultLocationsInit::Set(true, &bitFields_);
375     }
376     bool IsIrtocPrologEpilogOptimized() const
377     {
378         return FlagIrtocPrologEpilogOptimized::Get(bitFields_);
379     }
380     void SetIrtocPrologEpilogOptimized()
381     {
382         FlagIrtocPrologEpilogOptimized::Set(true, &bitFields_);
383     }
384     bool IsUnrollComplete() const
385     {
386         return FlagUnrollComplete::Get(bitFields_);
387     }
388     void SetUnrollComplete()
389     {
390         FlagUnrollComplete::Set(true, &bitFields_);
391     }
392 #ifdef COMPILER_DEBUG_CHECKS
393     bool IsRegAllocApplied() const
394     {
395         return FlagRegallocApplied::Get(bitFields_);
396     }
397     void SetRegAllocApplied()
398     {
399         FlagRegallocApplied::Set(true, &bitFields_);
400     }
401     bool IsRegAccAllocApplied() const
402     {
403         return FlagRegaccallocApplied::Get(bitFields_);
404     }
405     void SetRegAccAllocApplied()
406     {
407         FlagRegaccallocApplied::Set(true, &bitFields_);
408     }
409     bool IsInliningComplete() const
410     {
411         return FlagInliningComplete::Get(bitFields_) || IsOsrMode();
412     }
413     void SetInliningComplete()
414     {
415         FlagInliningComplete::Set(true, &bitFields_);
416     }
417     bool IsLowLevelInstructionsEnabled() const
418     {
419         return FlagLowLevelInstnsEnabled::Get(bitFields_);
420     }
421     void SetLowLevelInstructionsEnabled()
422     {
423         FlagLowLevelInstnsEnabled::Set(true, &bitFields_);
424     }
425     bool IsDynUnitTest() const
426     {
427         return FlagDynUnitTest::Get(bitFields_);
428     }
429     void SetDynUnitTestFlag()
430     {
431         FlagDynUnitTest::Set(true, &bitFields_);
432     }
433     bool IsUnitTest() const
434     {
435         static constexpr uintptr_t FAKE_FILE = 0xdeadf;
436         return method_ == nullptr || ToUintPtr(runtime_->GetBinaryFileForMethod(method_)) == FAKE_FILE;
437     }
438 #else
439     bool IsRegAllocApplied() const
440     {
441         return false;
442     }
443 #endif  // COMPILER_DEBUG_CHECKS
444 
445     bool IsThrowApplied() const
446     {
447         return FlagThrowApplied::Get(bitFields_);
448     }
449     void SetThrowApplied()
450     {
451         FlagThrowApplied::Set(true, &bitFields_);
452     }
453     void UnsetThrowApplied()
454     {
455         FlagThrowApplied::Set(false, &bitFields_);
456     }
457 
458 #ifdef PANDA_COMPILER_DEBUG_INFO
459     bool IsLineDebugInfoEnabled() const
460     {
461         return FlagLineDebugInfoEnabled::Get(bitFields_);
462     }
463     void SetLineDebugInfoEnabled()
464     {
465         FlagLineDebugInfoEnabled::Set(true, &bitFields_);
466     }
467 #endif
468 
469     void SetCode(EncodeDataType data)
470     {
471         data_ = data;
472     }
473 
474     EncodeDataType GetData() const
475     {
476         return data_;
477     }
478 
479     EncodeDataType GetData()
480     {
481         return data_;
482     }
483 
484     EncodeDataType GetCode() const
485     {
486         return data_;
487     }
488 
489     EncodeDataType GetCode()
490     {
491         return data_;
492     }
493 
494     void SetCodeInfo(Span<uint8_t> data)
495     {
496         codeInfoData_ = data.SubSpan<const uint8_t>(0, data.size());
497     }
498 
499     Span<const uint8_t> GetCodeInfoData() const
500     {
501         return codeInfoData_;
502     }
503 
504     void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
505     {
506         if (prefix != nullptr) {
507             out << prefix;
508         }
509         out << "'\n  used scalar regs: ";
510         if (usedRegs_ != nullptr) {
511             for (unsigned i = 0; i < usedRegs_->size(); ++i) {
512                 if (usedRegs_->at(i)) {
513                     out << i << " ";
514                 }
515             }
516         }
517         out << "\n  used float  regs: ";
518         if (usedVregs_ != nullptr) {
519             for (unsigned i = 0; i < usedVregs_->size(); ++i) {
520                 if (usedVregs_->at(i)) {
521                     out << i << " ";
522                 }
523             }
524         }
525         out << std::endl;
526     }
527 
528     // Get mask of registers used in the graph
529     template <DataType::Type REG_TYPE>
530     ArenaVector<bool> *GetUsedRegs() const
531     {
532         // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
533         if constexpr (REG_TYPE == DataType::INT64) {
534             return usedRegs_;
535         }
536         // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
537         if constexpr (REG_TYPE == DataType::FLOAT64) {
538             return usedVregs_;
539         }
540         UNREACHABLE();
541         return nullptr;
542     }
543 
544     void SetRegUsage(Register reg, DataType::Type type)
545     {
546         ASSERT(reg != GetInvalidReg());
547         if (DataType::IsFloatType(type)) {
548             SetUsedReg<DataType::FLOAT64>(reg);
549         } else {
550             SetUsedReg<DataType::INT64>(reg);
551         }
552     }
553 
554     void SetRegUsage(Location location)
555     {
556         ASSERT(location.IsFixedRegister());
557         if (location.IsFpRegister()) {
558             SetUsedReg<DataType::FLOAT64>(location.GetValue());
559         } else {
560             SetUsedReg<DataType::INT64>(location.GetValue());
561         }
562     }
563 
564     template <DataType::Type REG_TYPE>
565     void SetUsedReg(Register reg)
566     {
567         ArenaVector<bool> *graphRegs = nullptr;
568         // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
569         if constexpr (REG_TYPE == DataType::INT64) {
570             graphRegs = usedRegs_;
571             // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
572         } else if constexpr (REG_TYPE == DataType::FLOAT64) {
573             graphRegs = usedVregs_;
574         } else {
575             UNREACHABLE();
576         }
577         ASSERT(graphRegs != nullptr);
578         ASSERT(reg < graphRegs->size());
579         (*graphRegs)[reg] = true;
580     }
581 
582     template <DataType::Type REG_TYPE>
583     void InitUsedRegs(const ArenaVector<bool> *usedRegs)
584     {
585         if (usedRegs == nullptr) {
586             return;
587         }
588         ArenaVector<bool> *graphRegs = nullptr;
589         // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
590         if constexpr (REG_TYPE == DataType::INT64) {
591             usedRegs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
592             graphRegs = usedRegs_;
593             // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
594         } else if constexpr (REG_TYPE == DataType::FLOAT64) {
595             usedVregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
596             graphRegs = usedVregs_;
597         } else {
598             UNREACHABLE();
599         }
600         ASSERT(graphRegs != nullptr);
601         graphRegs->resize(usedRegs->size());
602         std::copy(usedRegs->begin(), usedRegs->end(), graphRegs->begin());
603     }
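    // Register-usage bookkeeping sketch (assumes the used-register vectors were initialized
    // via InitUsedRegs beforehand; register numbers are illustrative):
    //
    //     graph->SetRegUsage(5U, DataType::INT64);    // mark scalar register 5 as used
    //     graph->SetRegUsage(1U, DataType::FLOAT64);  // mark float/vector register 1 as used
    //     auto *scalarRegs = graph->GetUsedRegs<DataType::INT64>();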
604 
605     Register GetZeroReg() const;
606     Register GetArchTempReg() const;
607     Register GetArchTempVReg() const;
608     // Get mask of registers used in codegen, runtime, etc.
609     RegMask GetArchUsedRegs();
610     void SetArchUsedRegs(RegMask mask);
611 
612     // Get mask of vector registers used in codegen, runtime, etc.
613     VRegMask GetArchUsedVRegs();
614 
615     // Return true if one 64-bit scalar register can be split into two 32-bit registers
616     bool IsRegScalarMapped() const;
617 
618     uint32_t GetStackSlotsCount() const
619     {
620         return stackSlotCount_;
621     }
622 
623     void SetStackSlotsCount(uint32_t stackSlotCount)
624     {
625         stackSlotCount_ = stackSlotCount;
626     }
627 
628     void UpdateStackSlotsCount(uint32_t stackSlotCount)
629     {
630         stackSlotCount_ = std::max(stackSlotCount_, stackSlotCount);
631     }
632 
633     uint32_t GetParametersSlotsCount() const;
634 
635     uint32_t GetExtSlotsStart() const
636     {
637         return extStackSlot_;
638     }
639 
640     void SetExtSlotsStart(uint32_t extStackSlot)
641     {
642         extStackSlot_ = extStackSlot;
643     }
644 
645     BasicBlock *CreateEmptyBlock(uint32_t guestPc = INVALID_PC);
646     BasicBlock *CreateEmptyBlock(BasicBlock *baseBlock);
647 #ifndef NDEBUG
648     BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guestPc);
649 #endif
650     BasicBlock *CreateStartBlock();
651     BasicBlock *CreateEndBlock(uint32_t guestPc = INVALID_PC);
652     ConstantInst *GetFirstConstInst()
653     {
654         return firstConstInst_;
655     }
656     void SetFirstConstInst(ConstantInst *constInst)
657     {
658         firstConstInst_ = constInst;
659     }
660 
661     Inst *GetNullPtrInst() const
662     {
663         return nullptrInst_;
664     }
665     bool HasNullPtrInst() const
666     {
667         return nullptrInst_ != nullptr;
668     }
669     void UnsetNullPtrInst()
670     {
671         ASSERT(HasNullPtrInst());
672         nullptrInst_ = nullptr;
673     }
674     Inst *GetOrCreateNullPtr();
675 
676     Inst *GetUniqueObjectInst() const
677     {
678         return uniqueObjectInst_;
679     }
680     bool HasUniqueObjectInst() const
681     {
682         return uniqueObjectInst_ != nullptr;
683     }
684     void UnsetUniqueObjectInst()
685     {
686         ASSERT(HasUniqueObjectInst());
687         uniqueObjectInst_ = nullptr;
688     }
689     Inst *GetOrCreateUniqueObjectInst();
690 
691     /// Find a constant in the list; return nullptr if not found
692     ConstantInst *FindConstant(DataType::Type type, uint64_t value);
693     /// Find a constant in the list or create a new one and insert it at the end
694     template <typename T>
695     ConstantInst *FindOrCreateConstant(T value);
696 
697     /**
698      * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
699      * @param inst Constant instruction to be added
700      * @return Found instruction or inst if not found
701      */
702     ConstantInst *FindOrAddConstant(ConstantInst *inst);
703 
704     ParameterInst *AddNewParameter(uint16_t argNumber);
705 
706     ParameterInst *AddNewParameter(uint16_t argNumber, DataType::Type type)
707     {
708         ParameterInst *param = AddNewParameter(argNumber);
709         param->SetType(type);
710         return param;
711     }
712 
713     ParameterInst *FindParameter(uint16_t argNumber);
714 
715     /*
716      * Removes the ConstantInst from the graph's constant list
717      * !NOTE The ConstantInst isn't removed from the BasicBlock's instruction list
718      */
719     void RemoveConstFromList(ConstantInst *constInst);
720 
721     ConstantInst *GetSpilledConstant(ImmTableSlot slot)
722     {
723         ASSERT(static_cast<size_t>(slot) < spilledConstants_.size());
724         return spilledConstants_[slot];
725     }
726 
727     ImmTableSlot AddSpilledConstant(ConstantInst *constInst)
728     {
729         // Constant already in the table
730         auto currentSlot = constInst->GetImmTableSlot();
731         if (currentSlot != GetInvalidImmTableSlot()) {
732             ASSERT(spilledConstants_[currentSlot] == constInst);
733             return currentSlot;
734         }
735 
736         auto count = spilledConstants_.size();
737         if (count >= GetMaxNumImmSlots()) {
738             return GetInvalidImmTableSlot();
739         }
740         spilledConstants_.push_back(constInst);
741         constInst->SetImmTableSlot(count);
742         return ImmTableSlot(count);
743     }
744 
745     ImmTableSlot FindSpilledConstantSlot(ConstantInst *constInst) const
746     {
747         auto slot = std::find(spilledConstants_.begin(), spilledConstants_.end(), constInst);
748         if (slot == spilledConstants_.end()) {
749             return GetInvalidImmTableSlot();
750         }
751         return std::distance(spilledConstants_.begin(), slot);
752     }
753 
754     size_t GetSpilledConstantsCount() const
755     {
756         return spilledConstants_.size();
757     }
758 
759     bool HasAvailableConstantSpillSlots() const
760     {
761         return GetSpilledConstantsCount() < GetMaxNumImmSlots();
762     }
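    // Immediate-table sketch: a constant is given a slot only while free slots remain
    // (GetMaxNumImmSlots() is the limit); `constInst` is a hypothetical ConstantInst*.
    //
    //     if (graph->HasAvailableConstantSpillSlots()) {
    //         ImmTableSlot slot = graph->AddSpilledConstant(constInst);
    //         ASSERT(slot != GetInvalidImmTableSlot());
    //     }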
763 
764     auto begin()  // NOLINT(readability-identifier-naming)
765     {
766         return vectorBb_.begin();
767     }
768     auto begin() const  // NOLINT(readability-identifier-naming)
769     {
770         return vectorBb_.begin();
771     }
772     auto end()  // NOLINT(readability-identifier-naming)
773     {
774         return vectorBb_.end();
775     }
776     auto end() const  // NOLINT(readability-identifier-naming)
777     {
778         return vectorBb_.end();
779     }
780 
781     void Dump(std::ostream *out) const;
782 
783     Loop *GetRootLoop()
784     {
785         return rootLoop_;
786     }
787     const Loop *GetRootLoop() const
788     {
789         return rootLoop_;
790     }
791 
792     void SetRootLoop(Loop *rootLoop)
793     {
794         rootLoop_ = rootLoop;
795     }
796 
797     void SetHasIrreducibleLoop(bool hasIrrLoop)
798     {
799         FlagIrredicibleLoop::Set(hasIrrLoop, &bitFields_);
800     }
801 
802     void SetHasInfiniteLoop(bool hasInfLoop)
803     {
804         FlagInfiniteLoop::Set(hasInfLoop, &bitFields_);
805     }
806 
807     void SetHasFloatRegs()
808     {
809         FlagFloatRegs::Set(true, &bitFields_);
810     }
811 
812     bool HasLoop() const;
813     PANDA_PUBLIC_API bool HasIrreducibleLoop() const;
814     bool HasInfiniteLoop() const;
815     bool HasFloatRegs() const;
816 
817     /**
818      * Try-catch info
819      * Vector of try-begin blocks in the order they were declared in the bytecode
820      */
821     void AppendTryBeginBlock(const BasicBlock *block)
822     {
823         tryBeginBlocks_.push_back(block);
824     }
825 
826     void EraseTryBeginBlock(const BasicBlock *block)
827     {
828         auto it = std::find(tryBeginBlocks_.begin(), tryBeginBlocks_.end(), block);
829         if (it == tryBeginBlocks_.end()) {
830             ASSERT(false && "Trying to remove non try_begin block");
831             return;
832         }
833         tryBeginBlocks_.erase(it);
834     }
835 
836     const auto &GetTryBeginBlocks() const
837     {
838         return tryBeginBlocks_;
839     }
840 
841     void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rmPred);
842 
843     bool FindThrowBlock(BasicBlock *block)
844     {
845         auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
846         return (it != throwBlocks_.end());
847     }
848 
849     bool AppendThrowBlock(BasicBlock *block)
850     {
851         if (!FindThrowBlock(block)) {
852             throwBlocks_.insert(block);
853             return true;
854         }
855         return false;
856     }
857 
858     bool EraseThrowBlock(BasicBlock *block)
859     {
860         auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
861         if (it == throwBlocks_.end()) {
862             return false;
863         }
864         throwBlocks_.erase(it);
865         return true;
866     }
867 
868     const auto &GetThrowBlocks() const
869     {
870         return throwBlocks_;
871     }
872 
873     void ClearThrowBlocks()
874     {
875         throwBlocks_.clear();
876     }
877 
878     void AppendThrowableInst(const Inst *inst, BasicBlock *catchHandler)
879     {
880         auto it = throwableInsts_.emplace(inst, GetAllocator()->Adapter()).first;
881         it->second.push_back(catchHandler);
882     }
883 
884     bool IsInstThrowable(const Inst *inst) const
885     {
886         return throwableInsts_.count(inst) > 0;
887     }
888 
889     void RemoveThrowableInst(const Inst *inst);
890     PANDA_PUBLIC_API void ReplaceThrowableInst(Inst *oldInst, Inst *newInst);
891 
892     const auto &GetThrowableInstHandlers(const Inst *inst) const
893     {
894         ASSERT(IsInstThrowable(inst));
895         return throwableInsts_.at(inst);
896     }
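    // Try-catch bookkeeping sketch (`throwInst` and `catchBlock` are hypothetical):
    //
    //     graph->AppendThrowableInst(throwInst, catchBlock);
    //     if (graph->IsInstThrowable(throwInst)) {
    //         const auto &handlers = graph->GetThrowableInstHandlers(throwInst);
    //     }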
897 
898     void ClearTryCatchInfo()
899     {
900         throwableInsts_.clear();
901         tryBeginBlocks_.clear();
902     }
903 
904     void DumpThrowableInsts(std::ostream *out) const;
905 
906     /**
907      * Run pass specified by template argument T.
908      * Optimization passes might take additional arguments that will be passed to the optimization's constructor.
909      * Analyses can't take additional arguments.
910      * @tparam T Type of pass
911      * @param args Additional arguments for optimization passes
912      * @return true if pass was successful
913      */
914     template <typename T, typename... Args>
915     bool RunPass(Args... args)
916     {
917         ASSERT(GetPassManager());
918         return passManager_.RunPass<T>(std::forward<Args>(args)...);
919     }
920     template <typename T, typename... Args>
921     bool RunPass(Args... args) const
922     {
923         ASSERT(GetPassManager());
924         return passManager_.RunPass<T>(std::forward<Args>(args)...);
925     }
926 
927     template <typename T>
928     bool RunPass(T *pass)
929     {
930         ASSERT(GetPassManager());
931         return passManager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
932     }
933 
934     /**
935      * Get analysis instance.
936      * All analyses reside in the Graph object in a composition relationship.
937      * @tparam T Type of analysis
938      * @return Reference to analysis instance
939      */
940     template <typename T>
941     T &GetAnalysis()
942     {
943         ASSERT(GetPassManager());
944         return GetPassManager()->GetAnalysis<T>();
945     }
946     template <typename T>
947     const T &GetAnalysis() const
948     {
949         ASSERT(GetPassManager());
950         return passManager_.GetAnalysis<T>();
951     }
952 
953     /**
954      * Same as GetAnalysis, but additionally checks that the analysis is in a valid state.
955      * @tparam T Type of analysis
956      * @return Reference to analysis instance
957      */
958     template <typename T>
959     T &GetValidAnalysis()
960     {
961         RunPass<T>();
962         ASSERT(IsAnalysisValid<T>());
963         return GetAnalysis<T>();
964     }
965     template <typename T>
966     const T &GetValidAnalysis() const
967     {
968         RunPass<T>();
969         ASSERT(IsAnalysisValid<T>());
970         return GetAnalysis<T>();
971     }
972 
973     /**
974      * Return true if the analysis is valid, false otherwise
975      * @tparam T Type of analysis
976      */
977     template <typename T>
978     bool IsAnalysisValid() const
979     {
980         return GetAnalysis<T>().IsValid();
981     }
982 
983     /**
984      * Reset valid state of specified analysis
985      * @tparam T Type of analysis
986      */
987     template <typename T>
988     void InvalidateAnalysis()
989     {
990         ASSERT(GetPassManager());
991         GetPassManager()->GetAnalysis<T>().SetValid(false);
992     }
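    // Pass/analysis driving sketch (the pass and analysis names are examples of classes
    // declared elsewhere in the compiler):
    //
    //     graph->RunPass<Cleanup>();                               // run an optimization
    //     auto &la = graph->GetValidAnalysis<LivenessAnalyzer>();  // (re)build if invalid
    //     graph->InvalidateAnalysis<DominatorsTree>();             // force a rebuild next time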
993 
994     /// Accessors for the current instruction id counter.
995     auto GetCurrentInstructionId() const
996     {
997         return instrCurrentId_;
998     }
999     auto SetCurrentInstructionId(size_t v)
1000     {
1001         instrCurrentId_ = v;
1002     }
1003 
1004     /// RuntimeInterface accessors
1005     RuntimeInterface *GetRuntime() const
1006     {
1007         return runtime_;
1008     }
1009     void SetRuntime(RuntimeInterface *runtime)
1010     {
1011         runtime_ = runtime;
1012     }
1013     auto GetMethod() const
1014     {
1015         return method_;
1016     }
1017     auto SetMethod(RuntimeInterface::MethodPtr method)
1018     {
1019         method_ = method;
1020     }
1021 
1022     Encoder *GetEncoder();
1023     RegistersDescription *GetRegisters() const;
1024     CallingConvention *GetCallingConvention();
1025     const MethodProperties &GetMethodProperties();
1026     void ResetParameterInfo();
1027     SpillFillData GetDataForNativeParam(DataType::Type type);
1028 
1029     template <bool GRAPH_ENCODED = false>
1030     size_t EstimateCodeSize()
1031     {
1032         if constexpr (GRAPH_ENCODED) {
1033             return encoder_->BufferSize();
1034         }
1035         auto maxIrInstsCount = GetCurrentInstructionId();
1036         auto encoder = GetEncoder();
1037         ASSERT(encoder != nullptr);
1038         auto maxArchInstsPerIrInsts = encoder->MaxArchInstPerEncoded();
1039         auto maxBytesInArchInst = GetInstructionSizeBits(GetArch());
1040         return maxIrInstsCount * maxArchInstsPerIrInsts * maxBytesInArchInst;
1041     }
1042 
1043     EventWriter &GetEventWriter()
1044     {
1045         return eventWriter_;
1046     }
1047 
1048     void SetCodeBuilder(CodeInfoBuilder *builder)
1049     {
1050         ciBuilder_ = builder;
1051     }
1052 
1053     // clang-format off
1054 
1055     /// Create instruction by opcode
1056     // NOLINTNEXTLINE(readability-function-size)
1057     [[nodiscard]] Inst* CreateInst(Opcode opc) const
1058     {
1059         switch (opc) {
1060 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
1061 #define RETURN_INST(OPCODE, BASE, ...)                                   \
1062             case Opcode::OPCODE: {                                       \
1063                 auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE); \
1064                 inst->SetId(instrCurrentId_++);                          \
1065                 if (IsAbcKit()) {                                        \
1066                     SetAbcKitFlags(inst);                                \
1067                 }                                                        \
1068                 return inst;                                             \
1069             }
1070             OPCODE_LIST(RETURN_INST)
1071 
1072 #undef RETURN_INST
1073             default:
1074                 return nullptr;
1075         }
1076     }
1077     /// Define creation methods for all opcodes
1078 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
1079 #define RETURN_INST(OPCODE, BASE, ...)                                                         \
1080     template <typename... Args>                                                                \
1081     [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const {                             \
1082         auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE, std::forward<Args>(args)...);  \
1083         inst->SetId(instrCurrentId_++);                                                        \
1084         if (IsAbcKit()) {                                                                      \
1085             SetAbcKitFlags(inst);                                                              \
1086         }                                                                                      \
1087         return inst;                                                                           \
1088     }
1089     OPCODE_LIST(RETURN_INST)
1090 
1091 #undef RETURN_INST
1092 
1093 #ifdef PANDA_COMPILER_DEBUG_INFO
1094 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
1095 #define RETURN_INST(OPCODE, BASE, ...)                                               \
1096     template <typename... Args>                                                   \
1097     [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {    \
1098         auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...);  \
1099         new_inst->SetCurrentMethod(inst->GetCurrentMethod());                     \
1100         return new_inst;                                                          \
1101     }
1102     OPCODE_LIST(RETURN_INST)
1103 
1104 #undef RETURN_INST
1105 #else
1106 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
1107 #define RETURN_INST(OPCODE, BASE, ...)                                               \
1108     template <typename... Args>                                                   \
1109     [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {    \
1110         auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...);  \
1111         return new_inst;                                                          \
1112     }
1113     OPCODE_LIST(RETURN_INST)
1114 
1115 #undef RETURN_INST
1116 #endif
1117 
1118     // clang-format on
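    // Instruction-creation sketch (the per-opcode factories above are generated from
    // OPCODE_LIST; the opcode, operands and insertion call are illustrative):
    //
    //     auto *add = graph->CreateInstAdd(DataType::INT32, pc);
    //     add->SetInput(0, left);
    //     add->SetInput(1, right);
    //     block->AppendInst(add);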
1119 
1120     uint32_t GetBitFields()
1121     {
1122         return bitFields_;
1123     }
1124 
1125     void SetBitFields(uint32_t bitFields)
1126     {
1127         bitFields_ = bitFields;
1128     }
1129 
1130     bool NeedCleanup() const
1131     {
1132         return FlagNeedCleanup::Get(bitFields_);
1133     }
1134 
1135     void SetNeedCleanup(bool v)
1136     {
1137         FlagNeedCleanup::Set(v, &bitFields_);
1138     }
1139 
1140     bool CanOptimizeNativeMethods() const
1141     {
1142         return FlagCanOptimizeNativeMethods::Get(bitFields_);
1143     }
1144 
1145     void SetCanOptimizeNativeMethods(bool v)
1146     {
1147         FlagCanOptimizeNativeMethods::Set(v, &bitFields_);
1148     }
1149 
1150     bool IsJitOrOsrMode() const
1151     {
1152         return !IsAotMode() && !IsBytecodeOptimizer() && SupportManagedCode();
1153     }
1154 
1155     bool IsOsrMode() const
1156     {
1157         return mode_.IsOsr();
1158     }
1159 
1160     bool IsJitMode() const
1161     {
1162         return !IsOsrMode() && IsJitOrOsrMode();
1163     }
1164 
1165     bool IsBytecodeOptimizer() const
1166     {
1167         return mode_.IsBytecodeOpt();
1168     }
1169 
1170     bool IsDynamicMethod() const
1171     {
1172         return mode_.IsDynamicMethod();
1173     }
1174 
1175     bool IsAbcKit() const
1176     {
1177 #ifdef ENABLE_LIBABCKIT
1178         return mode_.IsAbcKit();
1179 #else
1180         return false;
1181 #endif
1182     }
1183 
1184     bool SupportManagedCode() const
1185     {
1186         return mode_.SupportManagedCode();
1187     }
1188 
1189     GraphMode GetMode() const
1190     {
1191         return mode_;
1192     }
1193 
1194     void SetMode(GraphMode mode)
1195     {
1196         mode_ = mode;
1197     }
1198 
1199 #ifndef NDEBUG
1200     compiler::inst_modes::Mode GetCompilerMode()
1201     {
1202         if (IsBytecodeOptimizer()) {
1203             return compiler::inst_modes::BYTECODE_OPT;
1204         }
1205         if (SupportManagedCode()) {
1206             return compiler::inst_modes::JIT_AOT;
1207         }
1208         return compiler::inst_modes::IRTOC;
1209     }
1210 #endif
1211 
1212     void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
1213     {
1214         singleImplementationList_.push_back(method);
1215     }
1216 
1217     void SetDynamicMethod()
1218     {
1219         mode_.SetDynamicMethod(true);
1220     }
1221 
1222     void SetAbcKit()
1223     {
1224         mode_.SetAbcKit(true);
1225     }
1226 
1227     void SetDynamicStub()
1228     {
1229         mode_.SetDynamicStub(true);
1230     }
1231 
1232     auto &GetSingleImplementationList()
1233     {
1234         return singleImplementationList_;
1235     }
1236 
1237     Graph *GetParentGraph() const
1238     {
1239         return parentGraph_;
1240     }
1241 
1242     Graph *GetOutermostParentGraph()
1243     {
1244         auto graph = this;
1245         while (graph->GetParentGraph() != nullptr) {
1246             graph = graph->GetParentGraph();
1247         }
1248         return graph;
1249     }
1250 
1251     void SetVRegsCount(size_t count)
1252     {
1253         vregsCount_ = count;
1254     }
1255 
1256     size_t GetVRegsCount() const
1257     {
1258         return vregsCount_;
1259     }
1260 
1261     size_t GetEnvCount() const
1262     {
1263         return (IsDynamicMethod() && !IsBytecodeOptimizer()) ? VRegInfo::ENV_COUNT : 0;
1264     }
1265 
1266     RelocationHandler *GetRelocationHandler()
1267     {
1268         return relocationHandler_;
1269     }
1270 
1271     void SetRelocationHandler(RelocationHandler *handler)
1272     {
1273         relocationHandler_ = handler;
1274     }
1275 
1276     int64_t GetBranchCounter(const BasicBlock *block, bool trueSucc);
1277 
1278     int64_t GetThrowCounter(const BasicBlock *block);
1279 
1280     /// This class provides methods for a range-based `for` loop over all parameters in the graph.
1281     class ParameterList {
1282     public:
1283         class Iterator {
1284         public:
1285             explicit Iterator(Inst *inst) : inst_(inst) {}
1286 
1287             Iterator &operator++()
1288             {
1289                 for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
1290                      inst_ = inst_->GetNext()) {
1291                 }
1292                 return *this;
1293             }
1294             bool operator!=(const Iterator &other)
1295             {
1296                 return inst_ != other.inst_;
1297             }
1298             Inst *operator*()
1299             {
1300                 return inst_;
1301             }
1302             Inst *operator->()
1303             {
1304                 return inst_;
1305             }
1306 
1307         private:
1308             Inst *inst_ {nullptr};
1309         };
1310 
1311         explicit ParameterList(const Graph *graph) : graph_(graph) {}
1312 
1313         // NOLINTNEXTLINE(readability-identifier-naming)
1314         PANDA_PUBLIC_API Iterator begin();
1315         // NOLINTNEXTLINE(readability-identifier-naming)
1316         static Iterator end()
1317         {
1318             return Iterator(nullptr);
1319         }
1320 
1321     private:
1322         const Graph *graph_ {nullptr};
1323     };
1324 
1325     /**
1326      * Get list of all parameters
1327      * @return instance of the ParameterList class
1328      */
1329     ParameterList GetParameters() const
1330     {
1331         return ParameterList(this);
1332     }
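    // Parameter iteration sketch: ParameterList lets a range-based for walk all Parameter
    // instructions in the graph (`ProcessParam` is a hypothetical callback):
    //
    //     for (auto *inst : graph->GetParameters()) {
    //         ProcessParam(inst->CastToParameter());
    //     }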
1333 
1334     void InitDefaultLocations();
1335 
1336     bool SupportsIrtocBarriers() const
1337     {
1338         return (IsJitOrOsrMode() || IsAotMode() || GetMode().IsInterpreter() || GetMode().IsInterpreterEntry()) &&
1339                !IsDynamicMethod() && GetArch() != Arch::AARCH32;
1340     }
1341 
1342     void SetMaxInliningDepth(uint32_t depth)
1343     {
1344         maxInliningDepth_ = std::max(maxInliningDepth_, depth);
1345     }
1346 
1347     uint32_t GetMaxInliningDepth()
1348     {
1349         return maxInliningDepth_;
1350     }
1351 
1352 private:
1353     void AddConstInStartBlock(ConstantInst *constInst);
1354 
1355     NO_MOVE_SEMANTIC(Graph);
1356     NO_COPY_SEMANTIC(Graph);
1357 
1358 private:
1359     uint32_t maxInliningDepth_ {0};
1360     ArenaAllocator *const allocator_;
1361     ArenaAllocator *const localAllocator_;
1362 
1363     Arch arch_ {RUNTIME_ARCH};
1364 
1365     // List of blocks in insertion order.
1366     ArenaVector<BasicBlock *> vectorBb_;
1367     BasicBlock *startBlock_ {nullptr};
1368     BasicBlock *endBlock_ {nullptr};
1369 
1370     Loop *rootLoop_ {nullptr};
1371 
1372     AotData *aotData_ {nullptr};
1373 
1374     uint32_t bitFields_ {0};
1375     using FlagDFConstruct = BitField<bool, 0, 1>;
1376     using FlagNeedCleanup = FlagDFConstruct::NextFlag;
1377     using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
1378     using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
1379     using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
1380     using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
1381     using FlagIrtocPrologEpilogOptimized = FlagDefaultLocationsInit::NextFlag;
1382     using FlagThrowApplied = FlagIrtocPrologEpilogOptimized::NextFlag;
1383     using FlagCanOptimizeNativeMethods = FlagThrowApplied::NextFlag;
1384     using FlagUnrollComplete = FlagCanOptimizeNativeMethods::NextFlag;
1385 #if defined(NDEBUG) && !defined(ENABLE_LIBABCKIT)
1386     using LastField = FlagUnrollComplete;
1387 #else
1388     using FlagRegallocApplied = FlagUnrollComplete::NextFlag;
1389     using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
1390     using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
1391     using FlagLowLevelInstnsEnabled = FlagInliningComplete::NextFlag;
1392     using FlagDynUnitTest = FlagLowLevelInstnsEnabled::NextFlag;
1393     using LastField = FlagDynUnitTest;
1394 #endif  // NDEBUG
1395 
1396 #ifdef PANDA_COMPILER_DEBUG_INFO
1397     using FlagLineDebugInfoEnabled = LastField::NextFlag;
1398 #endif
1399 
1400     // codegen data
1401     EncodeDataType data_;
1402     Span<const uint8_t> codeInfoData_;
1403     ArenaVector<bool> *usedRegs_ {nullptr};
1404     ArenaVector<bool> *usedVregs_ {nullptr};
1405 
1406     // NOTE (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
1407     ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwableInsts_;
1408 
1409     RegMask archUsedRegs_ {0};
1410 
1411     mutable size_t instrCurrentId_ {0};
1412     // first constant instruction in graph !NOTE rewrite it to hash-map
1413     ConstantInst *firstConstInst_ {nullptr};
1414     Inst *nullptrInst_ {nullptr};
1415     Inst *uniqueObjectInst_ {nullptr};
1416     RuntimeInterface *runtime_ {nullptr};
1417     RuntimeInterface::MethodPtr method_ {nullptr};
1418 
1419     Encoder *encoder_ {nullptr};
1420 
1421     mutable RegistersDescription *registers_ {nullptr};
1422 
1423     CallingConvention *callconv_ {nullptr};
1424 
1425     std::optional<MethodProperties> methodProperties_ {std::nullopt};
1426 
1427     ParameterInfo *paramInfo_ {nullptr};
1428 
1429     RelocationHandler *relocationHandler_ {nullptr};
1430 
1431     mutable PassManager passManager_;
1432     EventWriter eventWriter_;
1433 
1434     GraphMode mode_;
1435 
1436     CodeInfoBuilder *ciBuilder_ {nullptr};
1437 
1438     ArenaVector<RuntimeInterface::MethodPtr> singleImplementationList_;
1439     ArenaVector<const BasicBlock *> tryBeginBlocks_;
1440     ArenaSet<BasicBlock *> throwBlocks_;
1441     ArenaVector<ConstantInst *> spilledConstants_;
1442     // Graph that inlines this graph
1443     Graph *parentGraph_ {nullptr};
1444     // Number of used stack slots
1445     uint32_t stackSlotCount_ {0};
1446     // Number of used stack slots for parameters
1447     uint32_t paramSlotsCount_ {0};
1448     // First language extension slot
1449     uint32_t extStackSlot_ {0};
1450     // Number of the virtual registers used in the compiled method (inlined methods aren't included).
1451     uint32_t vregsCount_ {0};
1452     // Source language of the method being compiled
1453     SourceLanguage lang_ {SourceLanguage::PANDA_ASSEMBLY};
1454 };
1455 
1456 class MarkerHolder {
1457 public:
1458     NO_COPY_SEMANTIC(MarkerHolder);
1459     NO_MOVE_SEMANTIC(MarkerHolder);
1460 
1461     explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
1462     {
1463         ASSERT(marker_ != UNDEF_MARKER);
1464     }
1465 
1466     ~MarkerHolder()
1467     {
1468         graph_->EraseMarker(marker_);
1469     }
1470 
1471     Marker GetMarker()
1472     {
1473         return marker_;
1474     }
1475 
1476 private:
1477     const Graph *graph_;
1478     Marker marker_ {UNDEF_MARKER};
1479 };
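// RAII sketch for MarkerHolder: the marker is acquired from the graph on construction and
// released automatically when the holder leaves scope (`bb` is a hypothetical BasicBlock*).
//
//     MarkerHolder holder(graph);
//     Marker visited = holder.GetMarker();
//     bb->SetMarker(visited);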
1480 
1481 template <typename T>
1482 ConstantInst *Graph::FindOrCreateConstant(T value)
1483 {
1484     bool isSupportInt32 = IsBytecodeOptimizer();
1485     if (firstConstInst_ == nullptr) {
1486         firstConstInst_ = CreateInstConstant(value, isSupportInt32);
1487         AddConstInStartBlock(firstConstInst_);
1488         return firstConstInst_;
1489     }
1490     ConstantInst *currentConst = firstConstInst_;
1491     ConstantInst *prevConst = nullptr;
1492     while (currentConst != nullptr) {
1493         if (currentConst->IsEqualConst(value, isSupportInt32)) {
1494             return currentConst;
1495         }
1496         prevConst = currentConst;
1497         currentConst = currentConst->GetNextConst();
1498     }
1499     ASSERT(prevConst != nullptr);
1500     auto *newConst = CreateInstConstant(value, isSupportInt32);
1501     AddConstInStartBlock(newConst);
1502 
1503     prevConst->SetNextConst(newConst);
1504     return newConst;
1505 }
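// Constant-pool sketch: repeated requests for an equal value return the same ConstantInst,
// so at most one node per distinct constant is kept in the list.
//
//     auto *c1 = graph->FindOrCreateConstant<uint64_t>(42U);
//     auto *c2 = graph->FindOrCreateConstant<uint64_t>(42U);
//     ASSERT(c1 == c2);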
1506 
1507 void InvalidateBlocksOrderAnalyzes(Graph *graph);
1508 void MarkLoopExits(const Graph *graph, Marker marker);
1509 std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
1510 size_t GetObjectOffset(const Graph *graph, ObjectType objType, RuntimeInterface::FieldPtr field, uint32_t typeId);
1511 }  // namespace ark::compiler
1512 #endif  // COMPILER_OPTIMIZER_IR_GRAPH_H
1513