1 /*
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
17 #define COMPILER_OPTIMIZER_IR_GRAPH_H
18
19 #include "aot_data.h"
20 #include "basicblock.h"
21 #include "compiler_events_gen.h"
22 #include "inst.h"
23 #include "marker.h"
24 #include "optimizer/code_generator/method_properties.h"
25 #include "optimizer/pass_manager.h"
26 #include "utils/arena_containers.h"
27 #include <algorithm>
28 #include <optional>
29
30 // defines required for AbcKit
31 #if !defined(NDEBUG) || defined(ENABLE_LIBABCKIT)
32 #define COMPILER_DEBUG_CHECKS
33 #endif
34 #ifdef ENABLE_LIBABCKIT
35 // CC-OFFNXT(G.PRE.02) should be with define
36 #define ABCKIT_MODE_CHECK(cond, action) \
37 if (cond) { \
38 action; \
39 }
40 #else
41 // CC-OFFNXT(G.PRE.02) should be with define
42 #define ABCKIT_MODE_CHECK(cond, action)
43 #endif
44
45 namespace ark {
46 class Method;
47 class CodeAllocator;
48 } // namespace ark
49
50 namespace ark::compiler {
51 class BasicBlock;
52 class Graph;
53 class RuntimeInfo;
54 class PassManager;
55 class LivenessAnalyzer;
56 class DominatorsTree;
57 class Rpo;
58 class BoundsRangeInfo;
59 class Loop;
60 class CodeInfoBuilder;
61
62 class Encoder;
63 class CallingConvention;
64 class ParameterInfo;
65 class RegistersDescription;
66 class RelocationHandler;
67
68 enum AliasType : uint8_t;
69
70 /// Specifies graph compilation mode.
71 class GraphMode {
72 public:
GraphMode(uint32_t value)73 explicit GraphMode(uint32_t value) : value_(value) {}
74 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
75 #define DECLARE_GRAPH_MODE_MODIFIERS(name) \
76 void Set##name(bool v) \
77 { \
78 Flag##name ::Set(v, &value_); \
79 } \
80 bool Is##name() const \
81 { \
82 /* CC-OFFNXT(G.PRE.05) function gen */ \
83 return Flag##name ::Get(value_); \
84 }
85
86 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
87 #define DECLARE_GRAPH_MODE(name) \
88 static GraphMode name(bool set = true) \
89 { \
90 /* CC-OFFNXT(G.PRE.05) function gen */ \
91 return GraphMode(Flag##name ::Encode(set)); \
92 } \
93 DECLARE_GRAPH_MODE_MODIFIERS(name)
94
95 DECLARE_GRAPH_MODE(Osr);
96 // The graph is used in BytecodeOptimizer mode
97 DECLARE_GRAPH_MODE(BytecodeOpt);
98 // The method from dynamic language
99 DECLARE_GRAPH_MODE(DynamicMethod);
100 // The method from dynamic language uses common calling convention
101 DECLARE_GRAPH_MODE(DynamicStub);
102 // Graph will be compiled with native calling convention
103 DECLARE_GRAPH_MODE(Native);
104 // FastPath from compiled code to runtime
105 DECLARE_GRAPH_MODE(FastPath);
106 // Boundary frame is used for compiled code
107 DECLARE_GRAPH_MODE(Boundary);
108 // Graph will be compiled for calling inside interpreter
109 DECLARE_GRAPH_MODE(Interpreter);
110 // Graph will be compiled for interpreter main loop
111 DECLARE_GRAPH_MODE(InterpreterEntry);
112 // Graph will be compiled for abckit
113 DECLARE_GRAPH_MODE(AbcKit);
114
115 #undef DECLARE_GRAPH_MODE
116 #undef DECLARE_GRAPH_MODE_MODIFIERS
117
SupportManagedCode()118 bool SupportManagedCode() const
119 {
120 return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
121 }
122
123 void Dump(std::ostream &stm);
124
125 private:
126 using FlagOsr = BitField<bool, 0, 1>;
127 using FlagBytecodeOpt = FlagOsr::NextFlag;
128 using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
129 using FlagDynamicStub = FlagDynamicMethod::NextFlag;
130 using FlagNative = FlagDynamicStub::NextFlag;
131 using FlagFastPath = FlagNative::NextFlag;
132 using FlagBoundary = FlagFastPath::NextFlag;
133 using FlagInterpreter = FlagBoundary::NextFlag;
134 using FlagInterpreterEntry = FlagInterpreter::NextFlag;
135 using FlagAbcKit = FlagInterpreterEntry::NextFlag;
136
137 uint32_t value_ {0};
138
139 friend GraphMode operator|(GraphMode a, GraphMode b);
140 };
141
142 inline GraphMode operator|(GraphMode a, GraphMode b)
143 {
144 return GraphMode(a.value_ | b.value_);
145 }
146
147 using EncodeDataType = Span<uint8_t>;
148
149 class PANDA_PUBLIC_API Graph final : public MarkerMgr {
150 public:
151 struct GraphArgs {
152 ArenaAllocator *allocator;
153 ArenaAllocator *localAllocator;
154 Arch arch;
155 RuntimeInterface::MethodPtr method;
156 RuntimeInterface *runtime;
157 };
158
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch)159 explicit Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch)
160 : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, false)
161 {
162 }
163
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,bool osrMode)164 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool osrMode)
165 : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, osrMode)
166 {
167 }
168
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,bool dynamicMethod,bool bytecodeOpt)169 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool dynamicMethod, bool bytecodeOpt)
170 : Graph({allocator, localAllocator, arch, nullptr, GetDefaultRuntime()}, nullptr, false, dynamicMethod,
171 bytecodeOpt)
172 {
173 }
174
Graph(const GraphArgs & args,bool osrMode)175 Graph(const GraphArgs &args, bool osrMode) : Graph(args, nullptr, osrMode) {}
176
177 Graph(const GraphArgs &args, Graph *parent, bool osrMode, bool dynamicMethod = false, bool bytecodeOpt = false)
178 : Graph(args, parent,
179 GraphMode::Osr(osrMode) | GraphMode::BytecodeOpt(bytecodeOpt) | GraphMode::DynamicMethod(dynamicMethod))
180 {
181 }
182
Graph(const GraphArgs & args,Graph * parent,GraphMode mode)183 Graph(const GraphArgs &args, Graph *parent, GraphMode mode)
184 : allocator_(args.allocator),
185 localAllocator_(args.localAllocator),
186 arch_(args.arch),
187 vectorBb_(args.allocator->Adapter()),
188 throwableInsts_(args.allocator->Adapter()),
189 runtime_(args.runtime),
190 method_(args.method),
191 passManager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
192 eventWriter_(args.runtime->GetClassNameFromMethod(args.method), args.runtime->GetMethodName(args.method)),
193 mode_(mode),
194 singleImplementationList_(args.allocator->Adapter()),
195 tryBeginBlocks_(args.allocator->Adapter()),
196 throwBlocks_(args.allocator->Adapter()),
197 spilledConstants_(args.allocator->Adapter()),
198 parentGraph_(parent)
199 {
200 SetNeedCleanup(true);
201 }
202
203 ~Graph() override;
204
CreateChildGraph(RuntimeInterface::MethodPtr method)205 Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
206 {
207 auto graph = GetAllocator()->New<Graph>(
208 GraphArgs {GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime()}, this, mode_);
209 graph->SetAotData(GetAotData());
210 return graph;
211 }
212
213 /// Get default runtime interface object
GetDefaultRuntime()214 static RuntimeInterface *GetDefaultRuntime()
215 {
216 static RuntimeInterface runtimeInterface;
217 return &runtimeInterface;
218 }
219
GetArch()220 Arch GetArch() const
221 {
222 return arch_;
223 }
224
GetLanguage()225 SourceLanguage GetLanguage() const
226 {
227 return lang_;
228 }
229
SetLanguage(SourceLanguage lang)230 void SetLanguage(SourceLanguage lang)
231 {
232 lang_ = lang;
233 }
234
235 void AddBlock(BasicBlock *block);
236 #ifndef NDEBUG
237 void AddBlock(BasicBlock *block, uint32_t id);
238 #endif
239 void DisconnectBlock(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
240 void DisconnectBlockRec(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
241
242 void EraseBlock(BasicBlock *block);
243 void RestoreBlock(BasicBlock *block);
244 // Remove empty block. Block must have one successor and no Phis.
245 void RemoveEmptyBlock(BasicBlock *block);
246
247 // Remove empty block. Block may have Phis and can't be a loop pre-header.
248 void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irrLoop = false);
249
250 // Remove block predecessors.
251 void RemovePredecessors(BasicBlock *block, bool removeLastInst = true);
252
253 // Remove block successors.
254 void RemoveSuccessors(BasicBlock *block);
255
256 // Remove unreachable blocks.
257 void RemoveUnreachableBlocks();
258
259 // get end block
GetEndBlock()260 BasicBlock *GetEndBlock()
261 {
262 return endBlock_;
263 }
264 // set end block
SetEndBlock(BasicBlock * endBlock)265 void SetEndBlock(BasicBlock *endBlock)
266 {
267 endBlock_ = endBlock;
268 }
HasEndBlock()269 bool HasEndBlock()
270 {
271 return endBlock_ != nullptr;
272 }
273 // get start block
GetStartBlock()274 BasicBlock *GetStartBlock()
275 {
276 return startBlock_;
277 }
GetStartBlock()278 BasicBlock *GetStartBlock() const
279 {
280 return startBlock_;
281 }
282 // set start block
SetStartBlock(BasicBlock * startBlock)283 void SetStartBlock(BasicBlock *startBlock)
284 {
285 startBlock_ = startBlock;
286 }
287 // get vector_bb_
GetVectorBlocks()288 const ArenaVector<BasicBlock *> &GetVectorBlocks() const
289 {
290 return vectorBb_;
291 }
292
GetAliveBlocksCount()293 size_t GetAliveBlocksCount() const
294 {
295 return std::count_if(vectorBb_.begin(), vectorBb_.end(), [](BasicBlock *block) { return block != nullptr; });
296 }
297
GetPassManager()298 PassManager *GetPassManager()
299 {
300 return &passManager_;
301 }
GetPassManager()302 const PassManager *GetPassManager() const
303 {
304 return &passManager_;
305 }
306
307 const BoundsRangeInfo *GetBoundsRangeInfo() const;
308
309 PANDA_PUBLIC_API const ArenaVector<BasicBlock *> &GetBlocksRPO() const;
310
311 PANDA_PUBLIC_API const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;
312
313 template <class Callback>
314 void VisitAllInstructions(Callback callback);
315
316 AliasType CheckInstAlias(Inst *mem1, Inst *mem2);
317
318 /// Main allocator for graph, all related to Graph data should be allocated via this allocator.
GetAllocator()319 ArenaAllocator *GetAllocator() const
320 {
321 return allocator_;
322 }
323 /// Allocator for temporary usage, when allocated data is no longer needed after optimization/analysis finished.
GetLocalAllocator()324 ArenaAllocator *GetLocalAllocator() const
325 {
326 return localAllocator_;
327 }
IsDFConstruct()328 bool IsDFConstruct() const
329 {
330 return FlagDFConstruct::Get(bitFields_);
331 }
SetDFConstruct()332 void SetDFConstruct()
333 {
334 FlagDFConstruct::Set(true, &bitFields_);
335 }
336
SetAotData(AotData * data)337 void SetAotData(AotData *data)
338 {
339 aotData_ = data;
340 }
GetAotData()341 AotData *GetAotData()
342 {
343 return aotData_;
344 }
GetAotData()345 const AotData *GetAotData() const
346 {
347 return aotData_;
348 }
349
IsAotMode()350 bool IsAotMode() const
351 {
352 return aotData_ != nullptr;
353 }
354
IsAotNoChaMode()355 bool IsAotNoChaMode() const
356 {
357 return aotData_ != nullptr && !aotData_->GetUseCha();
358 }
359
IsOfflineCompilationMode()360 bool IsOfflineCompilationMode() const
361 {
362 return IsAotMode() || GetMode().IsInterpreter() || GetMode().IsFastPath() || GetMode().IsInterpreterEntry();
363 }
364
IsDefaultLocationsInit()365 bool IsDefaultLocationsInit() const
366 {
367 return FlagDefaultLocationsInit::Get(bitFields_);
368 }
SetDefaultLocationsInit()369 void SetDefaultLocationsInit()
370 {
371 FlagDefaultLocationsInit::Set(true, &bitFields_);
372 }
IsIrtocPrologEpilogOptimized()373 bool IsIrtocPrologEpilogOptimized() const
374 {
375 return FlagIrtocPrologEpilogOptimized::Get(bitFields_);
376 }
SetIrtocPrologEpilogOptimized()377 void SetIrtocPrologEpilogOptimized()
378 {
379 FlagIrtocPrologEpilogOptimized::Set(true, &bitFields_);
380 }
IsUnrollComplete()381 bool IsUnrollComplete() const
382 {
383 return FlagUnrollComplete::Get(bitFields_);
384 }
SetUnrollComplete()385 void SetUnrollComplete()
386 {
387 FlagUnrollComplete::Set(true, &bitFields_);
388 }
389 #ifdef COMPILER_DEBUG_CHECKS
IsRegAllocApplied()390 bool IsRegAllocApplied() const
391 {
392 return FlagRegallocApplied::Get(bitFields_);
393 }
SetRegAllocApplied()394 void SetRegAllocApplied()
395 {
396 FlagRegallocApplied::Set(true, &bitFields_);
397 }
IsRegAccAllocApplied()398 bool IsRegAccAllocApplied() const
399 {
400 return FlagRegaccallocApplied::Get(bitFields_);
401 }
SetRegAccAllocApplied()402 void SetRegAccAllocApplied()
403 {
404 FlagRegaccallocApplied::Set(true, &bitFields_);
405 }
IsInliningComplete()406 bool IsInliningComplete() const
407 {
408 return FlagInliningComplete::Get(bitFields_) || IsOsrMode();
409 }
SetInliningComplete()410 void SetInliningComplete()
411 {
412 FlagInliningComplete::Set(true, &bitFields_);
413 }
IsLowLevelInstructionsEnabled()414 bool IsLowLevelInstructionsEnabled() const
415 {
416 return FlagLowLevelInstnsEnabled::Get(bitFields_);
417 }
SetLowLevelInstructionsEnabled()418 void SetLowLevelInstructionsEnabled()
419 {
420 FlagLowLevelInstnsEnabled::Set(true, &bitFields_);
421 }
IsDynUnitTest()422 bool IsDynUnitTest() const
423 {
424 return FlagDynUnitTest::Get(bitFields_);
425 }
SetDynUnitTestFlag()426 void SetDynUnitTestFlag()
427 {
428 FlagDynUnitTest::Set(true, &bitFields_);
429 }
430 #else
IsRegAllocApplied()431 bool IsRegAllocApplied() const
432 {
433 return false;
434 }
435 #endif // COMPILER_DEBUG_CHECKS
436
IsThrowApplied()437 bool IsThrowApplied() const
438 {
439 return FlagThrowApplied::Get(bitFields_);
440 }
SetThrowApplied()441 void SetThrowApplied()
442 {
443 FlagThrowApplied::Set(true, &bitFields_);
444 }
UnsetThrowApplied()445 void UnsetThrowApplied()
446 {
447 FlagThrowApplied::Set(false, &bitFields_);
448 }
449
450 #ifdef PANDA_COMPILER_DEBUG_INFO
IsLineDebugInfoEnabled()451 bool IsLineDebugInfoEnabled() const
452 {
453 return FlagLineDebugInfoEnabled::Get(bitFields_);
454 }
SetLineDebugInfoEnabled()455 void SetLineDebugInfoEnabled()
456 {
457 FlagLineDebugInfoEnabled::Set(true, &bitFields_);
458 }
459 #endif
460
SetCode(EncodeDataType data)461 void SetCode(EncodeDataType data)
462 {
463 data_ = data;
464 }
465
GetData()466 EncodeDataType GetData() const
467 {
468 return data_;
469 }
470
GetData()471 EncodeDataType GetData()
472 {
473 return data_;
474 }
475
GetCode()476 EncodeDataType GetCode() const
477 {
478 return data_;
479 }
480
GetCode()481 EncodeDataType GetCode()
482 {
483 return data_;
484 }
485
SetCodeInfo(Span<uint8_t> data)486 void SetCodeInfo(Span<uint8_t> data)
487 {
488 codeInfoData_ = data.SubSpan<const uint8_t>(0, data.size());
489 }
490
GetCodeInfoData()491 Span<const uint8_t> GetCodeInfoData() const
492 {
493 return codeInfoData_;
494 }
495
496 void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
497 {
498 if (prefix != nullptr) {
499 out << prefix;
500 }
501 out << "'\n used scalar regs: ";
502 if (usedRegs_ != nullptr) {
503 for (unsigned i = 0; i < usedRegs_->size(); ++i) {
504 if (usedRegs_->at(i)) {
505 out << i << " ";
506 }
507 }
508 }
509 out << "\n used float regs: ";
510 if (usedRegs_ != nullptr) {
511 for (unsigned i = 0; i < usedVregs_->size(); ++i) {
512 if (usedVregs_->at(i)) {
513 out << i << " ";
514 }
515 }
516 }
517 out << std::endl;
518 }
519
520 // Get registers mask which used in graph
521 template <DataType::Type REG_TYPE>
GetUsedRegs()522 ArenaVector<bool> *GetUsedRegs() const
523 {
524 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
525 if constexpr (REG_TYPE == DataType::INT64) {
526 return usedRegs_;
527 }
528 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
529 if constexpr (REG_TYPE == DataType::FLOAT64) {
530 return usedVregs_;
531 }
532 UNREACHABLE();
533 return nullptr;
534 }
535
SetRegUsage(Register reg,DataType::Type type)536 void SetRegUsage(Register reg, DataType::Type type)
537 {
538 ASSERT(reg != GetInvalidReg());
539 if (DataType::IsFloatType(type)) {
540 SetUsedReg<DataType::FLOAT64>(reg);
541 } else {
542 SetUsedReg<DataType::INT64>(reg);
543 }
544 }
545
SetRegUsage(Location location)546 void SetRegUsage(Location location)
547 {
548 ASSERT(location.IsFixedRegister());
549 if (location.IsFpRegister()) {
550 SetUsedReg<DataType::FLOAT64>(location.GetValue());
551 } else {
552 SetUsedReg<DataType::INT64>(location.GetValue());
553 }
554 }
555
556 template <DataType::Type REG_TYPE>
SetUsedReg(Register reg)557 void SetUsedReg(Register reg)
558 {
559 ArenaVector<bool> *graphRegs = nullptr;
560 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
561 if constexpr (REG_TYPE == DataType::INT64) {
562 graphRegs = usedRegs_;
563 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
564 } else if constexpr (REG_TYPE == DataType::FLOAT64) {
565 graphRegs = usedVregs_;
566 } else {
567 UNREACHABLE();
568 }
569 ASSERT(graphRegs != nullptr);
570 ASSERT(reg < graphRegs->size());
571 (*graphRegs)[reg] = true;
572 }
573
574 template <DataType::Type REG_TYPE>
InitUsedRegs(const ArenaVector<bool> * usedRegs)575 void InitUsedRegs(const ArenaVector<bool> *usedRegs)
576 {
577 if (usedRegs == nullptr) {
578 return;
579 }
580 ArenaVector<bool> *graphRegs = nullptr;
581 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
582 if constexpr (REG_TYPE == DataType::INT64) {
583 usedRegs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
584 graphRegs = usedRegs_;
585 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
586 } else if constexpr (REG_TYPE == DataType::FLOAT64) {
587 usedVregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
588 graphRegs = usedVregs_;
589 } else {
590 UNREACHABLE();
591 }
592 graphRegs->resize(usedRegs->size());
593 std::copy(usedRegs->begin(), usedRegs->end(), graphRegs->begin());
594 }
595
596 Register GetZeroReg() const;
597 Register GetArchTempReg() const;
598 Register GetArchTempVReg() const;
599 // Get registers mask which used in codegen, runtime e.t.c
600 RegMask GetArchUsedRegs();
601 void SetArchUsedRegs(RegMask mask);
602
603 // Get vector registers mask which used in codegen, runtime e.t.c
604 VRegMask GetArchUsedVRegs();
605
606 // Return true if one 64-bit scalar register can be split to 2 32-bit
607 bool IsRegScalarMapped() const;
608
GetStackSlotsCount()609 uint32_t GetStackSlotsCount() const
610 {
611 return stackSlotCount_;
612 }
613
SetStackSlotsCount(uint32_t stackSlotCount)614 void SetStackSlotsCount(uint32_t stackSlotCount)
615 {
616 stackSlotCount_ = stackSlotCount;
617 }
618
UpdateStackSlotsCount(uint32_t stackSlotCount)619 void UpdateStackSlotsCount(uint32_t stackSlotCount)
620 {
621 stackSlotCount_ = std::max(stackSlotCount_, stackSlotCount);
622 }
623
624 uint32_t GetParametersSlotsCount() const;
625
GetExtSlotsStart()626 uint32_t GetExtSlotsStart() const
627 {
628 return extStackSlot_;
629 }
630
SetExtSlotsStart(uint32_t extStackSlot)631 void SetExtSlotsStart(uint32_t extStackSlot)
632 {
633 extStackSlot_ = extStackSlot;
634 }
635
636 BasicBlock *CreateEmptyBlock(uint32_t guestPc = INVALID_PC);
637 BasicBlock *CreateEmptyBlock(BasicBlock *baseBlock);
638 #ifndef NDEBUG
639 BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guestPc);
640 #endif
641 BasicBlock *CreateStartBlock();
642 BasicBlock *CreateEndBlock(uint32_t guestPc = INVALID_PC);
GetFirstConstInst()643 ConstantInst *GetFirstConstInst()
644 {
645 return firstConstInst_;
646 }
SetFirstConstInst(ConstantInst * constInst)647 void SetFirstConstInst(ConstantInst *constInst)
648 {
649 firstConstInst_ = constInst;
650 }
651
GetNullPtrInst()652 Inst *GetNullPtrInst() const
653 {
654 return nullptrInst_;
655 }
HasNullPtrInst()656 bool HasNullPtrInst() const
657 {
658 return nullptrInst_ != nullptr;
659 }
UnsetNullPtrInst()660 void UnsetNullPtrInst()
661 {
662 ASSERT(HasNullPtrInst());
663 nullptrInst_ = nullptr;
664 }
665 Inst *GetOrCreateNullPtr();
666
GetUndefinedInst()667 Inst *GetUndefinedInst() const
668 {
669 return undefinedInst_;
670 }
HasUndefinedInst()671 bool HasUndefinedInst() const
672 {
673 return undefinedInst_ != nullptr;
674 }
UnsetUndefinedInst()675 void UnsetUndefinedInst()
676 {
677 ASSERT(HasUndefinedInst());
678 undefinedInst_ = nullptr;
679 }
680 Inst *GetOrCreateUndefinedInst();
681
682 /// Find constant in the list, return nullptr if not found
683 ConstantInst *FindConstant(DataType::Type type, uint64_t value);
684 /// Find constant in the list or create new one and insert at the end
685 template <typename T>
686 ConstantInst *FindOrCreateConstant(T value);
687
688 /**
689 * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
690 * @param inst Constant instruction to be added
691 * @return Found instruction or inst if not found
692 */
693 ConstantInst *FindOrAddConstant(ConstantInst *inst);
694
695 ParameterInst *AddNewParameter(uint16_t argNumber);
696
AddNewParameter(uint16_t argNumber,DataType::Type type)697 ParameterInst *AddNewParameter(uint16_t argNumber, DataType::Type type)
698 {
699 ParameterInst *param = AddNewParameter(argNumber);
700 param->SetType(type);
701 return param;
702 }
703
704 ParameterInst *FindParameter(uint16_t argNumber);
705
706 /*
707 * The function remove the ConstantInst from the graph list
708 * !NOTE ConstantInst isn't removed from BasicBlock list
709 */
710 void RemoveConstFromList(ConstantInst *constInst);
711
GetSpilledConstant(ImmTableSlot slot)712 ConstantInst *GetSpilledConstant(ImmTableSlot slot)
713 {
714 ASSERT(static_cast<size_t>(slot) < spilledConstants_.size());
715 return spilledConstants_[slot];
716 }
717
AddSpilledConstant(ConstantInst * constInst)718 ImmTableSlot AddSpilledConstant(ConstantInst *constInst)
719 {
720 // Constant already in the table
721 auto currentSlot = constInst->GetImmTableSlot();
722 if (currentSlot != GetInvalidImmTableSlot()) {
723 ASSERT(spilledConstants_[currentSlot] == constInst);
724 return currentSlot;
725 }
726
727 auto count = spilledConstants_.size();
728 if (count >= GetMaxNumImmSlots()) {
729 return GetInvalidImmTableSlot();
730 }
731 spilledConstants_.push_back(constInst);
732 constInst->SetImmTableSlot(count);
733 return ImmTableSlot(count);
734 }
735
FindSpilledConstantSlot(ConstantInst * constInst)736 ImmTableSlot FindSpilledConstantSlot(ConstantInst *constInst) const
737 {
738 auto slot = std::find(spilledConstants_.begin(), spilledConstants_.end(), constInst);
739 if (slot == spilledConstants_.end()) {
740 return GetInvalidImmTableSlot();
741 }
742 return std::distance(spilledConstants_.begin(), slot);
743 }
744
GetSpilledConstantsCount()745 size_t GetSpilledConstantsCount() const
746 {
747 return spilledConstants_.size();
748 }
749
HasAvailableConstantSpillSlots()750 bool HasAvailableConstantSpillSlots() const
751 {
752 return GetSpilledConstantsCount() < GetMaxNumImmSlots();
753 }
754
begin()755 auto begin() // NOLINT(readability-identifier-naming)
756 {
757 return vectorBb_.begin();
758 }
begin()759 auto begin() const // NOLINT(readability-identifier-naming)
760 {
761 return vectorBb_.begin();
762 }
end()763 auto end() // NOLINT(readability-identifier-naming)
764 {
765 return vectorBb_.end();
766 }
end()767 auto end() const // NOLINT(readability-identifier-naming)
768 {
769 return vectorBb_.end();
770 }
771
772 void Dump(std::ostream *out) const;
773
GetRootLoop()774 Loop *GetRootLoop()
775 {
776 return rootLoop_;
777 }
GetRootLoop()778 const Loop *GetRootLoop() const
779 {
780 return rootLoop_;
781 }
782
SetRootLoop(Loop * rootLoop)783 void SetRootLoop(Loop *rootLoop)
784 {
785 rootLoop_ = rootLoop;
786 }
787
SetHasIrreducibleLoop(bool hasIrrLoop)788 void SetHasIrreducibleLoop(bool hasIrrLoop)
789 {
790 FlagIrredicibleLoop::Set(hasIrrLoop, &bitFields_);
791 }
792
SetHasInfiniteLoop(bool hasInfLoop)793 void SetHasInfiniteLoop(bool hasInfLoop)
794 {
795 FlagInfiniteLoop::Set(hasInfLoop, &bitFields_);
796 }
797
SetHasFloatRegs()798 void SetHasFloatRegs()
799 {
800 FlagFloatRegs::Set(true, &bitFields_);
801 }
802
803 bool HasLoop() const;
804 PANDA_PUBLIC_API bool HasIrreducibleLoop() const;
805 bool HasInfiniteLoop() const;
806 bool HasFloatRegs() const;
807
808 /**
809 * Try-catch info
810 * Vector of begin try-blocks in order they were declared in the bytecode
811 */
AppendTryBeginBlock(const BasicBlock * block)812 void AppendTryBeginBlock(const BasicBlock *block)
813 {
814 tryBeginBlocks_.push_back(block);
815 }
816
EraseTryBeginBlock(const BasicBlock * block)817 void EraseTryBeginBlock(const BasicBlock *block)
818 {
819 auto it = std::find(tryBeginBlocks_.begin(), tryBeginBlocks_.end(), block);
820 if (it == tryBeginBlocks_.end()) {
821 ASSERT(false && "Trying to remove non try_begin block");
822 return;
823 }
824 tryBeginBlocks_.erase(it);
825 }
826
GetTryBeginBlocks()827 const auto &GetTryBeginBlocks() const
828 {
829 return tryBeginBlocks_;
830 }
831
832 void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rmPred);
833
FindThrowBlock(BasicBlock * block)834 bool FindThrowBlock(BasicBlock *block)
835 {
836 auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
837 return (it != throwBlocks_.end());
838 }
839
AppendThrowBlock(BasicBlock * block)840 bool AppendThrowBlock(BasicBlock *block)
841 {
842 if (!FindThrowBlock(block)) {
843 throwBlocks_.insert(block);
844 return true;
845 }
846 return false;
847 }
848
EraseThrowBlock(BasicBlock * block)849 bool EraseThrowBlock(BasicBlock *block)
850 {
851 auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
852 if (it == throwBlocks_.end()) {
853 return false;
854 }
855 throwBlocks_.erase(it);
856 return true;
857 }
858
GetThrowBlocks()859 const auto &GetThrowBlocks() const
860 {
861 return throwBlocks_;
862 }
863
ClearThrowBlocks()864 void ClearThrowBlocks()
865 {
866 throwBlocks_.clear();
867 }
868
AppendThrowableInst(const Inst * inst,BasicBlock * catchHandler)869 void AppendThrowableInst(const Inst *inst, BasicBlock *catchHandler)
870 {
871 auto it = throwableInsts_.emplace(inst, GetAllocator()->Adapter()).first;
872 it->second.push_back(catchHandler);
873 }
874
IsInstThrowable(const Inst * inst)875 bool IsInstThrowable(const Inst *inst) const
876 {
877 return throwableInsts_.count(inst) > 0;
878 }
879
880 void RemoveThrowableInst(const Inst *inst);
881 PANDA_PUBLIC_API void ReplaceThrowableInst(Inst *oldInst, Inst *newInst);
882
GetThrowableInstHandlers(const Inst * inst)883 const auto &GetThrowableInstHandlers(const Inst *inst) const
884 {
885 ASSERT(IsInstThrowable(inst));
886 return throwableInsts_.at(inst);
887 }
888
ClearTryCatchInfo()889 void ClearTryCatchInfo()
890 {
891 throwableInsts_.clear();
892 tryBeginBlocks_.clear();
893 }
894
895 void DumpThrowableInsts(std::ostream *out) const;
896
897 /**
898 * Run pass specified by template argument T.
899 * Optimization passes might take additional arguments that will passed to Optimization's constructor.
900 * Analyses can't take additional arguments.
901 * @tparam T Type of pass
902 * @param args Additional arguments for optimizations passes
903 * @return true if pass was successful
904 */
905 template <typename T, typename... Args>
RunPass(Args...args)906 bool RunPass(Args... args)
907 {
908 ASSERT(GetPassManager());
909 return passManager_.RunPass<T>(std::forward<Args>(args)...);
910 }
911 template <typename T, typename... Args>
RunPass(Args...args)912 bool RunPass(Args... args) const
913 {
914 ASSERT(GetPassManager());
915 return passManager_.RunPass<T>(std::forward<Args>(args)...);
916 }
917
918 template <typename T>
RunPass(T * pass)919 bool RunPass(T *pass)
920 {
921 ASSERT(GetPassManager());
922 return passManager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
923 }
924
925 /**
926 * Get analysis instance.
927 * All analyses are reside in Graph object in composition relationship.
928 * @tparam T Type of analysis
929 * @return Reference to analysis instance
930 */
931 template <typename T>
GetAnalysis()932 T &GetAnalysis()
933 {
934 ASSERT(GetPassManager());
935 return GetPassManager()->GetAnalysis<T>();
936 }
937 template <typename T>
GetAnalysis()938 const T &GetAnalysis() const
939 {
940 ASSERT(GetPassManager());
941 return passManager_.GetAnalysis<T>();
942 }
943
944 /**
945 * Same as GetAnalysis but additionaly checck that analysis in valid state.
946 * @tparam T Type of analysis
947 * @return Reference to analysis instance
948 */
949 template <typename T>
GetValidAnalysis()950 T &GetValidAnalysis()
951 {
952 RunPass<T>();
953 ASSERT(IsAnalysisValid<T>());
954 return GetAnalysis<T>();
955 }
956 template <typename T>
GetValidAnalysis()957 const T &GetValidAnalysis() const
958 {
959 RunPass<T>();
960 ASSERT(IsAnalysisValid<T>());
961 return GetAnalysis<T>();
962 }
963
964 /**
965 * Return true if Analysis valid, false otherwise
966 * @tparam T Type of analysis
967 */
968 template <typename T>
IsAnalysisValid()969 bool IsAnalysisValid() const
970 {
971 return GetAnalysis<T>().IsValid();
972 }
973
974 /**
975 * Reset valid state of specified analysis
976 * @tparam T Type of analysis
977 */
978 template <typename T>
InvalidateAnalysis()979 void InvalidateAnalysis()
980 {
981 ASSERT(GetPassManager());
982 GetPassManager()->GetAnalysis<T>().SetValid(false);
983 }
984
985 /// Accessors to the number of current instruction id.
GetCurrentInstructionId()986 auto GetCurrentInstructionId() const
987 {
988 return instrCurrentId_;
989 }
SetCurrentInstructionId(size_t v)990 auto SetCurrentInstructionId(size_t v)
991 {
992 instrCurrentId_ = v;
993 }
994
995 /// RuntimeInterface accessors
GetRuntime()996 RuntimeInterface *GetRuntime() const
997 {
998 return runtime_;
999 }
SetRuntime(RuntimeInterface * runtime)1000 void SetRuntime(RuntimeInterface *runtime)
1001 {
1002 runtime_ = runtime;
1003 }
GetMethod()1004 auto GetMethod() const
1005 {
1006 return method_;
1007 }
SetMethod(RuntimeInterface::MethodPtr method)1008 auto SetMethod(RuntimeInterface::MethodPtr method)
1009 {
1010 method_ = method;
1011 }
1012
1013 Encoder *GetEncoder();
1014 RegistersDescription *GetRegisters() const;
1015 CallingConvention *GetCallingConvention();
1016 const MethodProperties &GetMethodProperties();
1017 void ResetParameterInfo();
1018 SpillFillData GetDataForNativeParam(DataType::Type type);
1019
1020 template <bool GRAPH_ENCODED = false>
EstimateCodeSize()1021 size_t EstimateCodeSize()
1022 {
1023 if constexpr (GRAPH_ENCODED) {
1024 return encoder_->BufferSize();
1025 }
1026 auto maxIrInstsCount = GetCurrentInstructionId();
1027 auto maxArchInstsPerIrInsts = GetEncoder()->MaxArchInstPerEncoded();
1028 auto maxBytesInArchInst = GetInstructionSizeBits(GetArch());
1029 return maxIrInstsCount * maxArchInstsPerIrInsts * maxBytesInArchInst;
1030 }
1031
GetEventWriter()1032 EventWriter &GetEventWriter()
1033 {
1034 return eventWriter_;
1035 }
1036
SetCodeBuilder(CodeInfoBuilder * builder)1037 void SetCodeBuilder(CodeInfoBuilder *builder)
1038 {
1039 ciBuilder_ = builder;
1040 }
1041
    // clang-format off

    /// Create instruction by opcode
    // Runtime dispatch over all opcodes: each case allocates the concrete
    // instruction class from the graph's arena allocator and assigns the next
    // sequential instruction id. Returns nullptr for an unknown opcode.
    // NOLINTNEXTLINE(readability-function-size)
    [[nodiscard]] Inst* CreateInst(Opcode opc) const
    {
        switch (opc) {
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                      \
            case Opcode::OPCODE: {                                          \
                auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE);    \
                inst->SetId(instrCurrentId_++);                             \
                if (IsAbcKit()) {                                           \
                    SetAbcKitFlags(inst);                                   \
                }                                                           \
                return inst;                                                \
            }
            OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
            default:
                return nullptr;
        }
    }
    /// Define creation methods for all opcodes
    // For each opcode OPCODE this expands to a CreateInst##OPCODE(args...)
    // factory that forwards the arguments to the instruction constructor and
    // assigns the next sequential instruction id.
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                          \
    template <typename... Args>                                                                 \
    [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const {                              \
        auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE, std::forward<Args>(args)...);   \
        inst->SetId(instrCurrentId_++);                                                         \
        if (IsAbcKit()) {                                                                       \
            SetAbcKitFlags(inst);                                                               \
        }                                                                                       \
        return inst;                                                                            \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST

    // Clone-style overloads: create an instruction with the type and pc taken
    // from an existing instruction. The debug-info build additionally copies
    // the current method so inlined debug information stays attached.
#ifdef PANDA_COMPILER_DEBUG_INFO
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                          \
    template <typename... Args>                                                                 \
    [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {                  \
        auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
        new_inst->SetCurrentMethod(inst->GetCurrentMethod());                                   \
        return new_inst;                                                                        \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
#else
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                          \
    template <typename... Args>                                                                 \
    [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {                  \
        auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
        return new_inst;                                                                        \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
#endif

    // clang-format on
1108
1109 uint32_t GetBitFields()
1110 {
1111 return bitFields_;
1112 }
1113
SetBitFields(uint32_t bitFields)1114 void SetBitFields(uint32_t bitFields)
1115 {
1116 bitFields_ = bitFields;
1117 }
1118
NeedCleanup()1119 bool NeedCleanup() const
1120 {
1121 return FlagNeedCleanup::Get(bitFields_);
1122 }
1123
SetNeedCleanup(bool v)1124 void SetNeedCleanup(bool v)
1125 {
1126 FlagNeedCleanup::Set(v, &bitFields_);
1127 }
1128
IsJitOrOsrMode()1129 bool IsJitOrOsrMode() const
1130 {
1131 return !IsAotMode() && !IsBytecodeOptimizer() && SupportManagedCode();
1132 }
1133
IsOsrMode()1134 bool IsOsrMode() const
1135 {
1136 return mode_.IsOsr();
1137 }
1138
IsJitMode()1139 bool IsJitMode() const
1140 {
1141 return !IsOsrMode() && IsJitOrOsrMode();
1142 }
1143
IsBytecodeOptimizer()1144 bool IsBytecodeOptimizer() const
1145 {
1146 return mode_.IsBytecodeOpt();
1147 }
1148
IsDynamicMethod()1149 bool IsDynamicMethod() const
1150 {
1151 return mode_.IsDynamicMethod();
1152 }
1153
IsAbcKit()1154 bool IsAbcKit() const
1155 {
1156 #ifdef ENABLE_LIBABCKIT
1157 return mode_.IsAbcKit();
1158 #else
1159 return false;
1160 #endif
1161 }
1162
SupportManagedCode()1163 bool SupportManagedCode() const
1164 {
1165 return mode_.SupportManagedCode();
1166 }
1167
GetMode()1168 GraphMode GetMode() const
1169 {
1170 return mode_;
1171 }
1172
SetMode(GraphMode mode)1173 void SetMode(GraphMode mode)
1174 {
1175 mode_ = mode;
1176 }
1177
1178 #ifndef NDEBUG
GetCompilerMode()1179 compiler::inst_modes::Mode GetCompilerMode()
1180 {
1181 if (IsBytecodeOptimizer()) {
1182 return compiler::inst_modes::BYTECODE_OPT;
1183 }
1184 if (SupportManagedCode()) {
1185 return compiler::inst_modes::JIT_AOT;
1186 }
1187 return compiler::inst_modes::IRTOC;
1188 }
1189 #endif
1190
AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)1191 void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
1192 {
1193 singleImplementationList_.push_back(method);
1194 }
1195
SetDynamicMethod()1196 void SetDynamicMethod()
1197 {
1198 mode_.SetDynamicMethod(true);
1199 }
1200
SetAbcKit()1201 void SetAbcKit()
1202 {
1203 mode_.SetAbcKit(true);
1204 }
1205
SetDynamicStub()1206 void SetDynamicStub()
1207 {
1208 mode_.SetDynamicStub(true);
1209 }
1210
GetSingleImplementationList()1211 auto &GetSingleImplementationList()
1212 {
1213 return singleImplementationList_;
1214 }
1215
GetParentGraph()1216 Graph *GetParentGraph() const
1217 {
1218 return parentGraph_;
1219 }
1220
GetOutermostParentGraph()1221 Graph *GetOutermostParentGraph()
1222 {
1223 auto graph = this;
1224 while (graph->GetParentGraph() != nullptr) {
1225 graph = graph->GetParentGraph();
1226 }
1227 return graph;
1228 }
1229
SetVRegsCount(size_t count)1230 void SetVRegsCount(size_t count)
1231 {
1232 vregsCount_ = count;
1233 }
1234
GetVRegsCount()1235 size_t GetVRegsCount() const
1236 {
1237 return vregsCount_;
1238 }
1239
GetEnvCount()1240 size_t GetEnvCount() const
1241 {
1242 return (IsDynamicMethod() && !IsBytecodeOptimizer()) ? VRegInfo::ENV_COUNT : 0;
1243 }
1244
GetRelocationHandler()1245 RelocationHandler *GetRelocationHandler()
1246 {
1247 return relocationHandler_;
1248 }
1249
SetRelocationHandler(RelocationHandler * handler)1250 void SetRelocationHandler(RelocationHandler *handler)
1251 {
1252 relocationHandler_ = handler;
1253 }
1254
1255 int64_t GetBranchCounter(const BasicBlock *block, bool trueSucc);
1256
1257 int64_t GetThrowCounter(const BasicBlock *block);
1258
1259 /// This class provides methods for ranged-based `for` loop over all parameters in the graph.
1260 class ParameterList {
1261 public:
1262 class Iterator {
1263 public:
Iterator(Inst * inst)1264 explicit Iterator(Inst *inst) : inst_(inst) {}
1265
1266 Iterator &operator++()
1267 {
1268 for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
1269 inst_ = inst_->GetNext()) {
1270 }
1271 return *this;
1272 }
1273 bool operator!=(const Iterator &other)
1274 {
1275 return inst_ != other.inst_;
1276 }
1277 Inst *operator*()
1278 {
1279 return inst_;
1280 }
1281 Inst *operator->()
1282 {
1283 return inst_;
1284 }
1285
1286 private:
1287 Inst *inst_ {nullptr};
1288 };
1289
ParameterList(const Graph * graph)1290 explicit ParameterList(const Graph *graph) : graph_(graph) {}
1291
1292 // NOLINTNEXTLINE(readability-identifier-naming)
1293 PANDA_PUBLIC_API Iterator begin();
1294 // NOLINTNEXTLINE(readability-identifier-naming)
end()1295 static Iterator end()
1296 {
1297 return Iterator(nullptr);
1298 }
1299
1300 private:
1301 const Graph *graph_ {nullptr};
1302 };
1303
1304 /**
1305 * Get list of all parameters
1306 * @return instance of the ParameterList class
1307 */
GetParameters()1308 ParameterList GetParameters() const
1309 {
1310 return ParameterList(this);
1311 }
1312
1313 void InitDefaultLocations();
1314
SupportsIrtocBarriers()1315 bool SupportsIrtocBarriers() const
1316 {
1317 return (IsJitOrOsrMode() || IsAotMode() || GetMode().IsInterpreter() || GetMode().IsInterpreterEntry()) &&
1318 !IsDynamicMethod() && GetArch() != Arch::AARCH32;
1319 }
1320
SetMaxInliningDepth(uint32_t depth)1321 void SetMaxInliningDepth(uint32_t depth)
1322 {
1323 maxInliningDepth_ = std::max(maxInliningDepth_, depth);
1324 }
1325
GetMaxInliningDepth()1326 uint32_t GetMaxInliningDepth()
1327 {
1328 return maxInliningDepth_;
1329 }
1330
private:
    // Insert a constant instruction into the start block (see FindOrCreateConstant).
    void AddConstInStartBlock(ConstantInst *constInst);

    NO_MOVE_SEMANTIC(Graph);
    NO_COPY_SEMANTIC(Graph);

private:
    // Deepest inlining chain recorded via SetMaxInliningDepth.
    uint32_t maxInliningDepth_ {0};
    // Arena for IR entities; localAllocator_ is for pass-local data.
    ArenaAllocator *const allocator_;
    ArenaAllocator *const localAllocator_;

    Arch arch_ {RUNTIME_ARCH};

    // List of blocks in insertion order.
    ArenaVector<BasicBlock *> vectorBb_;
    BasicBlock *startBlock_ {nullptr};
    BasicBlock *endBlock_ {nullptr};

    Loop *rootLoop_ {nullptr};

    AotData *aotData_ {nullptr};

    // Packed boolean flags; the Flag* aliases below define the bit layout.
    // NOTE: the set of flags differs between release and debug/AbcKit builds
    // (see the #if below), so LastField resolves to a different bit.
    uint32_t bitFields_ {0};
    using FlagDFConstruct = BitField<bool, 0, 1>;
    using FlagNeedCleanup = FlagDFConstruct::NextFlag;
    using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
    using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
    using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
    using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
    using FlagIrtocPrologEpilogOptimized = FlagDefaultLocationsInit::NextFlag;
    using FlagThrowApplied = FlagIrtocPrologEpilogOptimized::NextFlag;
    using FlagUnrollComplete = FlagThrowApplied::NextFlag;
#if defined(NDEBUG) && !defined(ENABLE_LIBABCKIT)
    using LastField = FlagUnrollComplete;
#else
    // Extra bookkeeping flags available only in debug or AbcKit builds.
    using FlagRegallocApplied = FlagUnrollComplete::NextFlag;
    using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
    using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
    using FlagLowLevelInstnsEnabled = FlagInliningComplete::NextFlag;
    using FlagDynUnitTest = FlagLowLevelInstnsEnabled::NextFlag;
    using LastField = FlagDynUnitTest;
#endif  // NDEBUG

#ifdef PANDA_COMPILER_DEBUG_INFO
    using FlagLineDebugInfoEnabled = LastField::NextFlag;
#endif

    // codegen data
    EncodeDataType data_;
    Span<const uint8_t> codeInfoData_;
    ArenaVector<bool> *usedRegs_ {nullptr};
    ArenaVector<bool> *usedVregs_ {nullptr};

    // NOTE (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
    ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwableInsts_;

    RegMask archUsedRegs_ {0};

    // Next instruction id; mutable because const factory methods advance it.
    mutable size_t instrCurrentId_ {0};
    // first constant instruction in graph !NOTE rewrite it to hash-map
    ConstantInst *firstConstInst_ {nullptr};
    Inst *nullptrInst_ {nullptr};
    Inst *undefinedInst_ {nullptr};
    RuntimeInterface *runtime_ {nullptr};
    RuntimeInterface::MethodPtr method_ {nullptr};

    Encoder *encoder_ {nullptr};

    mutable RegistersDescription *registers_ {nullptr};

    CallingConvention *callconv_ {nullptr};

    // Cached lazily by GetMethodProperties (empty until first use).
    std::optional<MethodProperties> methodProperties_ {std::nullopt};

    ParameterInfo *paramInfo_ {nullptr};

    RelocationHandler *relocationHandler_ {nullptr};

    mutable PassManager passManager_;
    EventWriter eventWriter_;

    GraphMode mode_;

    CodeInfoBuilder *ciBuilder_ {nullptr};

    ArenaVector<RuntimeInterface::MethodPtr> singleImplementationList_;
    ArenaVector<const BasicBlock *> tryBeginBlocks_;
    ArenaSet<BasicBlock *> throwBlocks_;
    ArenaVector<ConstantInst *> spilledConstants_;
    // Graph that inlines this graph
    Graph *parentGraph_ {nullptr};
    // Number of used stack slots
    uint32_t stackSlotCount_ {0};
    // Number of used stack slots for parameters
    uint32_t paramSlotsCount_ {0};
    // First language extension slot
    uint32_t extStackSlot_ {0};
    // Number of the virtual registers used in the compiled method (inlined methods aren't included).
    uint32_t vregsCount_ {0};
    // Source language of the method being compiled
    SourceLanguage lang_ {SourceLanguage::PANDA_ASSEMBLY};
};
1433
1434 class MarkerHolder {
1435 public:
1436 NO_COPY_SEMANTIC(MarkerHolder);
1437 NO_MOVE_SEMANTIC(MarkerHolder);
1438
MarkerHolder(const Graph * graph)1439 explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
1440 {
1441 ASSERT(marker_ != UNDEF_MARKER);
1442 }
1443
~MarkerHolder()1444 ~MarkerHolder()
1445 {
1446 graph_->EraseMarker(marker_);
1447 }
1448
GetMarker()1449 Marker GetMarker()
1450 {
1451 return marker_;
1452 }
1453
1454 private:
1455 const Graph *graph_;
1456 Marker marker_ {UNDEF_MARKER};
1457 };
1458
1459 template <typename T>
FindOrCreateConstant(T value)1460 ConstantInst *Graph::FindOrCreateConstant(T value)
1461 {
1462 bool isSupportInt32 = IsBytecodeOptimizer();
1463 if (firstConstInst_ == nullptr) {
1464 firstConstInst_ = CreateInstConstant(value, isSupportInt32);
1465 AddConstInStartBlock(firstConstInst_);
1466 return firstConstInst_;
1467 }
1468 ConstantInst *currentConst = firstConstInst_;
1469 ConstantInst *prevConst = nullptr;
1470 while (currentConst != nullptr) {
1471 if (currentConst->IsEqualConst(value, isSupportInt32)) {
1472 return currentConst;
1473 }
1474 prevConst = currentConst;
1475 currentConst = currentConst->GetNextConst();
1476 }
1477 ASSERT(prevConst != nullptr);
1478 auto *newConst = CreateInstConstant(value, isSupportInt32);
1479 AddConstInStartBlock(newConst);
1480
1481 prevConst->SetNextConst(newConst);
1482 return newConst;
1483 }
1484
// Invalidate analyses that depend on the ordering of basic blocks
// (implementation outside this header).
void InvalidateBlocksOrderAnalyzes(Graph *graph);
// Mark blocks on loop exits with the given marker -- see definition.
void MarkLoopExits(const Graph *graph, Marker marker);
// Build a human-readable full name for the method; exact format is defined
// in the implementation.
std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
// Offset of the object member described by objType/field/typeId -- see definition.
size_t GetObjectOffset(const Graph *graph, ObjectType objType, RuntimeInterface::FieldPtr field, uint32_t typeId);
1489 } // namespace ark::compiler
1490 #endif // COMPILER_OPTIMIZER_IR_GRAPH_H
1491