1 /*
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
17 #define COMPILER_OPTIMIZER_IR_GRAPH_H
18
19 #include "aot_data.h"
20 #include "basicblock.h"
21 #include "compiler_events_gen.h"
22 #include "inst.h"
23 #include "marker.h"
24 #include "optimizer/code_generator/method_properties.h"
25 #include "optimizer/pass_manager.h"
26 #include "utils/arena_containers.h"
27 #include <algorithm>
28 #include <optional>
29
30 namespace ark {
31 class Method;
32 class CodeAllocator;
33 } // namespace ark
34
35 namespace ark::compiler {
36 class BasicBlock;
37 class Graph;
38 class RuntimeInfo;
39 class PassManager;
40 class LivenessAnalyzer;
41 class DominatorsTree;
42 class Rpo;
43 class BoundsRangeInfo;
44 class Loop;
45 class CodeInfoBuilder;
46
47 class Encoder;
48 class CallingConvention;
49 class ParameterInfo;
50 class RegistersDescription;
51 class RelocationHandler;
52
53 enum AliasType : uint8_t;
54
/// Specifies graph compilation mode.
/// Each mode is a single bit inside `value_`; modes combine via the friend operator|.
class GraphMode {
public:
    explicit GraphMode(uint32_t value) : value_(value) {}
// Generates Set<name>(bool)/Is<name>() accessors backed by the Flag<name> bit field.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECLARE_GRAPH_MODE_MODIFIERS(name) \
    void Set##name(bool v)                 \
    {                                      \
        Flag##name ::Set(v, &value_);      \
    }                                      \
    bool Is##name() const                  \
    {                                      \
        return Flag##name ::Get(value_);   \
    }

// Generates a named factory (e.g. GraphMode::Osr(true)) plus the accessors above.
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECLARE_GRAPH_MODE(name)                  \
    static GraphMode name(bool set = true)        \
    {                                             \
        return GraphMode(Flag##name ::Encode(set)); \
    }                                             \
    DECLARE_GRAPH_MODE_MODIFIERS(name)

    DECLARE_GRAPH_MODE(Osr);
    // The graph is used in BytecodeOptimizer mode
    DECLARE_GRAPH_MODE(BytecodeOpt);
    // The method from dynamic language
    DECLARE_GRAPH_MODE(DynamicMethod);
    // The method from dynamic language uses common calling convention
    DECLARE_GRAPH_MODE(DynamicStub);
    // Graph will be compiled with native calling convention
    DECLARE_GRAPH_MODE(Native);
    // FastPath from compiled code to runtime
    DECLARE_GRAPH_MODE(FastPath);
    // Boundary frame is used for compiled code
    DECLARE_GRAPH_MODE(Boundary);
    // Graph will be compiled for calling inside interpreter
    DECLARE_GRAPH_MODE(Interpreter);
    // Graph will be compiled for interpreter main loop
    DECLARE_GRAPH_MODE(InterpreterEntry);

#undef DECLARE_GRAPH_MODE
#undef DECLARE_GRAPH_MODE_MODIFIERS

    /// True when none of the special (non-managed) compilation modes is set.
    bool SupportManagedCode() const
    {
        return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
    }

    void Dump(std::ostream &stm);

private:
    // Consecutive one-bit flags; order must match the DECLARE_GRAPH_MODE list above.
    using FlagOsr = BitField<bool, 0, 1>;
    using FlagBytecodeOpt = FlagOsr::NextFlag;
    using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
    using FlagDynamicStub = FlagDynamicMethod::NextFlag;
    using FlagNative = FlagDynamicStub::NextFlag;
    using FlagFastPath = FlagNative::NextFlag;
    using FlagBoundary = FlagFastPath::NextFlag;
    using FlagInterpreter = FlagBoundary::NextFlag;
    using FlagInterpreterEntry = FlagInterpreter::NextFlag;

    uint32_t value_ {0};

    // operator| needs direct access to value_ to merge flag sets.
    friend GraphMode operator|(GraphMode a, GraphMode b);
};
121
122 inline GraphMode operator|(GraphMode a, GraphMode b)
123 {
124 return GraphMode(a.value_ | b.value_);
125 }
126
127 using EncodeDataType = Span<uint8_t>;
128
129 class Graph final : public MarkerMgr {
130 public:
    /// Convenience constructor: default runtime, no method, OSR disabled.
    explicit Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch)
        : Graph(allocator, localAllocator, arch, false)
    {
    }

    /// Convenience constructor: default runtime, no method, explicit OSR flag.
    Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool osrMode)
        : Graph(allocator, localAllocator, arch, nullptr, GetDefaultRuntime(), osrMode)
    {
    }

    /// Convenience constructor for dynamic-language / bytecode-optimizer graphs (OSR off).
    Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool dynamicMethod, bool bytecodeOpt)
        : Graph(allocator, localAllocator, arch, nullptr, GetDefaultRuntime(), false, nullptr, dynamicMethod,
                bytecodeOpt)
    {
    }

    /// Convenience constructor with explicit method and runtime, no parent graph.
    Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osrMode)
        : Graph(allocator, localAllocator, arch, method, runtime, osrMode, nullptr)
    {
    }

    /// Packs the individual boolean flags into a GraphMode and delegates to the primary constructor.
    Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osrMode, Graph *parent, bool dynamicMethod = false, bool bytecodeOpt = false)
        : Graph(allocator, localAllocator, arch, method, runtime, parent,
                GraphMode::Osr(osrMode) | GraphMode::BytecodeOpt(bytecodeOpt) | GraphMode::DynamicMethod(dynamicMethod))
    {
    }

    /**
     * Primary constructor — every other constructor delegates here.
     * @param allocator      Main arena; owns all graph-lifetime data (blocks, insts, containers)
     * @param localAllocator Arena for temporary, per-pass data
     * @param arch           Target architecture
     * @param method         Method being compiled (may be nullptr for synthetic graphs)
     * @param runtime        Runtime interface used for method/class queries
     * @param parent         Parent graph when this graph is created for inlining; pass manager is chained to it
     * @param mode           Combined compilation-mode flags
     */
    Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, Graph *parent, GraphMode mode)
        : allocator_(allocator),
          localAllocator_(localAllocator),
          arch_(arch),
          vectorBb_(allocator->Adapter()),
          throwableInsts_(allocator->Adapter()),
          runtime_(runtime),
          method_(method),
          // Child graphs share the parent's pass-manager chain.
          passManager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
          eventWriter_(runtime->GetClassNameFromMethod(method), runtime->GetMethodName(method)),
          mode_(mode),
          singleImplementationList_(allocator->Adapter()),
          tryBeginBlocks_(allocator->Adapter()),
          throwBlocks_(allocator->Adapter()),
          spilledConstants_(allocator->Adapter()),
          parentGraph_(parent)
    {
        // New graphs are assumed to need a cleanup pass until proven otherwise.
        SetNeedCleanup(true);
    }

    ~Graph() override;
182
    /// Create a child graph (e.g. for inlining `method`) that shares this graph's
    /// allocators, arch, runtime, mode and AOT data, and chains its pass manager to ours.
    Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
    {
        auto graph = GetAllocator()->New<Graph>(GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime(),
                                                this, mode_);
        graph->SetAotData(GetAotData());
        return graph;
    }
190
    /// Get default runtime interface object.
    /// Function-local static: initialized once on first use (thread-safe since C++11).
    static RuntimeInterface *GetDefaultRuntime()
    {
        static RuntimeInterface runtimeInterface;
        return &runtimeInterface;
    }
197
    /// Target architecture this graph is compiled for.
    Arch GetArch() const
    {
        return arch_;
    }

    /// Source language of the compiled method.
    SourceLanguage GetLanguage() const
    {
        return lang_;
    }

    void SetLanguage(SourceLanguage lang)
    {
        lang_ = lang;
    }

    // Append a block to the graph's block vector (defined in the .cpp).
    void AddBlock(BasicBlock *block);
#ifndef NDEBUG
    // Debug-only overload that assigns an explicit block id.
    void AddBlock(BasicBlock *block, uint32_t id);
#endif
    // Detach a block from the CFG without destroying it; optionally drops its
    // terminator and repairs the dominator tree.
    void DisconnectBlock(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
    // Recursive variant: also disconnects blocks that become unreachable.
    void DisconnectBlockRec(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);

    // Erase a block from the graph's block vector.
    void EraseBlock(BasicBlock *block);
    // Re-register a previously erased block.
    void RestoreBlock(BasicBlock *block);
    // Remove empty block. Block must have one successor and no Phis.
    void RemoveEmptyBlock(BasicBlock *block);

    // Remove empty block. Block may have Phis and can't be a loop pre-header.
    void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irrLoop = false);

    // Remove block predecessors.
    void RemovePredecessors(BasicBlock *block, bool removeLastInst = true);

    // Remove block successors.
    void RemoveSuccessors(BasicBlock *block);

    // Remove unreachable blocks.
    void RemoveUnreachableBlocks();
236
    /// End (exit) block of the CFG; may be nullptr until created.
    BasicBlock *GetEndBlock()
    {
        return endBlock_;
    }
    void SetEndBlock(BasicBlock *endBlock)
    {
        endBlock_ = endBlock;
    }
    bool HasEndBlock()
    {
        return endBlock_ != nullptr;
    }
    /// Start (entry) block of the CFG.
    BasicBlock *GetStartBlock()
    {
        return startBlock_;
    }
    BasicBlock *GetStartBlock() const
    {
        return startBlock_;
    }
    void SetStartBlock(BasicBlock *startBlock)
    {
        startBlock_ = startBlock;
    }
    /// All blocks indexed by id; erased slots stay as nullptr (see GetAliveBlocksCount).
    const ArenaVector<BasicBlock *> &GetVectorBlocks() const
    {
        return vectorBb_;
    }

    /// Number of non-null (alive) entries in the block vector.
    size_t GetAliveBlocksCount() const
    {
        return std::count_if(vectorBb_.begin(), vectorBb_.end(), [](BasicBlock *block) { return block != nullptr; });
    }

    PassManager *GetPassManager()
    {
        return &passManager_;
    }
    const PassManager *GetPassManager() const
    {
        return &passManager_;
    }
284
    // Bounds-range info produced by the corresponding analysis (defined in the .cpp).
    const BoundsRangeInfo *GetBoundsRangeInfo() const;

    // Blocks in reverse post-order.
    const ArenaVector<BasicBlock *> &GetBlocksRPO() const;

    // Blocks in the linear order chosen for code generation.
    const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;

    // Invoke `callback` for every instruction in the graph.
    template <class Callback>
    void VisitAllInstructions(Callback callback);

    // Query the alias analysis for the relation between two memory instructions.
    AliasType CheckInstAlias(Inst *mem1, Inst *mem2);

    /// Main allocator for graph, all related to Graph data should be allocated via this allocator.
    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    /// Allocator for temporary usage, when allocated data is no longer needed after optimization/analysis finished.
    ArenaAllocator *GetLocalAllocator() const
    {
        return localAllocator_;
    }
    /// True once the data-flow construction flag has been set.
    bool IsDFConstruct() const
    {
        return FlagDFConstruct::Get(bitFields_);
    }
    void SetDFConstruct()
    {
        FlagDFConstruct::Set(true, &bitFields_);
    }
314
    void SetAotData(AotData *data)
    {
        aotData_ = data;
    }
    AotData *GetAotData()
    {
        return aotData_;
    }
    const AotData *GetAotData() const
    {
        return aotData_;
    }

    /// AOT mode is implied by the presence of AOT data.
    bool IsAotMode() const
    {
        return aotData_ != nullptr;
    }

    /// AOT mode where class-hierarchy-analysis results must not be used
    /// (per AotData::GetUseCha — confirm semantics against AotData).
    bool IsAotNoChaMode() const
    {
        return aotData_ != nullptr && !aotData_->GetUseCha();
    }

    /// Any mode where code is produced ahead of execution (AOT or irtoc-style stubs).
    bool IsOfflineCompilationMode() const
    {
        return IsAotMode() || GetMode().IsInterpreter() || GetMode().IsFastPath() || GetMode().IsInterpreterEntry();
    }
342
    // --- One-bit status flags stored in bitFields_ ---

    bool IsDefaultLocationsInit() const
    {
        return FlagDefaultLocationsInit::Get(bitFields_);
    }
    void SetDefaultLocationsInit()
    {
        FlagDefaultLocationsInit::Set(true, &bitFields_);
    }
    bool IsIrtocPrologEpilogOptimized() const
    {
        return FlagIrtocPrologEpilogOptimized::Get(bitFields_);
    }
    void SetIrtocPrologEpilogOptimized()
    {
        FlagIrtocPrologEpilogOptimized::Set(true, &bitFields_);
    }
    bool IsUnrollComplete() const
    {
        return FlagUnrollComplete::Get(bitFields_);
    }
    void SetUnrollComplete()
    {
        FlagUnrollComplete::Set(true, &bitFields_);
    }
#ifndef NDEBUG
    // Debug-only flags used to assert pass ordering.
    bool IsRegAllocApplied() const
    {
        return FlagRegallocApplied::Get(bitFields_);
    }
    void SetRegAllocApplied()
    {
        FlagRegallocApplied::Set(true, &bitFields_);
    }
    bool IsRegAccAllocApplied() const
    {
        return FlagRegaccallocApplied::Get(bitFields_);
    }
    void SetRegAccAllocApplied()
    {
        FlagRegaccallocApplied::Set(true, &bitFields_);
    }
    // Inlining is reported complete in OSR mode regardless of the flag.
    bool IsInliningComplete() const
    {
        return FlagInliningComplete::Get(bitFields_) || IsOsrMode();
    }
    void SetInliningComplete()
    {
        FlagInliningComplete::Set(true, &bitFields_);
    }
    bool IsLowLevelInstructionsEnabled() const
    {
        return FlagLowLevelInstnsEnabled::Get(bitFields_);
    }
    void SetLowLevelInstructionsEnabled()
    {
        FlagLowLevelInstnsEnabled::Set(true, &bitFields_);
    }
    bool IsDynUnitTest() const
    {
        return FlagDynUnitTest::Get(bitFields_);
    }
    void SetDynUnitTestFlag()
    {
        FlagDynUnitTest::Set(true, &bitFields_);
    }
#else
    // In release builds register allocation is never reported as applied.
    bool IsRegAllocApplied() const
    {
        return false;
    }
#endif  // NDEBUG

    bool IsThrowApplied() const
    {
        return FlagThrowApplied::Get(bitFields_);
    }
    void SetThrowApplied()
    {
        FlagThrowApplied::Set(true, &bitFields_);
    }
    void UnsetThrowApplied()
    {
        FlagThrowApplied::Set(false, &bitFields_);
    }
427
#ifdef PANDA_COMPILER_DEBUG_INFO
    bool IsLineDebugInfoEnabled() const
    {
        return FlagLineDebugInfoEnabled::Get(bitFields_);
    }
    void SetLineDebugInfoEnabled()
    {
        FlagLineDebugInfoEnabled::Set(true, &bitFields_);
    }
#endif

    /// Store the encoded machine code buffer.
    /// NOTE: GetData()/GetCode() below all return this same buffer.
    void SetCode(EncodeDataType data)
    {
        data_ = data;
    }

    EncodeDataType GetData() const
    {
        return data_;
    }

    EncodeDataType GetData()
    {
        return data_;
    }

    EncodeDataType GetCode() const
    {
        return data_;
    }

    EncodeDataType GetCode()
    {
        return data_;
    }

    /// Store the code-info buffer as a read-only view over the same memory.
    void SetCodeInfo(Span<uint8_t> data)
    {
        codeInfoData_ = data.SubSpan<const uint8_t>(0, data.size());
    }

    Span<const uint8_t> GetCodeInfoData() const
    {
        return codeInfoData_;
    }
473
474 void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
475 {
476 if (prefix != nullptr) {
477 out << prefix;
478 }
479 out << "'\n used scalar regs: ";
480 if (usedRegs_ != nullptr) {
481 for (unsigned i = 0; i < usedRegs_->size(); ++i) {
482 if (usedRegs_->at(i)) {
483 out << i << " ";
484 }
485 }
486 }
487 out << "\n used float regs: ";
488 if (usedRegs_ != nullptr) {
489 for (unsigned i = 0; i < usedVregs_->size(); ++i) {
490 if (usedVregs_->at(i)) {
491 out << i << " ";
492 }
493 }
494 }
495 out << std::endl;
496 }
497
    // Get registers mask which used in graph.
    // Returns the scalar mask for INT64, the float mask for FLOAT64; any other
    // REG_TYPE is a programming error (UNREACHABLE).
    template <DataType::Type REG_TYPE>
    ArenaVector<bool> *GetUsedRegs() const
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REG_TYPE == DataType::INT64) {
            return usedRegs_;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REG_TYPE == DataType::FLOAT64) {
            return usedVregs_;
        }
        UNREACHABLE();
        return nullptr;
    }
513
SetRegUsage(Register reg,DataType::Type type)514 void SetRegUsage(Register reg, DataType::Type type)
515 {
516 ASSERT(reg != INVALID_REG);
517 if (DataType::IsFloatType(type)) {
518 SetUsedReg<DataType::FLOAT64>(reg);
519 } else {
520 SetUsedReg<DataType::INT64>(reg);
521 }
522 }
523
SetRegUsage(Location location)524 void SetRegUsage(Location location)
525 {
526 ASSERT(location.IsFixedRegister());
527 if (location.IsFpRegister()) {
528 SetUsedReg<DataType::FLOAT64>(location.GetValue());
529 } else {
530 SetUsedReg<DataType::INT64>(location.GetValue());
531 }
532 }
533
    // Mark a single register as used in the mask selected by REG_TYPE.
    // The corresponding mask must already exist (see InitUsedRegs) and be large enough.
    template <DataType::Type REG_TYPE>
    void SetUsedReg(Register reg)
    {
        ArenaVector<bool> *graphRegs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REG_TYPE == DataType::INT64) {
            graphRegs = usedRegs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (REG_TYPE == DataType::FLOAT64) {
            graphRegs = usedVregs_;
        } else {
            UNREACHABLE();
        }
        ASSERT(graphRegs != nullptr);
        ASSERT(reg < graphRegs->size());
        (*graphRegs)[reg] = true;
    }

    // Allocate the REG_TYPE mask in the graph's arena and copy `usedRegs` into it.
    // A nullptr argument is a no-op (mask stays uninitialized).
    template <DataType::Type REG_TYPE>
    void InitUsedRegs(const ArenaVector<bool> *usedRegs)
    {
        if (usedRegs == nullptr) {
            return;
        }
        ArenaVector<bool> *graphRegs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REG_TYPE == DataType::INT64) {
            usedRegs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graphRegs = usedRegs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (REG_TYPE == DataType::FLOAT64) {
            usedVregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graphRegs = usedVregs_;
        } else {
            UNREACHABLE();
        }
        graphRegs->resize(usedRegs->size());
        std::copy(usedRegs->begin(), usedRegs->end(), graphRegs->begin());
    }
573
    // Architecture-specific special registers (defined in the .cpp).
    Register GetZeroReg() const;
    Register GetArchTempReg() const;
    Register GetArchTempVReg() const;
    // Get registers mask which used in codegen, runtime e.t.c
    RegMask GetArchUsedRegs();
    void SetArchUsedRegs(RegMask mask);

    // Get vector registers mask which used in codegen, runtime e.t.c
    VRegMask GetArchUsedVRegs();

    // Return true if one 64-bit scalar register can be split to 2 32-bit
    bool IsRegScalarMapped() const;

    /// Number of stack slots the compiled frame needs.
    uint32_t GetStackSlotsCount() const
    {
        return stackSlotCount_;
    }

    void SetStackSlotsCount(uint32_t stackSlotCount)
    {
        stackSlotCount_ = stackSlotCount;
    }

    /// Grow (never shrink) the stack-slot count.
    void UpdateStackSlotsCount(uint32_t stackSlotCount)
    {
        stackSlotCount_ = std::max(stackSlotCount_, stackSlotCount);
    }

    uint32_t GetParametersSlotsCount() const;

    /// First stack slot of the extension area.
    uint32_t GetExtSlotsStart() const
    {
        return extStackSlot_;
    }

    void SetExtSlotsStart(uint32_t extStackSlot)
    {
        extStackSlot_ = extStackSlot;
    }
613
    // Block factories (defined in the .cpp).
    BasicBlock *CreateEmptyBlock(uint32_t guestPc = INVALID_PC);
    BasicBlock *CreateEmptyBlock(BasicBlock *baseBlock);
#ifndef NDEBUG
    BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guestPc);
#endif
    BasicBlock *CreateStartBlock();
    BasicBlock *CreateEndBlock(uint32_t guestPc = INVALID_PC);
    /// Head of the graph's intrusive list of constant instructions.
    ConstantInst *GetFirstConstInst()
    {
        return firstConstInst_;
    }
    void SetFirstConstInst(ConstantInst *constInst)
    {
        firstConstInst_ = constInst;
    }

    /// Cached singleton NullPtr instruction; nullptr until created via GetOrCreateNullPtr().
    Inst *GetNullPtrInst() const
    {
        return nullptrInst_;
    }
    bool HasNullPtrInst() const
    {
        return nullptrInst_ != nullptr;
    }
    void UnsetNullPtrInst()
    {
        ASSERT(HasNullPtrInst());
        nullptrInst_ = nullptr;
    }
    Inst *GetOrCreateNullPtr();

    /// Cached singleton "undefined" instruction; nullptr until created via GetOrCreateUndefinedInst().
    Inst *GetUndefinedInst() const
    {
        return undefinedInst_;
    }
    bool HasUndefinedInst() const
    {
        return undefinedInst_ != nullptr;
    }
    void UnsetUndefinedInst()
    {
        ASSERT(HasUndefinedInst());
        undefinedInst_ = nullptr;
    }
    Inst *GetOrCreateUndefinedInst();
659
    /// Find constant in the list, return nullptr if not found
    ConstantInst *FindConstant(DataType::Type type, uint64_t value);
    /// Find constant in the list or create new one and insert at the end
    template <typename T>
    ConstantInst *FindOrCreateConstant(T value);

    /**
     * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
     * @param inst Constant instruction to be added
     * @return Found instruction or inst if not found
     */
    ConstantInst *FindOrAddConstant(ConstantInst *inst);

    ParameterInst *AddNewParameter(uint16_t argNumber);

    /// Convenience overload: create the parameter and set its type in one call.
    ParameterInst *AddNewParameter(uint16_t argNumber, DataType::Type type)
    {
        ParameterInst *param = AddNewParameter(argNumber);
        param->SetType(type);
        return param;
    }

    ParameterInst *FindParameter(uint16_t argNumber);

    /*
     * The function remove the ConstantInst from the graph list
     * !NOTE ConstantInst isn't removed from BasicBlock list
     */
    void RemoveConstFromList(ConstantInst *constInst);

    /// Constant previously registered in the immediate-table slot `slot`.
    ConstantInst *GetSpilledConstant(ImmTableSlot slot)
    {
        ASSERT(static_cast<size_t>(slot) < spilledConstants_.size());
        return spilledConstants_[slot];
    }

    /// Register `constInst` in the immediate table. Returns its (new or existing)
    /// slot, or INVALID_IMM_TABLE_SLOT when all MAX_NUM_IMM_SLOTS are taken.
    ImmTableSlot AddSpilledConstant(ConstantInst *constInst)
    {
        // Constant already in the table
        auto currentSlot = constInst->GetImmTableSlot();
        if (currentSlot != INVALID_IMM_TABLE_SLOT) {
            ASSERT(spilledConstants_[currentSlot] == constInst);
            return currentSlot;
        }

        auto count = spilledConstants_.size();
        if (count >= MAX_NUM_IMM_SLOTS) {
            return INVALID_IMM_TABLE_SLOT;
        }
        spilledConstants_.push_back(constInst);
        constInst->SetImmTableSlot(count);
        return ImmTableSlot(count);
    }

    /// Linear search for the slot holding `constInst`; INVALID_IMM_TABLE_SLOT if absent.
    ImmTableSlot FindSpilledConstantSlot(ConstantInst *constInst) const
    {
        auto slot = std::find(spilledConstants_.begin(), spilledConstants_.end(), constInst);
        if (slot == spilledConstants_.end()) {
            return INVALID_IMM_TABLE_SLOT;
        }
        return std::distance(spilledConstants_.begin(), slot);
    }

    size_t GetSpilledConstantsCount() const
    {
        return spilledConstants_.size();
    }

    bool HasAvailableConstantSpillSlots() const
    {
        return GetSpilledConstantsCount() < MAX_NUM_IMM_SLOTS;
    }
732
    // Iteration over the raw block vector (may contain nullptr entries for erased blocks).
    auto begin()  // NOLINT(readability-identifier-naming)
    {
        return vectorBb_.begin();
    }
    auto begin() const  // NOLINT(readability-identifier-naming)
    {
        return vectorBb_.begin();
    }
    auto end()  // NOLINT(readability-identifier-naming)
    {
        return vectorBb_.end();
    }
    auto end() const  // NOLINT(readability-identifier-naming)
    {
        return vectorBb_.end();
    }
749
    void Dump(std::ostream *out) const;

    /// Root of the loop tree built by loop analysis; nullptr before the analysis runs.
    Loop *GetRootLoop()
    {
        return rootLoop_;
    }
    const Loop *GetRootLoop() const
    {
        return rootLoop_;
    }

    void SetRootLoop(Loop *rootLoop)
    {
        rootLoop_ = rootLoop;
    }

    void SetHasIrreducibleLoop(bool hasIrrLoop)
    {
        FlagIrredicibleLoop::Set(hasIrrLoop, &bitFields_);
    }

    void SetHasInfiniteLoop(bool hasInfLoop)
    {
        FlagInfiniteLoop::Set(hasInfLoop, &bitFields_);
    }

    void SetHasFloatRegs()
    {
        FlagFloatRegs::Set(true, &bitFields_);
    }

    // Queries for the loop flags above (defined in the .cpp).
    bool HasLoop() const;
    bool HasIrreducibleLoop() const;
    bool HasInfiniteLoop() const;
    bool HasFloatRegs() const;
785
    /**
     * Try-catch info
     * Vector of begin try-blocks in order they were declared in the bytecode
     */
    void AppendTryBeginBlock(const BasicBlock *block)
    {
        tryBeginBlocks_.push_back(block);
    }

    /// Remove `block` from the try-begin list; asserts (debug) if it is not registered.
    void EraseTryBeginBlock(const BasicBlock *block)
    {
        auto it = std::find(tryBeginBlocks_.begin(), tryBeginBlocks_.end(), block);
        if (it == tryBeginBlocks_.end()) {
            ASSERT(false && "Trying to remove non try_begin block");
            return;
        }
        tryBeginBlocks_.erase(it);
    }

    const auto &GetTryBeginBlocks() const
    {
        return tryBeginBlocks_;
    }

    void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rmPred);

    /// True when `block` is registered as a throw block.
    /// NOTE(review): linear std::find over throwBlocks_; if the container is an
    /// (ordered/unordered) set, its own find() would be cheaper — confirm its type.
    bool FindThrowBlock(BasicBlock *block)
    {
        auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
        return (it != throwBlocks_.end());
    }

    /// Register `block` as a throw block; returns false if it was already present.
    bool AppendThrowBlock(BasicBlock *block)
    {
        if (!FindThrowBlock(block)) {
            throwBlocks_.insert(block);
            return true;
        }
        return false;
    }

    /// Unregister `block`; returns false if it was not registered.
    bool EraseThrowBlock(BasicBlock *block)
    {
        auto it = std::find(throwBlocks_.begin(), throwBlocks_.end(), block);
        if (it == throwBlocks_.end()) {
            return false;
        }
        throwBlocks_.erase(it);
        return true;
    }

    const auto &GetThrowBlocks() const
    {
        return throwBlocks_;
    }

    void ClearThrowBlocks()
    {
        throwBlocks_.clear();
    }
846
    /// Record that `inst` may throw into `catchHandler`; an instruction may have
    /// several handlers (one list entry per call).
    void AppendThrowableInst(const Inst *inst, BasicBlock *catchHandler)
    {
        auto it = throwableInsts_.emplace(inst, GetAllocator()->Adapter()).first;
        it->second.push_back(catchHandler);
    }

    bool IsInstThrowable(const Inst *inst) const
    {
        return throwableInsts_.count(inst) > 0;
    }

    void RemoveThrowableInst(const Inst *inst);
    void ReplaceThrowableInst(Inst *oldInst, Inst *newInst);

    /// Catch handlers registered for `inst`; `inst` must be throwable (asserted).
    const auto &GetThrowableInstHandlers(const Inst *inst) const
    {
        ASSERT(IsInstThrowable(inst));
        return throwableInsts_.at(inst);
    }

    /// Drop all recorded try/catch bookkeeping (throwable insts and try-begin blocks).
    void ClearTryCatchInfo()
    {
        throwableInsts_.clear();
        tryBeginBlocks_.clear();
    }
874
    /**
     * Run pass specified by template argument T.
     * Optimization passes might take additional arguments that will passed to Optimization's constructor.
     * Analyses can't take additional arguments.
     * @tparam T Type of pass
     * @param args Additional arguments for optimizations passes
     * @return true if pass was successful
     */
    template <typename T, typename... Args>
    bool RunPass(Args... args)
    {
        ASSERT(GetPassManager());
        return passManager_.RunPass<T>(std::forward<Args>(args)...);
    }
    template <typename T, typename... Args>
    bool RunPass(Args... args) const
    {
        ASSERT(GetPassManager());
        return passManager_.RunPass<T>(std::forward<Args>(args)...);
    }

    /// Run an already-constructed pass instance.
    template <typename T>
    bool RunPass(T *pass)
    {
        ASSERT(GetPassManager());
        return passManager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
    }

    /**
     * Get analysis instance.
     * All analyses are reside in Graph object in composition relationship.
     * @tparam T Type of analysis
     * @return Reference to analysis instance
     */
    template <typename T>
    T &GetAnalysis()
    {
        ASSERT(GetPassManager());
        return GetPassManager()->GetAnalysis<T>();
    }
    template <typename T>
    const T &GetAnalysis() const
    {
        ASSERT(GetPassManager());
        return passManager_.GetAnalysis<T>();
    }

    /**
     * Same as GetAnalysis, but additionally (re)runs the analysis and checks that it is in a valid state.
     * @tparam T Type of analysis
     * @return Reference to analysis instance
     */
    template <typename T>
    T &GetValidAnalysis()
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }
    template <typename T>
    const T &GetValidAnalysis() const
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }

    /**
     * Return true if Analysis valid, false otherwise
     * @tparam T Type of analysis
     */
    template <typename T>
    bool IsAnalysisValid() const
    {
        return GetAnalysis<T>().IsValid();
    }

    /**
     * Reset valid state of specified analysis
     * @tparam T Type of analysis
     */
    template <typename T>
    void InvalidateAnalysis()
    {
        ASSERT(GetPassManager());
        GetPassManager()->GetAnalysis<T>().SetValid(false);
    }
962
    /// Accessors to the number of current instruction id.
    /// This counter is also the source of ids for newly created instructions (see CreateInst).
    auto GetCurrentInstructionId() const
    {
        return instrCurrentId_;
    }
    auto SetCurrentInstructionId(size_t v)
    {
        instrCurrentId_ = v;
    }

    /// RuntimeInterface accessors
    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    void SetRuntime(RuntimeInterface *runtime)
    {
        runtime_ = runtime;
    }
    auto GetMethod() const
    {
        return method_;
    }
    auto SetMethod(RuntimeInterface::MethodPtr method)
    {
        method_ = method;
    }

    // Code-generation helpers (defined in the .cpp).
    Encoder *GetEncoder();
    RegistersDescription *GetRegisters() const;
    CallingConvention *GetCallingConvention();
    const MethodProperties &GetMethodProperties();
    void ResetParameterInfo();
    SpillFillData GetDataForNativeParam(DataType::Type type);
997
998 template <bool GRAPH_ENCODED = false>
EstimateCodeSize()999 size_t EstimateCodeSize()
1000 {
1001 if constexpr (GRAPH_ENCODED) {
1002 return encoder_->BufferSize();
1003 }
1004 auto maxIrInstsCount = GetCurrentInstructionId();
1005 auto maxArchInstsPerIrInsts = GetEncoder()->MaxArchInstPerEncoded();
1006 auto maxBytesInArchInst = GetInstructionSizeBits(GetArch());
1007 return maxIrInstsCount * maxArchInstsPerIrInsts * maxBytesInArchInst;
1008 }
1009
    /// Event writer created for this method (class/method names captured at construction).
    EventWriter &GetEventWriter()
    {
        return eventWriter_;
    }

    /// Attach the CodeInfoBuilder used while emitting code info.
    void SetCodeBuilder(CodeInfoBuilder *builder)
    {
        ciBuilder_ = builder;
    }
1019
    // clang-format off

    /// Create instruction by opcode.
    /// Allocates from the graph's main arena and assigns the next instruction id;
    /// returns nullptr for an unknown opcode.
    [[nodiscard]] Inst* CreateInst(Opcode opc) const
    {
        switch (opc) {
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                       \
            case Opcode::OPCODE: {                                           \
                auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE);     \
                inst->SetId(instrCurrentId_++);                              \
                return inst;                                                 \
            }
            OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
            default:
                return nullptr;
        }
    }
    /// Define creation methods for all opcodes: CreateInst<OPCODE>(args...) forwards
    /// the arguments to the instruction constructor and assigns the next id.
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                      \
    template <typename... Args>                                                             \
    [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const {                          \
        auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE, std::forward<Args>(args)...); \
        inst->SetId(instrCurrentId_++);                                                     \
        return inst;                                                                        \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST

#ifdef PANDA_COMPILER_DEBUG_INFO
    // Clone-style factories: copy type/pc (and, with debug info, the current method)
    // from an existing instruction.
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                                  \
    template <typename... Args>                                                                         \
    [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {                          \
        auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
        new_inst->SetCurrentMethod(inst->GetCurrentMethod());                                           \
        return new_inst;                                                                                \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
#else
    // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define RETURN_INST(OPCODE, BASE, ...)                                                                  \
    template <typename... Args>                                                                         \
    [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const {                          \
        auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
        return new_inst;                                                                                \
    }
    OPCODE_LIST(RETURN_INST)

#undef RETURN_INST
#endif

    // clang-format on
1079
1080 uint32_t GetBitFields()
1081 {
1082 return bitFields_;
1083 }
1084
SetBitFields(uint32_t bitFields)1085 void SetBitFields(uint32_t bitFields)
1086 {
1087 bitFields_ = bitFields;
1088 }
1089
    /// Reads the 'need cleanup' flag from the packed bit fields.
    bool NeedCleanup() const
    {
        return FlagNeedCleanup::Get(bitFields_);
    }

    /// Writes the 'need cleanup' flag into the packed bit fields.
    void SetNeedCleanup(bool v)
    {
        FlagNeedCleanup::Set(v, &bitFields_);
    }
1099
IsJitOrOsrMode()1100 bool IsJitOrOsrMode() const
1101 {
1102 return !IsAotMode() && !IsBytecodeOptimizer() && SupportManagedCode();
1103 }
1104
    /// True when compiling for on-stack replacement.
    bool IsOsrMode() const
    {
        return mode_.IsOsr();
    }

    /// JIT mode is the managed runtime mode that is not OSR.
    bool IsJitMode() const
    {
        return !IsOsrMode() && IsJitOrOsrMode();
    }

    bool IsBytecodeOptimizer() const
    {
        return mode_.IsBytecodeOpt();
    }

    bool IsDynamicMethod() const
    {
        return mode_.IsDynamicMethod();
    }

    bool SupportManagedCode() const
    {
        return mode_.SupportManagedCode();
    }

    /// Returns the full compilation-mode flag set (see GraphMode).
    GraphMode GetMode() const
    {
        return mode_;
    }

    void SetMode(GraphMode mode)
    {
        mode_ = mode;
    }
1139
1140 #ifndef NDEBUG
GetCompilerMode()1141 compiler::inst_modes::Mode GetCompilerMode()
1142 {
1143 if (IsBytecodeOptimizer()) {
1144 return compiler::inst_modes::BYTECODE_OPT;
1145 }
1146 if (SupportManagedCode()) {
1147 return compiler::inst_modes::JIT_AOT;
1148 }
1149 return compiler::inst_modes::IRTOC;
1150 }
1151 #endif
1152
    // Records a method devirtualized under the single-implementation
    // assumption. NOTE(review): presumably tracked so dependent code can be
    // invalidated if the assumption breaks — confirm with the users of
    // GetSingleImplementationList().
    void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
    {
        singleImplementationList_.push_back(method);
    }

    void SetDynamicMethod()
    {
        mode_.SetDynamicMethod(true);
    }

    void SetDynamicStub()
    {
        mode_.SetDynamicStub(true);
    }

    /// Mutable access to the recorded single-implementation methods.
    auto &GetSingleImplementationList()
    {
        return singleImplementationList_;
    }
1172
    /// Graph that inlines this graph, or nullptr for the outermost graph.
    Graph *GetParentGraph() const
    {
        return parentGraph_;
    }
1177
GetOutermostParentGraph()1178 Graph *GetOutermostParentGraph()
1179 {
1180 auto graph = this;
1181 while (graph->GetParentGraph() != nullptr) {
1182 graph = graph->GetParentGraph();
1183 }
1184 return graph;
1185 }
1186
    /// Sets the number of virtual registers of the compiled method.
    void SetVRegsCount(size_t count)
    {
        vregsCount_ = count;
    }

    size_t GetVRegsCount() const
    {
        return vregsCount_;
    }

    /// Extra environment slots: dynamic methods (outside the bytecode
    /// optimizer) carry VRegInfo::ENV_COUNT additional registers.
    size_t GetEnvCount() const
    {
        return (IsDynamicMethod() && !IsBytecodeOptimizer()) ? VRegInfo::ENV_COUNT : 0;
    }

    RelocationHandler *GetRelocationHandler()
    {
        return relocationHandler_;
    }

    void SetRelocationHandler(RelocationHandler *handler)
    {
        relocationHandler_ = handler;
    }

    // Per-block counters (defined out of line). NOTE(review): presumably
    // profile data for the branch/throw in `block` — confirm in the .cpp.
    int64_t GetBranchCounter(const BasicBlock *block, bool trueSucc);

    int64_t GetThrowCounter(const BasicBlock *block);
1215
1216 /// This class provides methods for ranged-based `for` loop over all parameters in the graph.
1217 class ParameterList {
1218 public:
1219 class Iterator {
1220 public:
Iterator(Inst * inst)1221 explicit Iterator(Inst *inst) : inst_(inst) {}
1222
1223 Iterator &operator++()
1224 {
1225 for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
1226 inst_ = inst_->GetNext()) {
1227 }
1228 return *this;
1229 }
1230 bool operator!=(const Iterator &other)
1231 {
1232 return inst_ != other.inst_;
1233 }
1234 Inst *operator*()
1235 {
1236 return inst_;
1237 }
1238 Inst *operator->()
1239 {
1240 return inst_;
1241 }
1242
1243 private:
1244 Inst *inst_ {nullptr};
1245 };
1246
ParameterList(const Graph * graph)1247 explicit ParameterList(const Graph *graph) : graph_(graph) {}
1248
1249 // NOLINTNEXTLINE(readability-identifier-naming)
1250 Iterator begin();
1251 // NOLINTNEXTLINE(readability-identifier-naming)
end()1252 static Iterator end()
1253 {
1254 return Iterator(nullptr);
1255 }
1256
1257 private:
1258 const Graph *graph_ {nullptr};
1259 };
1260
    /**
     * Get list of all parameters
     * @return instance of the ParameterList class
     */
    ParameterList GetParameters() const
    {
        return ParameterList(this);
    }

    void InitDefaultLocations();

    /// Irtoc barriers are available in JIT/OSR, AOT, and interpreter(-entry)
    /// modes, for non-dynamic methods, on every target except AArch32.
    bool SupportsIrtocBarriers() const
    {
        return (IsJitOrOsrMode() || IsAotMode() || GetMode().IsInterpreter() || GetMode().IsInterpreterEntry()) &&
               !IsDynamicMethod() && GetArch() != Arch::AARCH32;
    }
1277
SetMaxInliningDepth(uint32_t depth)1278 void SetMaxInliningDepth(uint32_t depth)
1279 {
1280 maxInliningDepth_ = std::max(maxInliningDepth_, depth);
1281 }
1282
GetMaxInliningDepth()1283 uint32_t GetMaxInliningDepth()
1284 {
1285 return maxInliningDepth_;
1286 }
1287
private:
    void AddConstInStartBlock(ConstantInst *constInst);

    NO_MOVE_SEMANTIC(Graph);
    NO_COPY_SEMANTIC(Graph);

private:
    // Deepest inlining depth reached while compiling this graph.
    uint32_t maxInliningDepth_ {0};
    // Arena for graph-lifetime IR entities (blocks, instructions).
    ArenaAllocator *const allocator_;
    ArenaAllocator *const localAllocator_;

    Arch arch_ {RUNTIME_ARCH};

    // List of blocks in insertion order.
    ArenaVector<BasicBlock *> vectorBb_;
    BasicBlock *startBlock_ {nullptr};
    BasicBlock *endBlock_ {nullptr};

    Loop *rootLoop_ {nullptr};

    AotData *aotData_ {nullptr};

    // Packed boolean flags; accessed through the Flag* BitField aliases below.
    uint32_t bitFields_ {0};
    using FlagDFConstruct = BitField<bool, 0, 1>;
    using FlagNeedCleanup = FlagDFConstruct::NextFlag;
    // NOTE(review): "Irredicible" is a long-standing typo for "irreducible";
    // left as-is because accessors elsewhere reference this alias.
    using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
    using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
    using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
    using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
    using FlagIrtocPrologEpilogOptimized = FlagDefaultLocationsInit::NextFlag;
    using FlagThrowApplied = FlagIrtocPrologEpilogOptimized::NextFlag;
    using FlagUnrollComplete = FlagThrowApplied::NextFlag;
#ifdef NDEBUG
    using LastField = FlagUnrollComplete;
#else
    // Debug-only flags.
    using FlagRegallocApplied = FlagUnrollComplete::NextFlag;
    using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
    using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
    using FlagLowLevelInstnsEnabled = FlagInliningComplete::NextFlag;
    using FlagDynUnitTest = FlagLowLevelInstnsEnabled::NextFlag;
    using LastField = FlagDynUnitTest;
#endif  // NDEBUG

#ifdef PANDA_COMPILER_DEBUG_INFO
    using FlagLineDebugInfoEnabled = LastField::NextFlag;
#endif

    // codegen data
    EncodeDataType data_;
    Span<const uint8_t> codeInfoData_;
    ArenaVector<bool> *usedRegs_ {nullptr};
    ArenaVector<bool> *usedVregs_ {nullptr};

    // NOTE (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
    ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwableInsts_;

    RegMask archUsedRegs_ {0};

    // Next instruction id to assign; mutable because the const CreateInst*
    // factories increment it.
    mutable size_t instrCurrentId_ {0};
    // first constant instruction in graph !NOTE rewrite it to hash-map
    ConstantInst *firstConstInst_ {nullptr};
    Inst *nullptrInst_ {nullptr};
    Inst *undefinedInst_ {nullptr};
    RuntimeInterface *runtime_ {nullptr};
    RuntimeInterface::MethodPtr method_ {nullptr};

    Encoder *encoder_ {nullptr};

    mutable RegistersDescription *registers_ {nullptr};

    CallingConvention *callconv_ {nullptr};

    // Cached by GetMethodProperties(); empty until first use.
    std::optional<MethodProperties> methodProperties_ {std::nullopt};

    ParameterInfo *paramInfo_ {nullptr};

    RelocationHandler *relocationHandler_ {nullptr};

    mutable PassManager passManager_;
    EventWriter eventWriter_;

    // Compilation mode flags (JIT/AOT/OSR/bytecode-opt/...), see GraphMode.
    GraphMode mode_;

    CodeInfoBuilder *ciBuilder_ {nullptr};

    ArenaVector<RuntimeInterface::MethodPtr> singleImplementationList_;
    ArenaVector<const BasicBlock *> tryBeginBlocks_;
    ArenaSet<BasicBlock *> throwBlocks_;
    ArenaVector<ConstantInst *> spilledConstants_;
    // Graph that inlines this graph
    Graph *parentGraph_ {nullptr};
    // Number of used stack slots
    uint32_t stackSlotCount_ {0};
    // Number of used stack slots for parameters
    uint32_t paramSlotsCount_ {0};
    // First language extension slot
    uint32_t extStackSlot_ {0};
    // Number of the virtual registers used in the compiled method (inlined methods aren't included).
    uint32_t vregsCount_ {0};
    // Source language of the method being compiled
    SourceLanguage lang_ {SourceLanguage::PANDA_ASSEMBLY};
};
1390
1391 class MarkerHolder {
1392 public:
1393 NO_COPY_SEMANTIC(MarkerHolder);
1394 NO_MOVE_SEMANTIC(MarkerHolder);
1395
MarkerHolder(const Graph * graph)1396 explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
1397 {
1398 ASSERT(marker_ != UNDEF_MARKER);
1399 }
1400
~MarkerHolder()1401 ~MarkerHolder()
1402 {
1403 graph_->EraseMarker(marker_);
1404 }
1405
GetMarker()1406 Marker GetMarker()
1407 {
1408 return marker_;
1409 }
1410
1411 private:
1412 const Graph *graph_;
1413 Marker marker_ {UNDEF_MARKER};
1414 };
1415
1416 template <typename T>
FindOrCreateConstant(T value)1417 ConstantInst *Graph::FindOrCreateConstant(T value)
1418 {
1419 bool isSupportInt32 = IsBytecodeOptimizer();
1420 if (firstConstInst_ == nullptr) {
1421 firstConstInst_ = CreateInstConstant(value, isSupportInt32);
1422 AddConstInStartBlock(firstConstInst_);
1423 return firstConstInst_;
1424 }
1425 ConstantInst *currentConst = firstConstInst_;
1426 ConstantInst *prevConst = nullptr;
1427 while (currentConst != nullptr) {
1428 if (currentConst->IsEqualConst(value, isSupportInt32)) {
1429 return currentConst;
1430 }
1431 prevConst = currentConst;
1432 currentConst = currentConst->GetNextConst();
1433 }
1434 ASSERT(prevConst != nullptr);
1435 auto *newConst = CreateInstConstant(value, isSupportInt32);
1436 AddConstInStartBlock(newConst);
1437
1438 prevConst->SetNextConst(newConst);
1439 return newConst;
1440 }
1441
// Free helpers (defined out of line).
// NOTE(review): InvalidateBlocksOrderAnalyzes presumably drops block-order
// dependent analyses (e.g. RPO) — confirm in the implementation.
void InvalidateBlocksOrderAnalyzes(Graph *graph);
void MarkLoopExits(const Graph *graph, Marker marker);
std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
size_t GetObjectOffset(const Graph *graph, ObjectType objType, RuntimeInterface::FieldPtr field, uint32_t typeId);
1446 } // namespace ark::compiler
1447 #endif // COMPILER_OPTIMIZER_IR_GRAPH_H
1448