1 /*
2 * Copyright (c) 2021-2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
17 #define COMPILER_OPTIMIZER_IR_GRAPH_H
18
19 #include "aot_data.h"
20 #include "compiler_events_gen.h"
21 #include "inst.h"
22 #include "marker.h"
23 #include "optimizer/code_generator/method_properties.h"
24 #include "optimizer/pass_manager.h"
25 #include "utils/arena_containers.h"
26 #include <algorithm>
27 #include <optional>
28
29 namespace panda {
30 class Method;
31 class CodeAllocator;
32 } // namespace panda
33
34 namespace panda::compiler {
35 class BasicBlock;
36 class Graph;
37 class RuntimeInfo;
38 class PassManager;
39 class LivenessAnalyzer;
40 class DominatorsTree;
41 class Rpo;
42 class BoundsRangeInfo;
43 class Loop;
44 class CodeInfoBuilder;
45
46 class Encoder;
47 class CallingConvention;
48 class ParameterInfo;
49 class RegistersDescription;
50 class RelocationHandler;
51
52 enum AliasType : uint8_t;
53
54 /// Specifies graph compilation mode.
55 class GraphMode {
56 public:
GraphMode(uint32_t value)57 explicit GraphMode(uint32_t value) : value_(value) {}
58 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
59 #define DECLARE_GRAPH_MODE_MODIFIERS(name) \
60 void Set##name(bool v) \
61 { \
62 Flag##name ::Set(v, &value_); \
63 } \
64 bool Is##name() const \
65 { \
66 return Flag##name ::Get(value_); \
67 }
68
69 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
70 #define DECLARE_GRAPH_MODE(name) \
71 static GraphMode name(bool set = true) \
72 { \
73 return GraphMode(Flag##name ::Encode(set)); \
74 } \
75 DECLARE_GRAPH_MODE_MODIFIERS(name)
76
77 DECLARE_GRAPH_MODE(Osr);
78 // The graph is used in BytecodeOptimizer mode
79 DECLARE_GRAPH_MODE(BytecodeOpt);
80 // The method from dynamic language
81 DECLARE_GRAPH_MODE(DynamicMethod);
82 // The method from dynamic language uses common calling convention
83 DECLARE_GRAPH_MODE(DynamicStub);
84 // Graph will be compiled with native calling convention
85 DECLARE_GRAPH_MODE(Native);
86 // FastPath from compiled code to runtime
87 DECLARE_GRAPH_MODE(FastPath);
88 // Boundary frame is used for compiled code
89 DECLARE_GRAPH_MODE(Boundary);
90 // Graph will be compiled for calling inside interpreter
91 DECLARE_GRAPH_MODE(Interpreter);
92 // Graph will be compiled for interpreter main loop
93 DECLARE_GRAPH_MODE(InterpreterEntry);
94
95 #undef DECLARE_GRAPH_MODE
96 #undef DECLARE_GRAPH_MODE_MODIFIERS
97
SupportManagedCode()98 bool SupportManagedCode() const
99 {
100 return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
101 }
102
103 void Dump(std::ostream &stm);
104
105 private:
106 using FlagOsr = BitField<bool, 0, 1>;
107 using FlagBytecodeOpt = FlagOsr::NextFlag;
108 using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
109 using FlagDynamicStub = FlagDynamicMethod::NextFlag;
110 using FlagNative = FlagDynamicStub::NextFlag;
111 using FlagFastPath = FlagNative::NextFlag;
112 using FlagBoundary = FlagFastPath::NextFlag;
113 using FlagInterpreter = FlagBoundary::NextFlag;
114 using FlagInterpreterEntry = FlagInterpreter::NextFlag;
115
116 uint32_t value_ {0};
117
118 friend GraphMode operator|(GraphMode a, GraphMode b);
119 };
120
121 inline GraphMode operator|(GraphMode a, GraphMode b)
122 {
123 return GraphMode(a.value_ | b.value_);
124 }
125
126 using EncodeDataType = Span<uint8_t>;
127
128 class Graph final : public MarkerMgr {
129 public:
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch)130 explicit Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch)
131 : Graph(allocator, localAllocator, arch, false)
132 {
133 }
134
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,bool osrMode)135 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool osrMode)
136 : Graph(allocator, localAllocator, arch, nullptr, GetDefaultRuntime(), osrMode)
137 {
138 }
139
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,bool dynamicMethod,bool bytecodeOpt)140 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, bool dynamicMethod, bool bytecodeOpt)
141 : Graph(allocator, localAllocator, arch, nullptr, GetDefaultRuntime(), false, nullptr, dynamicMethod,
142 bytecodeOpt)
143 {
144 }
145
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,RuntimeInterface::MethodPtr method,RuntimeInterface * runtime,bool osrMode)146 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
147 RuntimeInterface *runtime, bool osrMode)
148 : Graph(allocator, localAllocator, arch, method, runtime, osrMode, nullptr)
149 {
150 }
151
152 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
153 RuntimeInterface *runtime, bool osrMode, Graph *parent, bool dynamicMethod = false, bool bytecodeOpt = false)
154 : Graph(allocator, localAllocator, arch, method, runtime, parent,
155 GraphMode::Osr(osrMode) | GraphMode::BytecodeOpt(bytecodeOpt) | GraphMode::DynamicMethod(dynamicMethod))
156 {
157 }
158
Graph(ArenaAllocator * allocator,ArenaAllocator * localAllocator,Arch arch,RuntimeInterface::MethodPtr method,RuntimeInterface * runtime,Graph * parent,GraphMode mode)159 Graph(ArenaAllocator *allocator, ArenaAllocator *localAllocator, Arch arch, RuntimeInterface::MethodPtr method,
160 RuntimeInterface *runtime, Graph *parent, GraphMode mode)
161 : allocator_(allocator),
162 localAllocator_(localAllocator),
163 arch_(arch),
164 vectorBb_(allocator->Adapter()),
165 throwableInsts_(allocator->Adapter()),
166 runtime_(runtime),
167 method_(method),
168 passManager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
169 eventWriter_(runtime->GetClassNameFromMethod(method), runtime->GetMethodName(method)),
170 mode_(mode),
171 singleImplementationList_(allocator->Adapter()),
172 tryBeginBlocks_(allocator->Adapter()),
173 spilledConstants_(allocator->Adapter()),
174 parentGraph_(parent)
175 {
176 SetNeedCleanup(true);
177 }
178
179 ~Graph() override;
180
CreateChildGraph(RuntimeInterface::MethodPtr method)181 Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
182 {
183 auto graph = GetAllocator()->New<Graph>(GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime(),
184 this, mode_);
185 graph->SetAotData(GetAotData());
186 return graph;
187 }
188
189 /// Get default runtime interface object
GetDefaultRuntime()190 static RuntimeInterface *GetDefaultRuntime()
191 {
192 static RuntimeInterface runtimeInterface;
193 return &runtimeInterface;
194 }
195
GetArch()196 Arch GetArch() const
197 {
198 return arch_;
199 }
200
GetLanguage()201 SourceLanguage GetLanguage() const
202 {
203 return lang_;
204 }
205
SetLanguage(SourceLanguage lang)206 void SetLanguage(SourceLanguage lang)
207 {
208 lang_ = lang;
209 }
210
211 void AddBlock(BasicBlock *block);
212 #ifndef NDEBUG
213 void AddBlock(BasicBlock *block, uint32_t id);
214 #endif
215 void DisconnectBlock(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
216 void DisconnectBlockRec(BasicBlock *block, bool removeLastInst = true, bool fixDomTree = true);
217
218 void EraseBlock(BasicBlock *block);
219 void RestoreBlock(BasicBlock *block);
220 // Remove empty block. Block must have one successor and no Phis.
221 void RemoveEmptyBlock(BasicBlock *block);
222
223 // Remove empty block. Block may have Phis and can't be a loop pre-header.
224 void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irrLoop = false);
225
226 // Remove block predecessors.
227 void RemovePredecessors(BasicBlock *block, bool removeLastInst = true);
228
229 // Remove block successors.
230 void RemoveSuccessors(BasicBlock *block);
231
232 // Remove unreachable blocks.
233 void RemoveUnreachableBlocks();
234
235 // get end block
GetEndBlock()236 BasicBlock *GetEndBlock()
237 {
238 return endBlock_;
239 }
240 // set end block
SetEndBlock(BasicBlock * endBlock)241 void SetEndBlock(BasicBlock *endBlock)
242 {
243 endBlock_ = endBlock;
244 }
HasEndBlock()245 bool HasEndBlock()
246 {
247 return endBlock_ != nullptr;
248 }
249 // get start block
GetStartBlock()250 BasicBlock *GetStartBlock()
251 {
252 return startBlock_;
253 }
GetStartBlock()254 BasicBlock *GetStartBlock() const
255 {
256 return startBlock_;
257 }
258 // set start block
SetStartBlock(BasicBlock * startBlock)259 void SetStartBlock(BasicBlock *startBlock)
260 {
261 startBlock_ = startBlock;
262 }
263 // get vector_bb_
GetVectorBlocks()264 const ArenaVector<BasicBlock *> &GetVectorBlocks() const
265 {
266 return vectorBb_;
267 }
268
GetAliveBlocksCount()269 size_t GetAliveBlocksCount() const
270 {
271 return std::count_if(vectorBb_.begin(), vectorBb_.end(), [](BasicBlock *block) { return block != nullptr; });
272 }
273
GetPassManager()274 PassManager *GetPassManager()
275 {
276 return &passManager_;
277 }
GetPassManager()278 const PassManager *GetPassManager() const
279 {
280 return &passManager_;
281 }
282
283 const BoundsRangeInfo *GetBoundsRangeInfo() const;
284
285 const ArenaVector<BasicBlock *> &GetBlocksRPO() const;
286
287 const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;
288
289 template <class Callback>
290 void VisitAllInstructions(Callback callback);
291
292 AliasType CheckInstAlias(Inst *mem1, Inst *mem2);
293
294 /// Main allocator for graph, all related to Graph data should be allocated via this allocator.
GetAllocator()295 ArenaAllocator *GetAllocator() const
296 {
297 return allocator_;
298 }
299 /// Allocator for temporary usage, when allocated data is no longer needed after optimization/analysis finished.
GetLocalAllocator()300 ArenaAllocator *GetLocalAllocator() const
301 {
302 return localAllocator_;
303 }
IsDFConstruct()304 bool IsDFConstruct() const
305 {
306 return FlagDFConstruct::Get(bitFields_);
307 }
SetDFConstruct()308 void SetDFConstruct()
309 {
310 FlagDFConstruct::Set(true, &bitFields_);
311 }
312
SetAotData(AotData * data)313 void SetAotData(AotData *data)
314 {
315 aotData_ = data;
316 }
GetAotData()317 AotData *GetAotData()
318 {
319 return aotData_;
320 }
GetAotData()321 const AotData *GetAotData() const
322 {
323 return aotData_;
324 }
325
IsAotMode()326 bool IsAotMode() const
327 {
328 return aotData_ != nullptr;
329 }
330
IsAotNoChaMode()331 bool IsAotNoChaMode() const
332 {
333 return aotData_ != nullptr && !aotData_->GetUseCha();
334 }
335
IsOfflineCompilationMode()336 bool IsOfflineCompilationMode() const
337 {
338 return IsAotMode() || GetMode().IsInterpreter() || GetMode().IsFastPath() || GetMode().IsInterpreterEntry();
339 }
340
IsDefaultLocationsInit()341 bool IsDefaultLocationsInit() const
342 {
343 return FlagDefaultLocationsInit::Get(bitFields_);
344 }
SetDefaultLocationsInit()345 void SetDefaultLocationsInit()
346 {
347 FlagDefaultLocationsInit::Set(true, &bitFields_);
348 }
IsIrtocPrologEpilogOptimized()349 bool IsIrtocPrologEpilogOptimized() const
350 {
351 return FlagIrtocPrologEpilogOptimized::Get(bitFields_);
352 }
SetIrtocPrologEpilogOptimized()353 void SetIrtocPrologEpilogOptimized()
354 {
355 FlagIrtocPrologEpilogOptimized::Set(true, &bitFields_);
356 }
IsUnrollComplete()357 bool IsUnrollComplete() const
358 {
359 return FlagUnrollComplete::Get(bitFields_);
360 }
SetUnrollComplete()361 void SetUnrollComplete()
362 {
363 FlagUnrollComplete::Set(true, &bitFields_);
364 }
365 #ifndef NDEBUG
IsRegAllocApplied()366 bool IsRegAllocApplied() const
367 {
368 return FlagRegallocApplied::Get(bitFields_);
369 }
SetRegAllocApplied()370 void SetRegAllocApplied()
371 {
372 FlagRegallocApplied::Set(true, &bitFields_);
373 }
IsRegAccAllocApplied()374 bool IsRegAccAllocApplied() const
375 {
376 return FlagRegaccallocApplied::Get(bitFields_);
377 }
SetRegAccAllocApplied()378 void SetRegAccAllocApplied()
379 {
380 FlagRegaccallocApplied::Set(true, &bitFields_);
381 }
IsInliningComplete()382 bool IsInliningComplete() const
383 {
384 return FlagInliningComplete::Get(bitFields_) || IsOsrMode();
385 }
SetInliningComplete()386 void SetInliningComplete()
387 {
388 FlagInliningComplete::Set(true, &bitFields_);
389 }
IsLowLevelInstructionsEnabled()390 bool IsLowLevelInstructionsEnabled() const
391 {
392 return FlagLowLevelInstnsEnabled::Get(bitFields_);
393 }
SetLowLevelInstructionsEnabled()394 void SetLowLevelInstructionsEnabled()
395 {
396 FlagLowLevelInstnsEnabled::Set(true, &bitFields_);
397 }
IsDynUnitTest()398 bool IsDynUnitTest() const
399 {
400 return FlagDynUnitTest::Get(bitFields_);
401 }
SetDynUnitTestFlag()402 void SetDynUnitTestFlag()
403 {
404 FlagDynUnitTest::Set(true, &bitFields_);
405 }
406 #else
IsRegAllocApplied()407 bool IsRegAllocApplied() const
408 {
409 return false;
410 }
411 #endif // NDEBUG
412
413 #ifdef PANDA_COMPILER_DEBUG_INFO
IsLineDebugInfoEnabled()414 bool IsLineDebugInfoEnabled() const
415 {
416 return FlagLineDebugInfoEnabled::Get(bitFields_);
417 }
SetLineDebugInfoEnabled()418 void SetLineDebugInfoEnabled()
419 {
420 FlagLineDebugInfoEnabled::Set(true, &bitFields_);
421 }
422 #endif
423
SetCode(EncodeDataType data)424 void SetCode(EncodeDataType data)
425 {
426 data_ = data;
427 }
428
GetData()429 EncodeDataType GetData() const
430 {
431 return data_;
432 }
433
GetData()434 EncodeDataType GetData()
435 {
436 return data_;
437 }
438
GetCode()439 EncodeDataType GetCode() const
440 {
441 return data_;
442 }
443
GetCode()444 EncodeDataType GetCode()
445 {
446 return data_;
447 }
448
SetCodeInfo(Span<uint8_t> data)449 void SetCodeInfo(Span<uint8_t> data)
450 {
451 codeInfoData_ = data.SubSpan<const uint8_t>(0, data.size());
452 }
453
GetCodeInfoData()454 Span<const uint8_t> GetCodeInfoData() const
455 {
456 return codeInfoData_;
457 }
458
459 void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
460 {
461 if (prefix != nullptr) {
462 out << prefix;
463 }
464 out << "'\n used scalar regs: ";
465 if (usedRegs_ != nullptr) {
466 for (unsigned i = 0; i < usedRegs_->size(); ++i) {
467 if (usedRegs_->at(i)) {
468 out << i << " ";
469 }
470 }
471 }
472 out << "\n used float regs: ";
473 if (usedRegs_ != nullptr) {
474 for (unsigned i = 0; i < usedVregs_->size(); ++i) {
475 if (usedVregs_->at(i)) {
476 out << i << " ";
477 }
478 }
479 }
480 out << std::endl;
481 }
482
483 // Get registers mask which used in graph
484 template <DataType::Type REG_TYPE>
GetUsedRegs()485 ArenaVector<bool> *GetUsedRegs() const
486 {
487 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
488 if constexpr (REG_TYPE == DataType::INT64) {
489 return usedRegs_;
490 }
491 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
492 if constexpr (REG_TYPE == DataType::FLOAT64) {
493 return usedVregs_;
494 }
495 UNREACHABLE();
496 return nullptr;
497 }
498
SetRegUsage(Register reg,DataType::Type type)499 void SetRegUsage(Register reg, DataType::Type type)
500 {
501 ASSERT(reg != INVALID_REG);
502 if (DataType::IsFloatType(type)) {
503 SetUsedReg<DataType::FLOAT64>(reg);
504 } else {
505 SetUsedReg<DataType::INT64>(reg);
506 }
507 }
508
SetRegUsage(Location location)509 void SetRegUsage(Location location)
510 {
511 ASSERT(location.IsFixedRegister());
512 if (location.IsFpRegister()) {
513 SetUsedReg<DataType::FLOAT64>(location.GetValue());
514 } else {
515 SetUsedReg<DataType::INT64>(location.GetValue());
516 }
517 }
518
519 template <DataType::Type REG_TYPE>
SetUsedReg(Register reg)520 void SetUsedReg(Register reg)
521 {
522 ArenaVector<bool> *graphRegs = nullptr;
523 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
524 if constexpr (REG_TYPE == DataType::INT64) {
525 graphRegs = usedRegs_;
526 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
527 } else if constexpr (REG_TYPE == DataType::FLOAT64) {
528 graphRegs = usedVregs_;
529 } else {
530 UNREACHABLE();
531 }
532 ASSERT(graphRegs != nullptr);
533 ASSERT(reg < graphRegs->size());
534 (*graphRegs)[reg] = true;
535 }
536
537 template <DataType::Type REG_TYPE>
InitUsedRegs(const ArenaVector<bool> * usedRegs)538 void InitUsedRegs(const ArenaVector<bool> *usedRegs)
539 {
540 if (usedRegs == nullptr) {
541 return;
542 }
543 ArenaVector<bool> *graphRegs = nullptr;
544 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
545 if constexpr (REG_TYPE == DataType::INT64) {
546 usedRegs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
547 graphRegs = usedRegs_;
548 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
549 } else if constexpr (REG_TYPE == DataType::FLOAT64) {
550 usedVregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
551 graphRegs = usedVregs_;
552 } else {
553 UNREACHABLE();
554 }
555 graphRegs->resize(usedRegs->size());
556 std::copy(usedRegs->begin(), usedRegs->end(), graphRegs->begin());
557 }
558
559 Register GetZeroReg() const;
560 Register GetArchTempReg() const;
561 Register GetArchTempVReg() const;
562 // Get registers mask which used in codegen, runtime e.t.c
563 RegMask GetArchUsedRegs();
564 void SetArchUsedRegs(RegMask mask);
565
566 // Get vector registers mask which used in codegen, runtime e.t.c
567 VRegMask GetArchUsedVRegs();
568
569 // Return true if one 64-bit scalar register can be split to 2 32-bit
570 bool IsRegScalarMapped() const;
571
GetStackSlotsCount()572 uint32_t GetStackSlotsCount() const
573 {
574 return stackSlotCount_;
575 }
576
SetStackSlotsCount(uint32_t stackSlotCount)577 void SetStackSlotsCount(uint32_t stackSlotCount)
578 {
579 stackSlotCount_ = stackSlotCount;
580 }
581
UpdateStackSlotsCount(uint32_t stackSlotCount)582 void UpdateStackSlotsCount(uint32_t stackSlotCount)
583 {
584 stackSlotCount_ = std::max(stackSlotCount_, stackSlotCount);
585 }
586
587 uint32_t GetParametersSlotsCount() const;
588
GetExtSlotsStart()589 uint32_t GetExtSlotsStart() const
590 {
591 return extStackSlot_;
592 }
593
SetExtSlotsStart(uint32_t extStackSlot)594 void SetExtSlotsStart(uint32_t extStackSlot)
595 {
596 extStackSlot_ = extStackSlot;
597 }
598
599 BasicBlock *CreateEmptyBlock(uint32_t guestPc = INVALID_PC);
600 BasicBlock *CreateEmptyBlock(BasicBlock *baseBlock);
601 #ifndef NDEBUG
602 BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guestPc);
603 #endif
604 BasicBlock *CreateStartBlock();
605 BasicBlock *CreateEndBlock(uint32_t guestPc = INVALID_PC);
GetFirstConstInst()606 ConstantInst *GetFirstConstInst()
607 {
608 return firstConstInst_;
609 }
SetFirstConstInst(ConstantInst * constInst)610 void SetFirstConstInst(ConstantInst *constInst)
611 {
612 firstConstInst_ = constInst;
613 }
614
GetNullPtrInst()615 Inst *GetNullPtrInst() const
616 {
617 return nullptrInst_;
618 }
HasNullPtrInst()619 bool HasNullPtrInst() const
620 {
621 return nullptrInst_ != nullptr;
622 }
UnsetNullPtrInst()623 void UnsetNullPtrInst()
624 {
625 ASSERT(HasNullPtrInst());
626 nullptrInst_ = nullptr;
627 }
628 Inst *GetOrCreateNullPtr();
629
GetUndefinedInst()630 Inst *GetUndefinedInst() const
631 {
632 return undefinedInst_;
633 }
HasUndefinedInst()634 bool HasUndefinedInst() const
635 {
636 return undefinedInst_ != nullptr;
637 }
UnsetUndefinedInst()638 void UnsetUndefinedInst()
639 {
640 ASSERT(HasUndefinedInst());
641 undefinedInst_ = nullptr;
642 }
643 Inst *GetOrCreateUndefinedInst();
644
645 /// Find constant in the list, return nullptr if not found
646 ConstantInst *FindConstant(DataType::Type type, uint64_t value);
647 /// Find constant in the list or create new one and insert at the end
648 template <typename T>
649 ConstantInst *FindOrCreateConstant(T value);
650
651 /**
652 * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
653 * @param inst Constant instruction to be added
654 * @return Found instruction or inst if not found
655 */
656 ConstantInst *FindOrAddConstant(ConstantInst *inst);
657
658 ParameterInst *AddNewParameter(uint16_t argNumber);
659
AddNewParameter(uint16_t argNumber,DataType::Type type)660 ParameterInst *AddNewParameter(uint16_t argNumber, DataType::Type type)
661 {
662 ParameterInst *param = AddNewParameter(argNumber);
663 param->SetType(type);
664 return param;
665 }
666
667 ParameterInst *FindParameter(uint16_t argNumber);
668
669 /*
670 * The function remove the ConstantInst from the graph list
671 * !NOTE ConstantInst isn't removed from BasicBlock list
672 */
673 void RemoveConstFromList(ConstantInst *constInst);
674
GetSpilledConstant(ImmTableSlot slot)675 ConstantInst *GetSpilledConstant(ImmTableSlot slot)
676 {
677 ASSERT(static_cast<size_t>(slot) < spilledConstants_.size());
678 return spilledConstants_[slot];
679 }
680
AddSpilledConstant(ConstantInst * constInst)681 ImmTableSlot AddSpilledConstant(ConstantInst *constInst)
682 {
683 // Constant already in the table
684 auto currentSlot = constInst->GetImmTableSlot();
685 if (currentSlot != INVALID_IMM_TABLE_SLOT) {
686 ASSERT(spilledConstants_[currentSlot] == constInst);
687 return currentSlot;
688 }
689
690 auto count = spilledConstants_.size();
691 if (count >= MAX_NUM_IMM_SLOTS) {
692 return INVALID_IMM_TABLE_SLOT;
693 }
694 spilledConstants_.push_back(constInst);
695 constInst->SetImmTableSlot(count);
696 return ImmTableSlot(count);
697 }
698
FindSpilledConstantSlot(ConstantInst * constInst)699 ImmTableSlot FindSpilledConstantSlot(ConstantInst *constInst) const
700 {
701 auto slot = std::find(spilledConstants_.begin(), spilledConstants_.end(), constInst);
702 if (slot == spilledConstants_.end()) {
703 return INVALID_IMM_TABLE_SLOT;
704 }
705 return std::distance(spilledConstants_.begin(), slot);
706 }
707
GetSpilledConstantsCount()708 size_t GetSpilledConstantsCount() const
709 {
710 return spilledConstants_.size();
711 }
712
HasAvailableConstantSpillSlots()713 bool HasAvailableConstantSpillSlots() const
714 {
715 return GetSpilledConstantsCount() < MAX_NUM_IMM_SLOTS;
716 }
717
begin()718 auto begin() // NOLINT(readability-identifier-naming)
719 {
720 return vectorBb_.begin();
721 }
begin()722 auto begin() const // NOLINT(readability-identifier-naming)
723 {
724 return vectorBb_.begin();
725 }
end()726 auto end() // NOLINT(readability-identifier-naming)
727 {
728 return vectorBb_.end();
729 }
end()730 auto end() const // NOLINT(readability-identifier-naming)
731 {
732 return vectorBb_.end();
733 }
734
735 void Dump(std::ostream *out) const;
736
GetRootLoop()737 Loop *GetRootLoop()
738 {
739 return rootLoop_;
740 }
GetRootLoop()741 const Loop *GetRootLoop() const
742 {
743 return rootLoop_;
744 }
745
SetRootLoop(Loop * rootLoop)746 void SetRootLoop(Loop *rootLoop)
747 {
748 rootLoop_ = rootLoop;
749 }
750
SetHasIrreducibleLoop(bool hasIrrLoop)751 void SetHasIrreducibleLoop(bool hasIrrLoop)
752 {
753 FlagIrredicibleLoop::Set(hasIrrLoop, &bitFields_);
754 }
755
SetHasInfiniteLoop(bool hasInfLoop)756 void SetHasInfiniteLoop(bool hasInfLoop)
757 {
758 FlagInfiniteLoop::Set(hasInfLoop, &bitFields_);
759 }
760
SetHasFloatRegs()761 void SetHasFloatRegs()
762 {
763 FlagFloatRegs::Set(true, &bitFields_);
764 }
765
766 bool HasLoop() const;
767 bool HasIrreducibleLoop() const;
768 bool HasInfiniteLoop() const;
769 bool HasFloatRegs() const;
770
771 /**
772 * Try-catch info
773 * Vector of begin try-blocks in order they were declared in the bytecode
774 */
AppendTryBeginBlock(const BasicBlock * block)775 void AppendTryBeginBlock(const BasicBlock *block)
776 {
777 tryBeginBlocks_.push_back(block);
778 }
779
EraseTryBeginBlock(const BasicBlock * block)780 void EraseTryBeginBlock(const BasicBlock *block)
781 {
782 auto it = std::find(tryBeginBlocks_.begin(), tryBeginBlocks_.end(), block);
783 if (it == tryBeginBlocks_.end()) {
784 ASSERT(false && "Trying to remove non try_begin block");
785 return;
786 }
787 tryBeginBlocks_.erase(it);
788 }
789
GetTryBeginBlocks()790 const auto &GetTryBeginBlocks() const
791 {
792 return tryBeginBlocks_;
793 }
794
AppendThrowableInst(const Inst * inst,BasicBlock * catchHandler)795 void AppendThrowableInst(const Inst *inst, BasicBlock *catchHandler)
796 {
797 auto it = throwableInsts_.emplace(inst, GetAllocator()->Adapter()).first;
798 it->second.push_back(catchHandler);
799 }
800
IsInstThrowable(const Inst * inst)801 bool IsInstThrowable(const Inst *inst) const
802 {
803 return throwableInsts_.count(inst) > 0;
804 }
805
806 void RemoveThrowableInst(const Inst *inst);
807 void ReplaceThrowableInst(Inst *inst, Inst *newInst);
808
GetThrowableInstHandlers(const Inst * inst)809 const auto &GetThrowableInstHandlers(const Inst *inst) const
810 {
811 ASSERT(IsInstThrowable(inst));
812 return throwableInsts_.at(inst);
813 }
814
ClearTryCatchInfo()815 void ClearTryCatchInfo()
816 {
817 throwableInsts_.clear();
818 tryBeginBlocks_.clear();
819 }
820
821 void DumpThrowableInsts(std::ostream *out) const;
822
823 /**
824 * Run pass specified by template argument T.
825 * Optimization passes might take additional arguments that will passed to Optimization's constructor.
826 * Analyses can't take additional arguments.
827 * @tparam T Type of pass
828 * @param args Additional arguments for optimizations passes
829 * @return true if pass was successful
830 */
831 template <typename T, typename... Args>
RunPass(Args...args)832 bool RunPass(Args... args)
833 {
834 ASSERT(GetPassManager());
835 return passManager_.RunPass<T>(std::forward<Args>(args)...);
836 }
837 template <typename T, typename... Args>
RunPass(Args...args)838 bool RunPass(Args... args) const
839 {
840 ASSERT(GetPassManager());
841 return passManager_.RunPass<T>(std::forward<Args>(args)...);
842 }
843
844 template <typename T>
RunPass(T * pass)845 bool RunPass(T *pass)
846 {
847 ASSERT(GetPassManager());
848 return passManager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
849 }
850
851 /**
852 * Get analysis instance.
853 * All analyses are reside in Graph object in composition relationship.
854 * @tparam T Type of analysis
855 * @return Reference to analysis instance
856 */
857 template <typename T>
GetAnalysis()858 T &GetAnalysis()
859 {
860 ASSERT(GetPassManager());
861 return GetPassManager()->GetAnalysis<T>();
862 }
863 template <typename T>
GetAnalysis()864 const T &GetAnalysis() const
865 {
866 ASSERT(GetPassManager());
867 return passManager_.GetAnalysis<T>();
868 }
869
870 /**
871 * Same as GetAnalysis but additionaly checck that analysis in valid state.
872 * @tparam T Type of analysis
873 * @return Reference to analysis instance
874 */
875 template <typename T>
GetValidAnalysis()876 T &GetValidAnalysis()
877 {
878 RunPass<T>();
879 ASSERT(IsAnalysisValid<T>());
880 return GetAnalysis<T>();
881 }
882 template <typename T>
GetValidAnalysis()883 const T &GetValidAnalysis() const
884 {
885 RunPass<T>();
886 ASSERT(IsAnalysisValid<T>());
887 return GetAnalysis<T>();
888 }
889
890 /**
891 * Return true if Analysis valid, false otherwise
892 * @tparam T Type of analysis
893 */
894 template <typename T>
IsAnalysisValid()895 bool IsAnalysisValid() const
896 {
897 return GetAnalysis<T>().IsValid();
898 }
899
900 /**
901 * Reset valid state of specified analysis
902 * @tparam T Type of analysis
903 */
904 template <typename T>
InvalidateAnalysis()905 void InvalidateAnalysis()
906 {
907 ASSERT(GetPassManager());
908 GetPassManager()->GetAnalysis<T>().SetValid(false);
909 }
910
911 /// Accessors to the number of current instruction id.
GetCurrentInstructionId()912 auto GetCurrentInstructionId() const
913 {
914 return instrCurrentId_;
915 }
SetCurrentInstructionId(size_t v)916 auto SetCurrentInstructionId(size_t v)
917 {
918 instrCurrentId_ = v;
919 }
920
921 /// RuntimeInterface accessors
GetRuntime()922 RuntimeInterface *GetRuntime() const
923 {
924 return runtime_;
925 }
SetRuntime(RuntimeInterface * runtime)926 void SetRuntime(RuntimeInterface *runtime)
927 {
928 runtime_ = runtime;
929 }
GetMethod()930 auto GetMethod() const
931 {
932 return method_;
933 }
SetMethod(RuntimeInterface::MethodPtr method)934 auto SetMethod(RuntimeInterface::MethodPtr method)
935 {
936 method_ = method;
937 }
938
939 Encoder *GetEncoder();
940 RegistersDescription *GetRegisters() const;
941 CallingConvention *GetCallingConvention();
942 const MethodProperties &GetMethodProperties();
943 void ResetParameterInfo();
944 SpillFillData GetDataForNativeParam(DataType::Type type);
945
GetEventWriter()946 EventWriter &GetEventWriter()
947 {
948 return eventWriter_;
949 }
950
SetCodeBuilder(CodeInfoBuilder * builder)951 void SetCodeBuilder(CodeInfoBuilder *builder)
952 {
953 ciBuilder_ = builder;
954 }
955
956 // clang-format off
957
958 /// Create instruction by opcode
CreateInst(Opcode opc)959 [[nodiscard]] Inst* CreateInst(Opcode opc) const
960 {
961 switch (opc) {
962 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
963 #define INST_DEF(OPCODE, BASE, ...) \
964 case Opcode::OPCODE: { \
965 auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE); \
966 inst->SetId(instrCurrentId_++); \
967 return inst; \
968 }
969 OPCODE_LIST(INST_DEF)
970
971 #undef INST_DEF
972 default:
973 return nullptr;
974 }
975 }
976 /// Define creation methods for all opcodes
977 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
978 #define INST_DEF(OPCODE, BASE, ...) \
979 template <typename... Args> \
980 [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const { \
981 auto inst = Inst::New<BASE>(allocator_, Opcode::OPCODE, std::forward<Args>(args)...); \
982 inst->SetId(instrCurrentId_++); \
983 return inst; \
984 }
985 OPCODE_LIST(INST_DEF)
986
987 #undef INST_DEF
988
989 #ifdef PANDA_COMPILER_DEBUG_INFO
990 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
991 #define INST_DEF(OPCODE, BASE, ...) \
992 template <typename... Args> \
993 [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const { \
994 auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
995 new_inst->SetCurrentMethod(inst->GetCurrentMethod()); \
996 return new_inst; \
997 }
OPCODE_LIST(INST_DEF)998 OPCODE_LIST(INST_DEF)
999
1000 #undef INST_DEF
1001 #else
1002 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
1003 #define INST_DEF(OPCODE, BASE, ...) \
1004 template <typename... Args> \
1005 [[nodiscard]] BASE* CreateInst##OPCODE(Inst* inst, Args&&... args) const { \
1006 auto new_inst = CreateInst##OPCODE(inst->GetType(), inst->GetPc(), std::forward<Args>(args)...); \
1007 return new_inst; \
1008 }
1009 OPCODE_LIST(INST_DEF)
1010
1011 #undef INST_DEF
1012 #endif
1013
1014 // clang-format on
1015
1016 uint32_t GetBitFields()
1017 {
1018 return bitFields_;
1019 }
1020
SetBitFields(uint32_t bitFields)1021 void SetBitFields(uint32_t bitFields)
1022 {
1023 bitFields_ = bitFields;
1024 }
1025
NeedCleanup()1026 bool NeedCleanup() const
1027 {
1028 return FlagNeedCleanup::Get(bitFields_);
1029 }
1030
SetNeedCleanup(bool v)1031 void SetNeedCleanup(bool v)
1032 {
1033 FlagNeedCleanup::Set(v, &bitFields_);
1034 }
1035
IsJitOrOsrMode()1036 bool IsJitOrOsrMode() const
1037 {
1038 return !IsAotMode() && !IsBytecodeOptimizer() && SupportManagedCode();
1039 }
1040
IsOsrMode()1041 bool IsOsrMode() const
1042 {
1043 return mode_.IsOsr();
1044 }
1045
IsJitMode()1046 bool IsJitMode() const
1047 {
1048 return !IsOsrMode() && IsJitOrOsrMode();
1049 }
1050
IsBytecodeOptimizer()1051 bool IsBytecodeOptimizer() const
1052 {
1053 return mode_.IsBytecodeOpt();
1054 }
1055
    /// True when the compiled method is dynamic (delegates to GraphMode::IsDynamicMethod).
    bool IsDynamicMethod() const
    {
        return mode_.IsDynamicMethod();
    }
1060
    /// True when the compilation mode supports managed code (delegates to GraphMode).
    bool SupportManagedCode() const
    {
        return mode_.SupportManagedCode();
    }
1065
    /// Returns the graph's compilation mode (a copy of the GraphMode flag set).
    GraphMode GetMode() const
    {
        return mode_;
    }
1070
    /// Replaces the graph's compilation mode wholesale.
    void SetMode(GraphMode mode)
    {
        mode_ = mode;
    }
1075
1076 #ifndef NDEBUG
GetCompilerMode()1077 compiler::inst_modes::Mode GetCompilerMode()
1078 {
1079 if (IsBytecodeOptimizer()) {
1080 return compiler::inst_modes::BYTECODE_OPT;
1081 }
1082 if (SupportManagedCode()) {
1083 return compiler::inst_modes::JIT_AOT;
1084 }
1085 return compiler::inst_modes::IRTOC;
1086 }
1087 #endif
1088
    /// Appends a method to the single-implementation list (see GetSingleImplementationList).
    // NOTE(review): presumably tracks methods whose single-implementation assumption
    // was relied upon during compilation — confirm against the callers.
    void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
    {
        singleImplementationList_.push_back(method);
    }
1093
    /// Marks the graph as compiling a dynamic method (sets the mode flag).
    void SetDynamicMethod()
    {
        mode_.SetDynamicMethod(true);
    }
1098
    /// Marks the graph as compiling a dynamic stub (sets the mode flag).
    void SetDynamicStub()
    {
        mode_.SetDynamicStub(true);
    }
1103
    /// Mutable access to the list filled by AddSingleImplementationMethod().
    auto &GetSingleImplementationList()
    {
        return singleImplementationList_;
    }
1108
    /// Graph into which this graph was inlined; nullptr for a top-level graph.
    Graph *GetParentGraph()
    {
        return parentGraph_;
    }
1113
GetOutermostParentGraph()1114 Graph *GetOutermostParentGraph()
1115 {
1116 auto graph = this;
1117 while (graph->GetParentGraph() != nullptr) {
1118 graph = graph->GetParentGraph();
1119 }
1120 return graph;
1121 }
1122
    /// Sets the number of virtual registers of the compiled method (inlined methods excluded).
    void SetVRegsCount(size_t count)
    {
        vregsCount_ = count;
    }
1127
    /// Number of virtual registers of the compiled method (inlined methods excluded).
    size_t GetVRegsCount() const
    {
        return vregsCount_;
    }
1132
GetEnvCount()1133 size_t GetEnvCount() const
1134 {
1135 return (IsDynamicMethod() && !IsBytecodeOptimizer()) ? VRegInfo::ENV_COUNT : 0;
1136 }
1137
    /// Handler used for code relocations; nullptr when none was installed.
    RelocationHandler *GetRelocationHandler()
    {
        return relocationHandler_;
    }
1142
    /// Installs the relocation handler (not owned by the graph).
    void SetRelocationHandler(RelocationHandler *handler)
    {
        relocationHandler_ = handler;
    }
1147
    /// Profile counter for the edge of `block` leading to its true (trueSucc) or
    /// false successor; defined out-of-line (relies on runtime profiling data).
    int64_t GetBranchCounter(const BasicBlock *block, bool trueSucc);

    /// Profile counter of throws observed in `block`; defined out-of-line.
    int64_t GetThrowCounter(const BasicBlock *block);
1151
1152 /// This class provides methods for ranged-based `for` loop over all parameters in the graph.
1153 class ParameterList {
1154 public:
1155 class Iterator {
1156 public:
Iterator(Inst * inst)1157 explicit Iterator(Inst *inst) : inst_(inst) {}
1158
1159 Iterator &operator++()
1160 {
1161 for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
1162 inst_ = inst_->GetNext()) {
1163 }
1164 return *this;
1165 }
1166 bool operator!=(const Iterator &other)
1167 {
1168 return inst_ != other.inst_;
1169 }
1170 Inst *operator*()
1171 {
1172 return inst_;
1173 }
1174 Inst *operator->()
1175 {
1176 return inst_;
1177 }
1178
1179 private:
1180 Inst *inst_ {nullptr};
1181 };
1182
ParameterList(const Graph * graph)1183 explicit ParameterList(const Graph *graph) : graph_(graph) {}
1184
1185 // NOLINTNEXTLINE(readability-identifier-naming)
1186 Iterator begin();
1187 // NOLINTNEXTLINE(readability-identifier-naming)
end()1188 static Iterator end()
1189 {
1190 return Iterator(nullptr);
1191 }
1192
1193 private:
1194 const Graph *graph_ {nullptr};
1195 };
1196
    /**
     * Get the list of all Parameter instructions of the graph.
     * @return instance of the ParameterList class, suitable for range-based `for`
     */
    ParameterList GetParameters() const
    {
        return ParameterList(this);
    }
1205
    /// Initializes default locations; defined out-of-line (see FlagDefaultLocationsInit).
    void InitDefaultLocations();
1207
SupportsIrtocBarriers()1208 bool SupportsIrtocBarriers() const
1209 {
1210 return (IsJitOrOsrMode() || IsAotMode() || GetMode().IsInterpreter() || GetMode().IsInterpreterEntry()) &&
1211 !IsDynamicMethod() && GetArch() != Arch::AARCH32;
1212 }
1213
    /// Raises the recorded maximal inlining depth; never lowers it (keeps the max).
    void SetMaxInliningDepth(uint32_t depth)
    {
        maxInliningDepth_ = std::max(maxInliningDepth_, depth);
    }
1218
    /// Maximal inlining depth recorded so far via SetMaxInliningDepth().
    uint32_t GetMaxInliningDepth()
    {
        return maxInliningDepth_;
    }
1223
private:
    /// Inserts the given constant instruction into the start block (out-of-line helper).
    void AddConstInStartBlock(ConstantInst *constInst);

    NO_MOVE_SEMANTIC(Graph);
    NO_COPY_SEMANTIC(Graph);

private:
    uint32_t maxInliningDepth_ {0};
    ArenaAllocator *const allocator_;
    ArenaAllocator *const localAllocator_;

    Arch arch_ {RUNTIME_ARCH};

    // List of blocks in insertion order.
    ArenaVector<BasicBlock *> vectorBb_;
    BasicBlock *startBlock_ {nullptr};
    BasicBlock *endBlock_ {nullptr};

    Loop *rootLoop_ {nullptr};

    AotData *aotData_ {nullptr};

    // Packed boolean graph properties; each Flag* below occupies one bit,
    // chained via BitField::NextFlag. Accessed through Get/SetBitFields and
    // the dedicated flag accessors above.
    uint32_t bitFields_ {0};
    using FlagDFConstruct = BitField<bool, 0, 1>;
    using FlagNeedCleanup = FlagDFConstruct::NextFlag;
    // NB: "Irredicible" spelling kept — the identifier may be referenced elsewhere.
    using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
    using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
    using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
    using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
    using FlagIrtocPrologEpilogOptimized = FlagDefaultLocationsInit::NextFlag;
    using FlagUnrollComplete = FlagIrtocPrologEpilogOptimized::NextFlag;
#ifdef NDEBUG
    using LastField = FlagUnrollComplete;
#else
    // Debug-only flags used for extra IR verification.
    using FlagRegallocApplied = FlagUnrollComplete::NextFlag;
    using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
    using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
    using FlagLowLevelInstnsEnabled = FlagInliningComplete::NextFlag;
    using FlagDynUnitTest = FlagLowLevelInstnsEnabled::NextFlag;
    using LastField = FlagDynUnitTest;
#endif  // NDEBUG

#ifdef PANDA_COMPILER_DEBUG_INFO
    using FlagLineDebugInfoEnabled = LastField::NextFlag;
#endif

    // codegen data
    EncodeDataType data_;
    Span<const uint8_t> codeInfoData_;
    ArenaVector<bool> *usedRegs_ {nullptr};
    ArenaVector<bool> *usedVregs_ {nullptr};

    // NOTE (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
    ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwableInsts_;

    RegMask archUsedRegs_ {0};

    // Monotonic id source for CreateInst* (mutable: ids are handed out from const creators).
    mutable size_t instrCurrentId_ {0};
    // first constant instruction in graph !NOTE rewrite it to hash-map
    ConstantInst *firstConstInst_ {nullptr};
    Inst *nullptrInst_ {nullptr};
    Inst *undefinedInst_ {nullptr};
    RuntimeInterface *runtime_ {nullptr};
    RuntimeInterface::MethodPtr method_ {nullptr};

    Encoder *encoder_ {nullptr};

    mutable RegistersDescription *registers_ {nullptr};

    CallingConvention *callconv_ {nullptr};

    std::optional<MethodProperties> methodProperties_ {std::nullopt};

    ParameterInfo *paramInfo_ {nullptr};

    RelocationHandler *relocationHandler_ {nullptr};

    mutable PassManager passManager_;
    EventWriter eventWriter_;

    GraphMode mode_;

    CodeInfoBuilder *ciBuilder_ {nullptr};

    ArenaVector<RuntimeInterface::MethodPtr> singleImplementationList_;
    ArenaVector<const BasicBlock *> tryBeginBlocks_;
    ArenaVector<ConstantInst *> spilledConstants_;
    // Graph that inlines this graph
    Graph *parentGraph_ {nullptr};
    // Number of used stack slots
    uint32_t stackSlotCount_ {0};
    // Number of used stack slots for parameters
    uint32_t paramSlotsCount_ {0};
    // First language extension slot
    uint32_t extStackSlot_ {0};
    // Number of the virtual registers used in the compiled method (inlined methods aren't included).
    uint32_t vregsCount_ {0};
    // Source language of the method being compiled
    SourceLanguage lang_ {SourceLanguage::PANDA_ASSEMBLY};
1323 };
1324
/// RAII wrapper over a graph marker: acquires a fresh marker from the graph on
/// construction and returns it via EraseMarker() on destruction, so markers
/// cannot leak on early returns or exceptions.
class MarkerHolder {
public:
    NO_COPY_SEMANTIC(MarkerHolder);
    NO_MOVE_SEMANTIC(MarkerHolder);

    explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
    {
        // NewMarker must not run out of marker slots here.
        ASSERT(marker_ != UNDEF_MARKER);
    }

    ~MarkerHolder()
    {
        graph_->EraseMarker(marker_);
    }

    /// Returns the marker owned by this holder (valid until destruction).
    Marker GetMarker()
    {
        return marker_;
    }

private:
    const Graph *graph_;
    Marker marker_ {UNDEF_MARKER};
};
1349
1350 template <typename T>
FindOrCreateConstant(T value)1351 ConstantInst *Graph::FindOrCreateConstant(T value)
1352 {
1353 bool isSupportInt32 = IsBytecodeOptimizer();
1354 if (firstConstInst_ == nullptr) {
1355 firstConstInst_ = CreateInstConstant(value, isSupportInt32);
1356 AddConstInStartBlock(firstConstInst_);
1357 return firstConstInst_;
1358 }
1359 ConstantInst *currentConst = firstConstInst_;
1360 ConstantInst *prevConst = nullptr;
1361 while (currentConst != nullptr) {
1362 if (currentConst->IsEqualConst(value, isSupportInt32)) {
1363 return currentConst;
1364 }
1365 prevConst = currentConst;
1366 currentConst = currentConst->GetNextConst();
1367 }
1368 ASSERT(prevConst != nullptr);
1369 auto *newConst = CreateInstConstant(value, isSupportInt32);
1370 AddConstInStartBlock(newConst);
1371
1372 prevConst->SetNextConst(newConst);
1373 return newConst;
1374 }
1375
/// Invalidates analyses depending on the blocks' linear order (defined out-of-line).
void InvalidateBlocksOrderAnalyzes(Graph *graph);
/// Marks loop-exit related blocks of the graph with the given marker (defined out-of-line).
void MarkLoopExits(const Graph *graph, Marker marker);
/// Removes `rmPred` from `block`'s predecessors and updates dataflow accordingly.
void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rmPred);
/// Builds a full, human-readable name of the method for diagnostics/events.
std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
/// Returns the offset of the field described by objType/field/typeId (defined out-of-line).
size_t GetObjectOffset(const Graph *graph, ObjectType objType, RuntimeInterface::FieldPtr field, uint32_t typeId);
1381 } // namespace panda::compiler
1382 #endif // COMPILER_OPTIMIZER_IR_GRAPH_H
1383