/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MAPLEBE_INCLUDE_CG_INSN_H
#define MAPLEBE_INCLUDE_CG_INSN_H
/* C++ headers */
#include <cstddef> /* for nullptr */
#include <string>
#include <vector>
#include <list>
#include <set> /* for std::set (GetDefRegs, InsnSet) */
/* Maple CG headers */
#include "operand.h"
#include "isa.h"
#include "common_utils.h"
/* Maple IR header */
#include "types_def.h" /* for uint32 */
/* Maple Util headers */
#include "mpl_logging.h"

#include "stackmap.h"
#include "sparse_datainfo.h"

/* Maple Util headers */
#include "mem_reference_table.h" /* for alias */
namespace maplebe {
/* forward declaration */
class BB;
class CG;
class Emitter;
class DepNode;
struct InsnDesc;
class InsnBuilder;
class OperandBuilder;

class Insn {
public:
    /* MCC_DecRefResetPair clears 2 stack positions, MCC_ClearLocalStackRef clears 1 stack position */
    static constexpr uint8 kMaxStackOffsetSize = 2;

    Insn(MemPool &memPool, MOperator opc)
        : mOp(opc),
          localAlloc(&memPool),
          opnds(localAlloc.Adapter()),
          registerBinding(localAlloc.Adapter()),
          comment(&memPool),
          stackMapLiveIn(localAlloc.Adapter())
    {
    }
    Insn(MemPool &memPool, MOperator opc, Operand &opnd0) : Insn(memPool, opc)
    {
        opnds.emplace_back(&opnd0);
    }
    Insn(MemPool &memPool, MOperator opc, Operand &opnd0, Operand &opnd1) : Insn(memPool, opc)
    {
        opnds.emplace_back(&opnd0);
        opnds.emplace_back(&opnd1);
    }
    Insn(MemPool &memPool, MOperator opc, Operand &opnd0, Operand &opnd1, Operand &opnd2) : Insn(memPool, opc)
    {
        opnds.emplace_back(&opnd0);
        opnds.emplace_back(&opnd1);
        opnds.emplace_back(&opnd2);
    }
    Insn(MemPool &memPool, MOperator opc, Operand &opnd0, Operand &opnd1, Operand &opnd2, Operand &opnd3)
        : Insn(memPool, opc)
    {
        opnds.emplace_back(&opnd0);
        opnds.emplace_back(&opnd1);
        opnds.emplace_back(&opnd2);
        opnds.emplace_back(&opnd3);
    }
    Insn(MemPool &memPool, MOperator opc, Operand &opnd0, Operand &opnd1, Operand &opnd2, Operand &opnd3,
         Operand &opnd4)
        : Insn(memPool, opc)
    {
        opnds.emplace_back(&opnd0);
        opnds.emplace_back(&opnd1);
        opnds.emplace_back(&opnd2);
        opnds.emplace_back(&opnd3);
        opnds.emplace_back(&opnd4);
    }
    virtual ~Insn() = default;

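    /*
     * Usage sketch (illustrative only; the backend normally creates insns through a
     * target-specific InsnBuilder rather than these constructors, and memPool, mop,
     * dst and src below are placeholders):
     *
     *   Insn &mov = *memPool.New<Insn>(memPool, mop, dst, src);
     *   mov.SetComment("copy src into dst");
     */
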
    void DeepClone(const Insn &insn, MapleAllocator &allocator)
    {
        opnds.clear();
        for (auto opnd : insn.opnds) {
            opnds.emplace_back(opnd->CloneTree(allocator));
        }
    }

    // Custom deep copy
    virtual Insn *CloneTree(MapleAllocator &allocator) const
    {
        auto *insn = allocator.GetMemPool()->New<Insn>(*this);
        insn->DeepClone(*this, allocator);
        return insn;
    }

    MOperator GetMachineOpcode() const
    {
        return mOp;
    }

    void SetMOP(const InsnDesc &idesc);

    void AddOperand(Operand &opnd)
    {
        opnds.emplace_back(&opnd);
    }

    Insn &AddOpndChain(Operand &opnd)
    {
        AddOperand(opnd);
        return *this;
    }
    /* use carefully: commuting operands may make the insn illegal */
    void CommuteOperands(uint32 dIndex, uint32 sIndex);
    void CleanAllOperand()
    {
        opnds.clear();
    }

    void PopBackOperand()
    {
        opnds.pop_back();
    }

    Operand &GetOperand(uint32 index) const
    {
        DEBUG_ASSERT(index < opnds.size(), "index out of range");
        return *opnds[index];
    }

    void ResizeOpnds(uint32 newSize)
    {
        opnds.resize(static_cast<std::size_t>(newSize));
    }

    uint32 GetOperandSize() const
    {
        return static_cast<uint32>(opnds.size());
    }

    void SetOperand(uint32 index, Operand &opnd)
    {
        DEBUG_ASSERT(index < opnds.size(), "index out of range");
        opnds[index] = &opnd;
    }

    /* Get size info from machine description */
    uint32 GetOperandSize(uint32 index) const
    {
        CHECK_FATAL(index < opnds.size(), "index out of range!");
        const OpndDesc *opndMD = md->GetOpndDes(index);
        return opndMD->GetSize();
    }

    void SetRetSize(uint32 size)
    {
        DEBUG_ASSERT(IsCall(), "Insn should be a call.");
        retSize = size;
    }

    uint32 GetRetSize() const
    {
        DEBUG_ASSERT(IsCall(), "Insn should be a call.");
        return retSize;
    }

    // Insn function: check the legitimacy of the operands.
    bool VerifySelf() const
    {
        if (this->IsCfiInsn() || this->IsDbgInsn()) {
            return true;
        }
        return md->Verify(opnds);
    }

    void SplitSelf(bool isAfterRegAlloc, InsnBuilder *insnBuilder, OperandBuilder *opndBuilder)
    {
        md->Split(this, isAfterRegAlloc, insnBuilder, opndBuilder);
    }

    virtual bool IsMachineInstruction() const;

    virtual bool IsIntrinsic() const
    {
        return false;
    }

    bool OpndIsDef(uint32 id) const;

    virtual bool IsPCLoad() const
    {
        return false;
    }

    Operand *GetMemOpnd() const;

    bool IsCall() const;
    bool IsSpecialCall() const;
    bool IsTailCall() const;
    bool IsAsmInsn() const;
    bool IsBranch() const;
    bool IsCondBranch() const;
    bool IsUnCondBranch() const;
    bool IsMove() const;
    bool IsBasicOp() const;
    bool IsUnaryOp() const;
    bool IsShift() const;
    bool IsPhi() const;
    bool IsLoad() const;
    bool IsStore() const;
    bool IsConversion() const;
    bool IsAtomic() const;
    bool IsLoadStorePair() const;
    bool IsLoadLabel() const;

    virtual bool NoAlias() const
    {
        return false;
    }

    bool IsMemAccess() const;

    virtual bool HasSideEffects() const
    {
        return false;
    }

    virtual bool IsSpecialIntrinsic() const;

    bool IsComment() const;
    bool IsImmaterialInsn() const;

    bool IsPseudo() const;

    virtual bool IsTargetInsn() const
    {
        return true;
    }

    virtual bool IsCfiInsn() const
    {
        return false;
    }

    virtual bool IsDbgInsn() const
    {
        return false;
    }

    virtual bool IsDbgLine() const
    {
        return false;
    }

    virtual Operand *GetCallTargetOperand() const;

    uint32 GetAtomicNum() const;
    bool IsAtomicStore() const
    {
        return IsStore() && IsAtomic();
    }

    void SetCondDef()
    {
        flags |= kOpCondDef;
    }

    bool IsCondDef() const
    {
        return flags & kOpCondDef;
    }

    bool AccessMem() const
    {
        return IsLoad() || IsStore();
    }

    bool IsFrameDef() const
    {
        return isFrameDef;
    }

    void SetFrameDef(bool b)
    {
        isFrameDef = b;
    }

    bool IsStackDef() const
    {
        return isStackDef;
    }

    void SetStackDef(bool flag)
    {
        isStackDef = flag;
    }

    bool IsStackRevert() const
    {
        return isStackRevert;
    }

    void SetStackRevert(bool flag)
    {
        isStackRevert = flag;
    }

    bool IsAsmDefCondCode() const
    {
        return asmDefCondCode;
    }

    void SetAsmDefCondCode()
    {
        asmDefCondCode = true;
    }

    bool IsAsmModMem() const
    {
        return asmModMem;
    }

    void SetAsmModMem()
    {
        asmModMem = true;
    }

    virtual uint32 GetUnitType()
    {
        return 0;
    }

    virtual void Dump() const;

    virtual bool CheckMD() const;

    void SetComment(const std::string &str)
    {
        comment = str;
    }

    void SetComment(const MapleString &str)
    {
        comment = str;
    }

    const MapleString &GetComment() const
    {
        return comment;
    }

    void AppendComment(const std::string &str)
    {
        comment += str;
    }

    void SetDebugComment(const MapleString *str)
    {
        debugComment = str;
    }

    const MapleString *GetDebugComment() const
    {
        return debugComment;
    }

    void MarkAsSaveRetValToLocal()
    {
        flags |= kOpDassignToSaveRetValToLocal;
    }

    bool IsSaveRetValToLocal() const
    {
        return ((flags & kOpDassignToSaveRetValToLocal) != 0);
    }

    void MarkAsAccessRefField(bool cond)
    {
        if (cond) {
            flags |= kOpAccessRefField;
        }
    }

    bool IsAccessRefField() const
    {
        return ((flags & kOpAccessRefField) != 0);
    }

    Insn *GetPreviousMachineInsn() const
    {
        for (Insn *returnInsn = prev; returnInsn != nullptr; returnInsn = returnInsn->prev) {
            DEBUG_ASSERT(returnInsn->bb == bb, "insn and its prev insn must be in the same bb");
            if (returnInsn->IsMachineInstruction()) {
                return returnInsn;
            }
        }
        return nullptr;
    }

    Insn *GetNextMachineInsn() const
    {
        for (Insn *returnInsn = next; returnInsn != nullptr; returnInsn = returnInsn->next) {
            CHECK_FATAL(returnInsn->bb == bb, "insn and its next insn must be in the same bb");
            if (returnInsn->IsMachineInstruction()) {
                return returnInsn;
            }
        }
        return nullptr;
    }

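    /*
     * Sketch (illustrative; "start" is a placeholder): starting from any insn,
     * GetNextMachineInsn() skips non-machine insns (comments, cfi, dbg) in the same BB:
     *
     *   for (Insn *i = start->GetNextMachineInsn(); i != nullptr; i = i->GetNextMachineInsn()) {
     *       // i always satisfies IsMachineInstruction()
     *   }
     */
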
    void SetPrev(Insn *prev)
    {
        this->prev = prev;
    }

    Insn *GetPrev()
    {
        return prev;
    }

    const Insn *GetPrev() const
    {
        return prev;
    }

    void SetNext(Insn *next)
    {
        this->next = next;
    }

    Insn *GetNext() const
    {
        return next;
    }

    void SetBB(BB *bb)
    {
        this->bb = bb;
    }

    BB *GetBB()
    {
        return bb;
    }

    const BB *GetBB() const
    {
        return bb;
    }

    void SetId(uint32 id)
    {
        this->id = id;
    }

    uint32 GetId() const
    {
        return id;
    }

    void SetAddress(uint32 addr)
    {
        address = addr;
    }

    uint32 GetAddress() const
    {
        return address;
    }

    void SetNopNum(uint32 num)
    {
        nopNum = num;
    }

    uint32 GetNopNum() const
    {
        return nopNum;
    }

    void SetNeedSplit(bool flag)
    {
        needSplit = flag;
    }

    bool IsNeedSplit() const
    {
        return needSplit;
    }

    void SetIsThrow(bool isThrow)
    {
        this->isThrow = isThrow;
    }

    bool GetIsThrow() const
    {
        return isThrow;
    }

    void SetDoNotRemove(bool doNotRemove)
    {
        this->doNotRemove = doNotRemove;
    }

    bool GetDoNotRemove() const
    {
        return doNotRemove;
    }

    void SetIsSpill()
    {
        this->isSpill = true;
    }

    bool GetIsSpill() const
    {
        return isSpill;
    }

    void SetIsReload()
    {
        this->isReload = true;
    }

    bool GetIsReload() const
    {
        return isReload;
    }

    bool IsSpillInsn() const
    {
        return (isSpill || isReload);
    }

    void SetIsCallReturnUnsigned(bool unSigned)
    {
        DEBUG_ASSERT(IsCall(), "Insn should be a call.");
        this->isCallReturnUnsigned = unSigned;
    }

    bool GetIsCallReturnUnsigned() const
    {
        DEBUG_ASSERT(IsCall(), "Insn should be a call.");
        return isCallReturnUnsigned;
    }

    bool GetIsCallReturnSigned() const
    {
        DEBUG_ASSERT(IsCall(), "Insn should be a call.");
        return !isCallReturnUnsigned;
    }

    void SetClearStackOffset(short index, int64 offset)
    {
        CHECK_FATAL(index < kMaxStackOffsetSize, "out of clearStackOffset's range");
        clearStackOffset[index] = offset;
    }

    int64 GetClearStackOffset(short index) const
    {
        CHECK_FATAL(index < kMaxStackOffsetSize, "out of clearStackOffset's range");
        return clearStackOffset[index];
    }

    /* if the callee is MCC_ClearLocalStackRef or MCC_DecRefResetPair, the call clears the designated stack slot(s) */
    bool IsClearDesignateStackCall() const
    {
        return clearStackOffset[0] != -1 || clearStackOffset[1] != -1;
    }

    void SetDepNode(DepNode &depNode)
    {
        this->depNode = &depNode;
    }

    DepNode *GetDepNode()
    {
        return depNode;
    }

    const DepNode *GetDepNode() const
    {
        return depNode;
    }

    void SetIsPhiMovInsn(bool val)
    {
        isPhiMovInsn = val;
    }

    bool IsPhiMovInsn() const
    {
        return isPhiMovInsn;
    }

    void SetInsnDescrption(const InsnDesc &newMD)
    {
        md = &newMD;
    }

    const InsnDesc *GetDesc() const
    {
        return md;
    }

    void AddRegBinding(uint32 regA, uint32 regB)
    {
        (void)registerBinding.emplace(regA, regB);
    }

    const MapleMap<uint32, uint32> &GetRegBinding() const
    {
        return registerBinding;
    }

    void SetReferenceOsts(MemDefUse *memDefUse)
    {
        referenceOsts = memDefUse;
    }

    const MemDefUse *GetReferenceOsts() const
    {
        return referenceOsts;
    }

    void MergeReferenceOsts(Insn &rhs)
    {
        if (referenceOsts == nullptr) {
            SetReferenceOsts(rhs.referenceOsts);
        } else if (rhs.referenceOsts != nullptr) {
            referenceOsts->MergeOthers(*rhs.referenceOsts);
        }
    }

    bool Equals(const Insn &rhs) const
    {
        if (&rhs == this) {
            return true;
        }
        if (mOp != rhs.mOp || opnds.size() != rhs.opnds.size()) {
            return false;
        }
        for (uint32 i = 0; i < opnds.size(); ++i) {
            if (!opnds[i]->Equals(*rhs.opnds[i])) {
                return false;
            }
        }
        return true;
    }

    void AddDeoptBundleInfo(int32 deoptVreg, Operand &opnd)
    {
        if (stackMap == nullptr) {
            stackMap = localAlloc.New<StackMap>(localAlloc);
        }
        stackMap->GetDeoptInfo().AddDeoptBundleInfo(deoptVreg, opnd);
    }

    StackMap *GetStackMap()
    {
        return stackMap;
    }

    const StackMap *GetStackMap() const
    {
        return stackMap;
    }

    void InitStackMapInfo()
    {
        if (stackMap != nullptr) {
            return;
        }
        stackMap = localAlloc.New<StackMap>(localAlloc);
    }

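    /*
     * Sketch (illustrative; the vreg number and operand are placeholders): deopt bundle
     * info is attached lazily -- the first AddDeoptBundleInfo call allocates the StackMap
     * from this insn's local allocator, so GetStackMap() is non-null afterwards:
     *
     *   callInsn.AddDeoptBundleInfo(100, someOpnd);
     *   StackMap *sm = callInsn.GetStackMap();
     */
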
    void SetRefSkipIdx(int32 index)
    {
        refSkipIdx = index;
    }

    /* Get the size of memory written/read by this insn */
    uint32 GetMemoryByteSize() const;

    /* return true if the register appears in this insn */
    virtual bool ScanReg(regno_t regNO) const;

    virtual bool IsRegDefined(regno_t regNO) const;

    virtual std::set<uint32> GetDefRegs() const;

    void SetStackMapDef(SparseDataInfo &def)
    {
        stackMapDef = &def;
    }

    SparseDataInfo *GetStackMapDef()
    {
        return stackMapDef;
    }

    void SetStackMapUse(SparseDataInfo &use)
    {
        stackMapUse = &use;
    }

    SparseDataInfo *GetStackMapUse()
    {
        return stackMapUse;
    }

    void SetStackMapLiveIn(SparseDataInfo &liveIn)
    {
        liveIn.GetInfo().ConvertToSet(stackMapLiveIn);
    }

    const MapleSet<regno_t> &GetStackMapLiveIn() const
    {
        return stackMapLiveIn;
    }

    void ClearStackMapDefUse()
    {
        stackMapDef = nullptr;
        stackMapUse = nullptr;
    }

protected:
    MOperator mOp;
    MapleAllocator localAlloc;
    MapleVector<Operand *> opnds;
    Insn *prev = nullptr;
    Insn *next = nullptr;
    BB *bb = nullptr; /* BB to which this insn belongs */
    uint32 flags = 0;
    bool isPhiMovInsn = false;

private:
    MapleMap<uint32, uint32> registerBinding; /* used for inline asm only */
    StackMap *stackMap = nullptr;
    enum OpKind : uint32 {
        kOpUnknown = 0,
        kOpCondDef = 0x1,
        kOpAccessRefField = (1ULL << 30),            /* load-from/store-into a ref flag-field */
        kOpDassignToSaveRetValToLocal = (1ULL << 31) /* save return value to local flag */
    };

    uint32 id = 0;
    uint32 address = 0;
    uint32 nopNum = 0;
    uint32 retSize = 0; /* Byte size of the return value if insn is a call. */
    /* record the stack slots cleared by MCC_ClearLocalStackRef or MCC_DecRefResetPair */
    int64 clearStackOffset[kMaxStackOffsetSize] = {-1, -1};
    DepNode *depNode = nullptr; /* For dependence analysis, pointing to a dependence node. */
    const MapleString *debugComment = nullptr;
    MapleString comment;
    bool isThrow = false;
    bool doNotRemove = false;          /* caller reg crosses a call */
    bool isCallReturnUnsigned = false; /* for call insn only. false: signed, true: unsigned */
    bool isSpill = false;              /* used as a hint for optimization */
    bool isReload = false;             /* used as a hint for optimization */
    bool isFrameDef = false;
    bool isStackDef = false;     // defines sp in the prolog
    bool isStackRevert = false;  // reverts sp in the epilog
    bool asmDefCondCode = false;
    bool asmModMem = false;
    bool needSplit = false;

    /* for dynamic languages: marks reference counting */
    int32 refSkipIdx = -1;

    /* for multiple architectures */
    const InsnDesc *md = nullptr;
    MemDefUse *referenceOsts = nullptr;
    SparseDataInfo *stackMapDef = nullptr;
    SparseDataInfo *stackMapUse = nullptr;
    MapleSet<regno_t> stackMapLiveIn;
};

struct InsnIdCmp {
    bool operator()(const Insn *lhs, const Insn *rhs) const
    {
        CHECK_FATAL(lhs != nullptr, "lhs is nullptr in InsnIdCmp");
        CHECK_FATAL(rhs != nullptr, "rhs is nullptr in InsnIdCmp");
        return lhs->GetId() < rhs->GetId();
    }
};
using InsnSet = std::set<Insn *, InsnIdCmp>;
using InsnMapleSet = MapleSet<Insn *, InsnIdCmp>;
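/*
 * Usage sketch: InsnIdCmp orders insns by their id, so an InsnSet iterates in
 * ascending id order regardless of insertion order (the insns and ids are illustrative):
 *
 *   InsnSet visited;
 *   visited.insert(&laterInsn);    // e.g. id 10
 *   visited.insert(&earlierInsn);  // e.g. id 3 -- visited first when iterating
 */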
} /* namespace maplebe */

#endif /* MAPLEBE_INCLUDE_CG_INSN_H */