• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef MAPLEBE_INCLUDE_CG_OPERAND_H
17 #define MAPLEBE_INCLUDE_CG_OPERAND_H
18 
19 #include "becommon.h"
20 #include "cg_option.h"
21 #include "aarch64/aarch64_imm_valid.h"
22 #include "visitor_common.h"
23 
24 /* maple_ir */
25 #include "mir_symbol.h"
26 #include "prim_types.h" /* for PrimType */
27 #include "types_def.h"  /* need uint8 etc */
28 
29 /* Mempool */
30 #include "memlayout.h"
31 #include "mempool_allocator.h" /* MapleList */
32 
33 namespace maplebe {
34 class OpndDesc;
35 class Emitter;
36 class FuncEmitInfo;
37 
38 bool IsMoveWidableImmediate(uint64 val, uint32 bitLen);
39 bool BetterUseMOVZ(uint64 val);
40 
41 using MOperator = uint32;
/* Register class of a RegOperand; names suggest the target resource each maps to. */
enum RegType : maple::uint8 {
    kRegTyUndef,  /* register class not yet determined */
    kRegTyInt,    /* general-purpose (integer) registers */
    kRegTyFloat,  /* floating-point / SIMD registers */
    kRegTyCc,     /* condition-code (flags) register */
    kRegTyX87,    /* presumably x87 FPU stack registers — x86 targets */
    kRegTyVary,   /* class resolved later (see IsOfVary users) — TODO confirm exact semantics */
    kRegTyFpsc,   /* presumably FP status/control register */
    kRegTyIndex,  /* presumably index register for x86 addressing */
    kRegTyLast,   /* sentinel: count of register types */
};
53 
/*
 * Abstract base class for all code-generation operands (registers, immediates,
 * memory references, condition codes, ...). Stores the operand kind and its
 * size in bits; concrete subclasses add their own payload. Subclasses must
 * implement CloneTree/Clone (deep/pool copy), Dump, Less and Accept.
 */
class Operand {
public:
    enum OperandType : uint8 {
        kOpdRegister,
        kOpdImmediate,
        kOpdMem,
        kOpdCond, /*  for condition code */
        kOpdPhi,  /*  for phi operand */
        /*
         * NOTE: IsImmediate() below relies on kOpdFPImmediate..kOpdOffset
         * being contiguous — keep these three enumerators adjacent.
         */
        kOpdFPImmediate,
        kOpdStImmediate, /* use the symbol name as the offset */
        kOpdOffset,      /* for the offset operand in MemOperand */
        kOpdBBAddress,
        kOpdList,     /*  for list operand */
        kOpdShift,    /*  for imm shift operand */
        kOpdRegShift, /*  for reg shift operand */
        kOpdExtend,   /*  for extend operand */
        kOpdString,   /*  for comments */
        kOpdUndef
    };

    Operand(OperandType type, uint32 size) : opndKind(type), size(size) {}
    virtual ~Operand() = default;

    /* Size of the operand in bits (see the `size` member). */
    uint32 GetSize() const
    {
        return size;
    }

    void SetSize(uint32 sz)
    {
        size = sz;
    }

    /* Whether this operand refers to a GC/managed reference (set by SetIsReference). */
    bool IsReference() const
    {
        return isReference;
    }

    void SetIsReference(bool isRef)
    {
        isReference = isRef;
    }

    OperandType GetKind() const
    {
        return opndKind;
    }

    /* Integer immediate: plain immediate or a memory-offset immediate. */
    bool IsIntImmediate() const
    {
        return opndKind == kOpdImmediate || opndKind == kOpdOffset;
    }

    /* Any constant immediate: integer, offset, or floating point. */
    bool IsConstImmediate() const
    {
        return opndKind == kOpdImmediate || opndKind == kOpdOffset || opndKind == kOpdFPImmediate;
    }

    bool IsOfstImmediate() const
    {
        return opndKind == kOpdOffset;
    }

    bool IsStImmediate() const
    {
        return opndKind == kOpdStImmediate;
    }

    /*
     * True for kOpdImmediate and everything in the contiguous range
     * kOpdFPImmediate..kOpdOffset (FP, symbol, offset immediates).
     * Depends on enumerator order above.
     */
    bool IsImmediate() const
    {
        return (kOpdFPImmediate <= opndKind && opndKind <= kOpdOffset) || opndKind == kOpdImmediate;
    }

    bool IsRegister() const
    {
        return opndKind == kOpdRegister;
    }

    bool IsList() const
    {
        return opndKind == kOpdList;
    }

    bool IsPhi() const
    {
        return opndKind == kOpdPhi;
    }

    bool IsMemoryAccessOperand() const
    {
        return opndKind == kOpdMem;
    }

    bool IsLabel() const
    {
        return opndKind == kOpdBBAddress;
    }

    /* Overridden by target register operands that model the hardware zero register. */
    virtual bool IsZeroRegister() const
    {
        return false;
    };

    bool IsConditionCode() const
    {
        return opndKind == kOpdCond;
    }

    bool IsOpdShift() const
    {
        return opndKind == kOpdShift;
    }

    bool IsRegShift() const
    {
        return opndKind == kOpdRegShift;
    }

    bool IsOpdExtend() const
    {
        return opndKind == kOpdExtend;
    }

    virtual bool IsLabelOpnd() const
    {
        return false;
    }

    virtual bool IsFuncNameOpnd() const
    {
        return false;
    }

    virtual bool IsCommentOpnd() const
    {
        return false;
    }

    // Custom deep copy
    virtual Operand *CloneTree(MapleAllocator &allocator) const = 0;
    virtual Operand *Clone(MemPool &memPool) const = 0;

    /*
     * A simple implementation here.
     * Each subclass can elaborate on demand.
     * Base behavior is identity comparison (same object), guarded by
     * kind/size agreement; subclasses override with structural equality.
     */
    virtual bool Equals(Operand &op) const
    {
        return BasicEquals(op) && (&op == this);
    }

    /* Shallow equality: same operand kind and same bit size. */
    bool BasicEquals(const Operand &op) const
    {
        return opndKind == op.GetKind() && size == op.GetSize();
    }

    virtual void Dump() const = 0;

    /* Strict weak ordering used by operand containers; subclasses define it. */
    virtual bool Less(const Operand &right) const = 0;

    /* Visitor double-dispatch entry point (see OperandVisitable). */
    virtual void Accept(OperandVisitorBase &v) = 0;

protected:
    OperandType opndKind; /* operand type */
    uint32 size;          /* size in bits */
    uint64 flag = 0;      /* operand property */
    bool isReference = false; /* operand holds a managed reference */
};
222 
223 /* RegOperand */
224 enum RegOperandState : uint32 { kRegOpndNone = 0, kRegOpndSetLow32 = 0x1, kRegOpndSetHigh32 = 0x2 };
225 
226 template <typename VisitableTy>
227 class OperandVisitable : public Operand {
228 public:
229     using Operand::Operand;
Accept(OperandVisitorBase & v)230     void Accept(OperandVisitorBase &v) override
231     {
232         if (OperandVisitor<VisitableTy> *typeV = dynamic_cast<OperandVisitor<VisitableTy> *>(&v)) {
233             typeV->Visit(static_cast<VisitableTy *>(this));
234         } else {
235             /* the type which has no implements */
236         }
237     }
238 };
239 
/*
 * A register operand: virtual or physical register of a given class
 * (RegType), identified by its register number (regNO). Ordering and
 * equality are primarily by register number.
 */
class RegOperand : public OperandVisitable<RegOperand> {
public:
    RegOperand(regno_t regNum, uint32 size, RegType type, uint32 flg = 0)
        : OperandVisitable(kOpdRegister, size), regNO(regNum), regType(type), validBitsNum(size), regFlag(flg)
    {
    }

    ~RegOperand() override = default;
    using OperandVisitable<RegOperand>::OperandVisitable;

    RegOperand *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<RegOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<RegOperand>(*this);
    }

    /* Number of meaningful (valid) bits in the register; defaults to its size. */
    void SetValidBitsNum(uint32 validNum)
    {
        validBitsNum = validNum;
    }

    uint32 GetValidBitsNum() const
    {
        return validBitsNum;
    }

    bool IsOfIntClass() const
    {
        return regType == kRegTyInt;
    }

    bool IsOfFloatOrSIMDClass() const
    {
        return regType == kRegTyFloat;
    }

    bool IsOfCC() const
    {
        return regType == kRegTyCc;
    }

    bool IsOfVary() const
    {
        return regType == kRegTyVary;
    }

    RegType GetRegisterType() const
    {
        return regType;
    }

    void SetRegisterType(RegType newTy)
    {
        regType = newTy;
    }

    /* True if all uses/defs of this register are within a single BB (see isBBLocal). */
    virtual bool IsBBLocalReg() const
    {
        return isBBLocal;
    }

    void SetRegNotBBLocal()
    {
        isBBLocal = false;
    }

    regno_t GetRegisterNumber() const
    {
        return regNO;
    }

    void SetRegisterNumber(regno_t regNum)
    {
        regNO = regNum;
    }

    void Dump() const override
    {
        LogInfo::MapleLogger() << "reg ";
        LogInfo::MapleLogger() << "size : " << GetSize();
        LogInfo::MapleLogger() << " NO_" << GetRegisterNumber();
        if (IsReference()) {
            LogInfo::MapleLogger() << " is_ref";
        }
    };

    /* Order first by operand kind, then by register number. */
    bool Less(const Operand &right) const override
    {
        if (&right == this) {
            return false;
        }

        /* For different type. */
        if (opndKind != right.GetKind()) {
            return opndKind < right.GetKind();
        }

        auto *rightOpnd = static_cast<const RegOperand *>(&right);

        /* The same type. */
        return regNO < rightOpnd->regNO;
    }

    bool Less(const RegOperand &right) const
    {
        return regNO < right.regNO;
    }

    bool RegNumEqual(const RegOperand &right) const
    {
        return regNO == right.GetRegisterNumber();
    }

    /*
     * Difference of register numbers.
     * NOTE(review): regno_t appears to be unsigned, so the subtraction wraps
     * before the conversion to int32 — callers should rely on the sign only
     * when the numbers are close; confirm against regno_t's definition.
     */
    int32 RegCompare(const RegOperand &right) const
    {
        return (regNO - right.GetRegisterNumber());
    }

    /* Structural equality: kind/size, register number, class and BB-locality. */
    bool Equals(Operand &operand) const override
    {
        if (!operand.IsRegister()) {
            return false;
        }
        auto &op = static_cast<RegOperand &>(operand);
        if (&op == this) {
            return true;
        }
        return (BasicEquals(op) && regNO == op.GetRegisterNumber() && regType == op.GetRegisterType() &&
                IsBBLocalReg() == op.IsBBLocalReg());
    }

    /* True if both operands are registers with the same register number. */
    static bool IsSameRegNO(const Operand &firstOpnd, const Operand &secondOpnd)
    {
        if (!firstOpnd.IsRegister() || !secondOpnd.IsRegister()) {
            return false;
        }
        auto &firstReg = static_cast<const RegOperand &>(firstOpnd);
        auto &secondReg = static_cast<const RegOperand &>(secondOpnd);
        return firstReg.RegNumEqual(secondReg);
    }

    /* Same register number AND same operand size. */
    static bool IsSameReg(const Operand &firstOpnd, const Operand &secondOpnd)
    {
        if (firstOpnd.GetSize() != secondOpnd.GetSize()) {
            return false;
        }
        return IsSameRegNO(firstOpnd, secondOpnd);
    }

    void SetOpndSSAForm()
    {
        isSSAForm = true;
    }

    void SetOpndOutOfSSAForm()
    {
        isSSAForm = false;
    }

    bool IsSSAForm() const
    {
        return isSSAForm;
    }

    void SetRefField(bool newIsRefField)
    {
        isRefField = newIsRefField;
    }

    /*
     * NOTE(review): physical registers are assumed to occupy numbers
     * (0, 100) — the bound is hard-coded here rather than taken from the
     * target description; confirm it covers all supported targets.
     */
    bool IsPhysicalRegister() const
    {
        constexpr uint32 maxPhysicalRegisterNumber = 100;
        return GetRegisterNumber() > 0 && GetRegisterNumber() < maxPhysicalRegisterNumber && !IsOfCC();
    }

    bool IsVirtualRegister() const
    {
        return !IsPhysicalRegister();
    }

    bool IsBBLocalVReg() const
    {
        return IsVirtualRegister() && IsBBLocalReg();
    }

    void SetHigh8Bit()
    {
        isHigh8Bit = true;
    }

    /* NOTE(review): accessor is not const-qualified, unlike its siblings. */
    bool IsHigh8Bit()
    {
        return isHigh8Bit;
    }

    void SetBaseRefOpnd(RegOperand &regOpnd)
    {
        baseRefOpnd = &regOpnd;
    }

    const RegOperand *GetBaseRefOpnd() const
    {
        return baseRefOpnd;
    }

    RegOperand *GetBaseRefOpnd()
    {
        return baseRefOpnd;
    }

    bool operator==(const RegOperand &o) const;

    bool operator<(const RegOperand &o) const;

protected:
    regno_t regNO;
    RegType regType;

    /*
     * used for EBO(-O1), it can recognize the registers whose use and def are in
     * different BB. It is true by default. Sometime it should be false such as
     * when handle intrinsiccall for target
     * aarch64(AArch64CGFunc::SelectIntrinsicCall).
     */
    bool isBBLocal = true;
    uint32 validBitsNum;
    /* use for SSA analysis */
    bool isSSAForm = false;
    bool isRefField = false;
    uint32 regFlag = 0;
    int16 vecLane = -1;        /* -1 for whole reg, 0 to 15 to specify each lane one at a time */
    uint16 vecLaneSize = 0;    /* Number of lanes */
    uint64 vecElementSize = 0; /* size of vector element in each lane */
    bool if64Vec = false;      /* operand returning 64x1's int value in FP/Simd register */
    bool isHigh8Bit = false;
    RegOperand *baseRefOpnd = nullptr; /* non-owning; base reference register, see SetBaseRefOpnd */
}; /* class RegOperand */
481 
/*
 * Whether an immediate still varies (presumably with the final stack-frame
 * layout — TODO confirm against SetVary callers).
 */
enum VaryType : uint8 {
    kNotVary = 0,  /* value is final */
    kUnAdjustVary, /* varies and has not been adjusted yet */
    kAdjustVary,   /* varies and has already been adjusted */
};
487 
488 class ImmOperand : public OperandVisitable<ImmOperand> {
489 public:
490     ImmOperand(int64 val, uint32 size, bool isSigned, VaryType isVar = kNotVary, bool isFloat = false)
OperandVisitable(kOpdImmediate,size)491         : OperandVisitable(kOpdImmediate, size), value(val), isSigned(isSigned), isVary(isVar), isFmov(isFloat)
492     {
493     }
494     ImmOperand(OperandType type, int64 val, uint32 size, bool isSigned, VaryType isVar = kNotVary, bool isFloat = false)
OperandVisitable(type,size)495         : OperandVisitable(type, size), value(val), isSigned(isSigned), isVary(isVar), isFmov(isFloat)
496     {
497     }
498     ImmOperand(const MIRSymbol &symbol, int64 val, int32 relocs, bool isSigned, VaryType isVar = kNotVary,
499                bool isFloat = false)
500         : OperandVisitable(kOpdStImmediate, 0),
501           value(val),
502           isSigned(isSigned),
503           isVary(isVar),
504           isFmov(isFloat),
505           symbol(&symbol),
506           relocs(relocs)
507     {
508     }
509     ~ImmOperand() override = default;
510     using OperandVisitable<ImmOperand>::OperandVisitable;
511 
CloneTree(MapleAllocator & allocator)512     ImmOperand *CloneTree(MapleAllocator &allocator) const override
513     {
514         // const MIRSymbol is not changed in cg, so we can do shallow copy
515         return allocator.GetMemPool()->New<ImmOperand>(*this);
516     }
517 
Clone(MemPool & memPool)518     Operand *Clone(MemPool &memPool) const override
519     {
520         return memPool.Clone<ImmOperand>(*this);
521     }
522 
GetSymbol()523     const MIRSymbol *GetSymbol() const
524     {
525         return symbol;
526     }
527 
GetName()528     const std::string &GetName() const
529     {
530         return symbol->GetName();
531     }
532 
GetRelocs()533     int32 GetRelocs() const
534     {
535         return relocs;
536     }
537 
IsInBitSize(uint8 size,uint8 nLowerZeroBits)538     bool IsInBitSize(uint8 size, uint8 nLowerZeroBits) const
539     {
540         return IsBitSizeImmediate(static_cast<uint64>(value), size, nLowerZeroBits);
541     }
542 
IsBitmaskImmediate()543     bool IsBitmaskImmediate() const
544     {
545         DEBUG_ASSERT(!IsZero(), " 0 is reserved for bitmask immediate");
546         DEBUG_ASSERT(!IsAllOnes(), " -1 is reserved for bitmask immediate");
547         return maplebe::aarch64::IsBitmaskImmediate(static_cast<uint64>(value), static_cast<uint32>(size));
548     }
549 
IsBitmaskImmediate(uint32 destSize)550     bool IsBitmaskImmediate(uint32 destSize) const
551     {
552         DEBUG_ASSERT(!IsZero(), " 0 is reserved for bitmask immediate");
553         DEBUG_ASSERT(!IsAllOnes(), " -1 is reserved for bitmask immediate");
554         return maplebe::aarch64::IsBitmaskImmediate(static_cast<uint64>(value), static_cast<uint32>(destSize));
555     }
556 
IsSingleInstructionMovable()557     bool IsSingleInstructionMovable() const
558     {
559         return (IsMoveWidableImmediate(static_cast<uint64>(value), static_cast<uint32>(size)) ||
560                 IsMoveWidableImmediate(~static_cast<uint64>(value), static_cast<uint32>(size)) || IsBitmaskImmediate());
561     }
562 
IsSingleInstructionMovable(uint32 destSize)563     bool IsSingleInstructionMovable(uint32 destSize) const
564     {
565         return (IsMoveWidableImmediate(static_cast<uint64>(value), static_cast<uint32>(destSize)) ||
566                 IsMoveWidableImmediate(~static_cast<uint64>(value), static_cast<uint32>(destSize)) ||
567                 IsBitmaskImmediate(destSize));
568     }
569 
GetValue()570     int64 GetValue() const
571     {
572         return value;
573     }
574 
SetValue(int64 val)575     void SetValue(int64 val)
576     {
577         value = val;
578     }
579 
SetVary(VaryType flag)580     void SetVary(VaryType flag)
581     {
582         isVary = flag;
583     }
584 
IsZero()585     bool IsZero() const
586     {
587         return value == 0;
588     }
589 
GetVary()590     VaryType GetVary() const
591     {
592         return isVary;
593     }
594 
IsOne()595     bool IsOne() const
596     {
597         return value == 1;
598     }
599 
IsSignedValue()600     bool IsSignedValue() const
601     {
602         return isSigned;
603     }
604 
SetSigned()605     void SetSigned()
606     {
607         isSigned = true;
608     }
609 
SetSigned(bool flag)610     void SetSigned(bool flag)
611     {
612         isSigned = flag;
613     }
614 
IsInBitSizeRot(uint8 size)615     bool IsInBitSizeRot(uint8 size) const
616     {
617         return IsInBitSizeRot(size, value);
618     }
619 
IsInBitSizeRot(uint8 size,int64 val)620     static bool IsInBitSizeRot(uint8 size, int64 val)
621     {
622         /* to tell if the val is in a rotate window of size */
623 #if __GNU_C__ || __clang__
624         if (val == 0) {
625             return true;
626         }
627         int32 start = __builtin_ctzll(static_cast<uint64>(val));
628         int32 end = static_cast<int32>(sizeof(val) * kBitsPerByte - __builtin_clzll(static_cast<uint64>(val)) - 1);
629         return (size >= end - start + 1);
630 #else
631         uint8 start = 0;
632         uint8 end = 0;
633         bool isFound = false;
634         CHECK_FATAL(val > 0, "do not perform bit operator operations on signed integers");
635         for (uint32 i = 0; i < k64BitSize; ++i) {
636             /* check whether the ith bit of val is 1 or not */
637             if (((static_cast<uint64>(val) >> i) & 0x1) == 0x1) {
638                 if (!isFound) {
639                     start = i;
640                     end = i;
641                     isFound = true;
642                 } else {
643                     end = i;
644                 }
645             }
646         }
647         return !isFound || (size >= (end - start) + 1);
648 #endif
649     }
650 
IsInValueRange(int32 lowVal,int32 highVal,int32 val)651     static bool IsInValueRange(int32 lowVal, int32 highVal, int32 val)
652     {
653         return val >= lowVal && val <= highVal;
654     }
655 
IsNegative()656     bool IsNegative() const
657     {
658         return isSigned && value < 0;
659     }
660 
Add(int64 delta)661     void Add(int64 delta)
662     {
663         value += delta;
664     }
665 
Negate()666     void Negate()
667     {
668         value = -value;
669     }
670 
BitwiseNegate()671     void BitwiseNegate()
672     {
673         value = ~(static_cast<uint64>(value)) & ((1ULL << size) - 1UL);
674     }
675 
DivideByPow2(int32 shift)676     void DivideByPow2(int32 shift)
677     {
678         value = (static_cast<uint64>(value)) >> shift;
679     }
680 
ModuloByPow2(int32 shift)681     void ModuloByPow2(int32 shift)
682     {
683         value = (static_cast<uint64>(value)) & ((1ULL << shift) - 1UL);
684     }
685 
IsAllOnes()686     bool IsAllOnes() const
687     {
688         return value == -1;
689     }
690 
IsAllOnes32bit()691     bool IsAllOnes32bit() const
692     {
693         return value == 0x0ffffffffLL;
694     }
695 
696     bool operator<(const ImmOperand &iOpnd) const
697     {
698         return value < iOpnd.value || (value == iOpnd.value && isSigned < iOpnd.isSigned) ||
699                (value == iOpnd.value && isSigned == iOpnd.isSigned && size < iOpnd.GetSize());
700     }
701 
702     bool operator==(const ImmOperand &iOpnd) const
703     {
704         return (value == iOpnd.value && isSigned == iOpnd.isSigned && size == iOpnd.GetSize());
705     }
706 
707     void Dump() const override;
708 
Less(const Operand & right)709     bool Less(const Operand &right) const override
710     {
711         if (&right == this) {
712             return false;
713         }
714 
715         /* For different type. */
716         if (opndKind != right.GetKind()) {
717             return opndKind < right.GetKind();
718         }
719 
720         auto *rightOpnd = static_cast<const ImmOperand *>(&right);
721 
722         /* The same type. */
723         if (isSigned != rightOpnd->isSigned) {
724             return isSigned;
725         }
726 
727         if (isVary != rightOpnd->isVary) {
728             return isVary;
729         }
730 
731         return value < rightOpnd->value;
732     }
733 
Equals(Operand & operand)734     bool Equals(Operand &operand) const override
735     {
736         if (!operand.IsImmediate()) {
737             return false;
738         }
739         auto &op = static_cast<ImmOperand &>(operand);
740         if (&op == this) {
741             return true;
742         }
743         return (BasicEquals(op) && value == op.GetValue() && isSigned == op.IsSignedValue());
744     }
745 
ValueEquals(const ImmOperand & op)746     bool ValueEquals(const ImmOperand &op) const
747     {
748         if (&op == this) {
749             return true;
750         }
751         return (value == op.GetValue() && isSigned == op.IsSignedValue());
752     }
IsFmov()753     bool IsFmov() const
754     {
755         return isFmov;
756     }
757 
758 protected:
759     int64 value;
760     bool isSigned;
761     VaryType isVary;
762     bool isFmov = false;
763     const MIRSymbol *symbol; /* for Immediate in symbol form */
764     int32 relocs;
765 };
766 
/*
 * Offset immediate (kOpdOffset) used inside MemOperand: a pure immediate,
 * a symbol, or symbol + immediate.
 *
 * NOTE(review): `symbol` and `relocs` below shadow the identically named
 * members of ImmOperand (which are only set by ImmOperand's symbol-form
 * constructor). All accessors here use the shadowing copies — verify nothing
 * reads the base-class members through an ImmOperand pointer.
 */
class OfstOperand : public ImmOperand {
public:
    enum OfstType : uint8 {
        kSymbolOffset,          /* offset is a symbol */
        kImmediateOffset,       /* offset is a plain immediate */
        kSymbolImmediateOffset, /* symbol plus immediate addend */
    };

    /* only for symbol offset */
    OfstOperand(const MIRSymbol &mirSymbol, uint32 size, int32 relocs)
        : ImmOperand(kOpdOffset, 0, size, true, kNotVary, false),
          offsetType(kSymbolOffset),
          symbol(&mirSymbol),
          relocs(relocs)
    {
    }
    /* only for Immediate offset */
    OfstOperand(int64 val, uint32 size, VaryType isVar = kNotVary)
        : ImmOperand(kOpdOffset, static_cast<int64>(val), size, true, isVar, false),
          offsetType(kImmediateOffset),
          symbol(nullptr),
          relocs(0)
    {
    }
    /* for symbol and Immediate offset */
    OfstOperand(const MIRSymbol &mirSymbol, int64 val, uint32 size, int32 relocs, VaryType isVar = kNotVary)
        : ImmOperand(kOpdOffset, val, size, true, isVar, false),
          offsetType(kSymbolImmediateOffset),
          symbol(&mirSymbol),
          relocs(relocs)
    {
    }

    ~OfstOperand() override
    {
        symbol = nullptr;
    }

    OfstOperand *CloneTree(MapleAllocator &allocator) const override
    {
        // const MIRSymbol is not changed in cg, so we can do shallow copy
        return allocator.GetMemPool()->New<OfstOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<OfstOperand>(*this);
    }

    bool IsSymOffset() const
    {
        return offsetType == kSymbolOffset;
    }
    bool IsImmOffset() const
    {
        return offsetType == kImmediateOffset;
    }
    bool IsSymAndImmOffset() const
    {
        return offsetType == kSymbolImmediateOffset;
    }

    /* May be nullptr for immediate-only offsets. */
    const MIRSymbol *GetSymbol() const
    {
        return symbol;
    }

    /* Only valid when symbol != nullptr (symbol-form offsets). */
    const std::string &GetSymbolName() const
    {
        return symbol->GetName();
    }

    int64 GetOffsetValue() const
    {
        return GetValue();
    }

    /* NOTE(review): parameter is int32 while the stored value is int64. */
    void SetOffsetValue(int32 offVal)
    {
        SetValue(static_cast<int64>(offVal));
    }

    void AdjustOffset(int32 delta)
    {
        Add(static_cast<int64>(delta));
    }

    bool operator==(const OfstOperand &opnd) const
    {
        return (offsetType == opnd.offsetType && symbol == opnd.symbol && ImmOperand::operator==(opnd) &&
                relocs == opnd.relocs);
    }

    /* Order by offset type, then symbol pointer, then value. */
    bool operator<(const OfstOperand &opnd) const
    {
        return (offsetType < opnd.offsetType || (offsetType == opnd.offsetType && symbol < opnd.symbol) ||
                (offsetType == opnd.offsetType && symbol == opnd.symbol && GetValue() < opnd.GetValue()));
    }

    void Dump() const override
    {
        if (IsImmOffset()) {
            LogInfo::MapleLogger() << "ofst:" << GetValue();
        } else {
            LogInfo::MapleLogger() << GetSymbolName();
            LogInfo::MapleLogger() << "+offset:" << GetValue();
        }
    }

private:
    OfstType offsetType;
    const MIRSymbol *symbol; /* shadows ImmOperand::symbol, see class note */
    int32 relocs;            /* shadows ImmOperand::relocs, see class note */
};
881 
882 /*
883  * Table C1-6 A64 Load/Store addressing modes
884  * |         Offset
885  * Addressing Mode    | Immediate     | Register             | Extended Register
886  *
887  * Base register only | [base{,#0}]   | -                    | -
888  * (no offset)        | B_OI_NONE     |                      |
889  *                   imm=0
890  *
891  * Base plus offset   | [base{,#imm}] | [base,Xm{,LSL #imm}] | [base,Wm,(S|U)XTW
892  * {#imm}] B_OI_NONE     | B_OR_X               | B_OR_X imm=0,1 (0,3)        |
893  * imm=00,01,10,11 (0/2,s/u)
894  *
895  * Pre-indexed        | [base, #imm]! | -                    | -
896  *
897  * Post-indexed       | [base], #imm  | [base], Xm(a)        | -
898  *
899  * Literal            | label         | -                    | -
900  * (PC-relative)
901  *
902  * a) The post-indexed by register offset mode can be used with the SIMD
903  * Load/Store structure instructions described in Load/Store Vector on page
904  * C3-154. Otherwise the post-indexed by register offset mode is not available.
905  */
906 class MemOperand : public OperandVisitable<MemOperand> {
907 public:
908     enum AArch64AddressingMode : uint8 {
909         kAddrModeUndef,
910         /* AddrMode_BO, base, offset. EA = [base] + offset */
911         kAddrModeBOi, /* INTACT: EA = [base]+immediate */
912         /*
913          * PRE: base += immediate, EA = [base]
914          * POST: EA = [base], base += immediate
915          */
916         kAddrModeBOrX,    /* EA = [base]+Extend([offreg/idxreg]), OR=Wn/Xn */
917         kAddrModeLiteral, /* AArch64 insruction LDR takes literal and */
918         /*
919          * "calculates an address from the PC value and an immediate offset,
920          * loads a word from memory, and writes it to a register."
921          */
922         kAddrModeLo12Li,  // EA = [base] + #:lo12:Label+immediate. (Example: [x0,
923                           // #:lo12:__Label300+456]
924         kLiteral,         /* xxx_l mode: label */
925         // X86 scale Type
926         kScale,
927     };
928     /*
929      * ARMv8-A A64 ISA Overview by Matteo Franchin @ ARM
930      * (presented at 64-bit terminal platform on ARM. Sep. 2015) p.14
931      * o Address to load from/store to is a 64-bit base register + an optional
932      * offset LDR X0, [X1] ; Load from address held in X1 STR X0, [X1] ; Store to
933      * address held in X1
934      *
935      * o Offset can be an immediate or a register
936      *   LDR X0, [X1, #8]  ; Load from address [X1 + 8 bytes]
937      *   LDR X0, [X1, #-8] ; Load with negative offset
938      *   LDR X0, [X1, X2]  ; Load from address [X1 + X2]
939      *
940      * o A Wn register offset needs to be extended to 64 bits
941      *  LDR X0, [X1, W2, SXTW] ; Sign-extend offset in W2
942      *   LDR X0, [X1, W2, UXTW] ; Zero-extend offset in W2
943      *
944      * o Both Xn and Wn register offsets can include an optional left-shift
945      *   LDR X0, [X1, W2, UXTW #2] ; Zero-extend offset in W2 & left-shift by 2
946      *   LDR X0, [X1, X2, LSL #2]  ; Left-shift offset in X2 by 2
947      *
948      * p.15
949      * Addressing Modes                       Analogous C Code
950      *                                       int *intptr = ... // X1
951      *                                       int out; // W0
952      * o Simple: X1 is not changed
953      *   LDR W0, [X1]                        out = *intptr;
954      * o Offset: X1 is not changed
955      *   LDR W0, [X1, #4]                    out = intptr[1];
956      * o Pre-indexed: X1 changed before load
957      *   LDR W0, [X1, #4]! =|ADD X1,X1,#4    out = *(++intptr);
958      * |LDR W0,[X1]
959      * o Post-indexed: X1 changed after load
960      *   LDR W0, [X1], #4  =|LDR W0,[X1]     out = *(intptr++);
961      * |ADD X1,X1,#4
962      */
963     enum ExtendInfo : uint8 {
964         kShiftZero = 0x1,
965         kShiftOne = 0x2,
966         kShiftTwo = 0x4,
967         kShiftThree = 0x8,
968         kUnsignedExtend = 0x10,
969         kSignExtend = 0x20
970     };
971 
972     enum IndexingOption : uint8 {
973         kIntact,    /* base register stays the same */
974         kPreIndex,  /* base register gets changed before load */
975         kPostIndex, /* base register gets changed after load */
976     };
977 
    /* Size-only operand: all addressing fields keep their defaults and are
     * expected to be filled in later through the setters. */
    MemOperand(uint32 size) : OperandVisitable(Operand::kOpdMem, size) {}

    /* Symbol-based operand: records the MIRSymbol; no base/index/offset. */
    MemOperand(uint32 size, const MIRSymbol &mirSymbol) : OperandVisitable(Operand::kOpdMem, size), symbol(&mirSymbol)
    {
    }

    /* Base register + immediate offset; only kAddrModeBOi is accepted. */
    MemOperand(uint32 size, RegOperand &baseOp, ImmOperand &ofstOp, AArch64AddressingMode mode = kAddrModeBOi)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&baseOp),
          offsetOpnd(&ofstOp),
          symbol(nullptr),
          addrMode(mode)
    {
        DEBUG_ASSERT((mode == kAddrModeBOi), "check mode!");
    }

    /* Fully general pointer-based form; any operand may be null.
     * addrMode keeps its in-class default. */
    MemOperand(uint32 size, RegOperand *baseOp, RegOperand *indexOp, ImmOperand *ofstOp, const MIRSymbol *mirSymbol,
               ImmOperand *scaleOp = nullptr)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(baseOp),
          indexOpnd(indexOp),
          offsetOpnd(ofstOp),
          scaleOpnd(scaleOp),
          symbol(mirSymbol)
    {
    }

    /* Base + immediate offset with an explicit indexing option
     * (pre/post-index); always kAddrModeBOi. */
    MemOperand(RegOperand *base, OfstOperand *offset, uint32 size, IndexingOption idxOpt = kIntact)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(base),
          indexOpnd(nullptr),
          offsetOpnd(offset),
          symbol(nullptr),
          addrMode(kAddrModeBOi),
          extend(0),
          idxOpt(idxOpt),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* General form with explicit addressing mode; index/offset/symbol may
     * be null depending on the mode. */
    MemOperand(AArch64AddressingMode mode, uint32 size, RegOperand &base, RegOperand *index, ImmOperand *offset,
               const MIRSymbol *sym)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&base),
          indexOpnd(index),
          offsetOpnd(offset),
          symbol(sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* As above, but base/index/symbol are mandatory (taken by reference),
     * with explicit control over offset-register extension suppression. */
    MemOperand(AArch64AddressingMode mode, uint32 size, RegOperand &base, RegOperand &index, ImmOperand *offset,
               const MIRSymbol &sym, bool noExtend)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&base),
          indexOpnd(&index),
          offsetOpnd(offset),
          symbol(&sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(noExtend),
          isStackMem(false)
    {
    }

    /* Register-offset form: 'extend' packs the extension kind (sign/zero)
     * together with a one-hot shift amount (1U << shift); see ExtendInfo. */
    MemOperand(AArch64AddressingMode mode, uint32 dSize, RegOperand &baseOpnd, RegOperand &indexOpnd, uint32 shift,
               bool isSigned = false)
        : OperandVisitable(Operand::kOpdMem, dSize),
          baseOpnd(&baseOpnd),
          indexOpnd(&indexOpnd),
          offsetOpnd(nullptr),
          symbol(nullptr),
          addrMode(mode),
          extend((isSigned ? kSignExtend : kUnsignedExtend) | (1U << shift)),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* Literal-pool form: only valid with kAddrModeLiteral (asserted). */
    MemOperand(AArch64AddressingMode mode, uint32 dSize, const MIRSymbol &sym)
        : OperandVisitable(Operand::kOpdMem, dSize),
          baseOpnd(nullptr),
          indexOpnd(nullptr),
          offsetOpnd(nullptr),
          symbol(&sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
        DEBUG_ASSERT(mode == kAddrModeLiteral,
                     "This constructor version is supposed to be used with "
                     "AddrMode_Literal only");
    }

    /* Copy constructor */
    /* NOTE(review): accessSize is deliberately NOT copied here (it is
     * documented below as a temporary that must be set right before use). */
    explicit MemOperand(const MemOperand &memOpnd)
        : OperandVisitable(Operand::kOpdMem, memOpnd.GetSize()),
          baseOpnd(memOpnd.baseOpnd),
          indexOpnd(memOpnd.indexOpnd),
          offsetOpnd(memOpnd.offsetOpnd),
          scaleOpnd(memOpnd.scaleOpnd),
          symbol(memOpnd.symbol),
          memoryOrder(memOpnd.memoryOrder),
          addrMode(memOpnd.addrMode),
          extend(memOpnd.extend),
          idxOpt(memOpnd.idxOpt),
          noExtend(memOpnd.noExtend),
          isStackMem(memOpnd.isStackMem),
          isStackArgMem(memOpnd.isStackArgMem),
          isVolatile(memOpnd.isVolatile)
    {
    }

    /* NOTE(review): the defaulted assignment copies accessSize, while the
     * copy constructor above does not — confirm the asymmetry is intended. */
    MemOperand &operator=(const MemOperand &memOpnd) = default;

    ~MemOperand() override = default;
    using OperandVisitable<MemOperand>::OperandVisitable;
1103 
    /* Deep copy: duplicates this operand and then re-clones the base,
     * index and offset operand trees into 'allocator'.
     * NOTE(review): scaleOpnd and symbol remain shallow-copied from the
     * copy constructor and are not re-cloned here — confirm intended. */
    MemOperand *CloneTree(MapleAllocator &allocator) const override
    {
        auto *memOpnd = allocator.GetMemPool()->New<MemOperand>(*this);
        if (baseOpnd != nullptr) {
            memOpnd->SetBaseRegister(*baseOpnd->CloneTree(allocator));
        }
        if (indexOpnd != nullptr) {
            memOpnd->SetIndexRegister(*indexOpnd->CloneTree(allocator));
        }
        if (offsetOpnd != nullptr) {
            memOpnd->SetOffsetOperand(*offsetOpnd->CloneTree(allocator));
        }
        return memOpnd;
    }

    /* Shallow copy allocated from 'memPool'; sub-operands are shared. */
    MemOperand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<MemOperand>(*this);
    }

    /* Intentionally empty; concrete dumping is handled elsewhere. */
    void Dump() const override {};
1125 
    /* --- plain accessors for the addressing components --- */

    RegOperand *GetBaseRegister() const
    {
        return baseOpnd;
    }

    void SetBaseRegister(RegOperand &regOpnd)
    {
        baseOpnd = &regOpnd;
    }

    RegOperand *GetIndexRegister() const
    {
        return indexOpnd;
    }

    void SetIndexRegister(RegOperand &regOpnd)
    {
        indexOpnd = &regOpnd;
    }

    ImmOperand *GetOffsetOperand() const
    {
        return offsetOpnd;
    }

    void SetOffsetOperand(ImmOperand &oftOpnd)
    {
        offsetOpnd = &oftOpnd;
    }

    const ImmOperand *GetScaleOperand() const
    {
        return scaleOpnd;
    }

    void SetScaleOperand(ImmOperand &scaOpnd)
    {
        scaleOpnd = &scaOpnd;
    }

    const MIRSymbol *GetSymbol() const
    {
        return symbol;
    }

    /* accessSize is a scratch field: set it right before each use. */
    void SetAccessSize(uint8 size)
    {
        accessSize = size;
    }

    uint8 GetAccessSize() const
    {
        return accessSize;
    }

    AArch64AddressingMode GetAddrMode() const
    {
        return addrMode;
    }

    /* NOTE: dereferences the symbol without a null check — only call when
     * a symbol is known to be attached. */
    const std::string &GetSymbolName() const
    {
        return GetSymbol()->GetName();
    }

    bool IsStackMem() const
    {
        return isStackMem;
    }

    void SetStackMem(bool isStack)
    {
        isStackMem = isStack;
    }

    bool IsStackArgMem() const
    {
        return isStackArgMem;
    }

    void SetStackArgMem(bool isStackArg)
    {
        isStackArgMem = isStackArg;
    }

    /* Defined out of line. */
    Operand *GetOffset() const;

    /* Offset immediate viewed as an OfstOperand (caller guarantees the
     * stored ImmOperand really is one). */
    OfstOperand *GetOffsetImmediate() const
    {
        return static_cast<OfstOperand *>(GetOffsetOperand());
    }
1217 
    /* Returns N where alignment == 2^N */
    /* dSize must be a power of two in [8, 128] bits (asserted). */
    static uint32 GetImmediateOffsetAlignment(uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        /* dSize==8: 0, dSize==16 : 1, dSize==32: 2, dSize==64: 3 */
        return __builtin_ctz(dSize) - kBaseOffsetAlignment;
    }

    /* Largest scaled (PIMM) immediate offset for a single load/store of
     * dSize bits; dSize is clamped to 64 bits before the table lookup. */
    static int32 GetMaxPIMM(uint32 dSize)
    {
        dSize = dSize > k64BitSize ? k64BitSize : dSize;
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        uint32 alignment = GetImmediateOffsetAlignment(dSize);
        /* alignment is between kAlignmentOf8Bit and kAlignmentOf64Bit */
        DEBUG_ASSERT(alignment >= kOffsetAlignmentOf8Bit, "error val:alignment");
        DEBUG_ASSERT(alignment <= kOffsetAlignmentOf128Bit, "error val:alignment");
        return (kMaxPimm[alignment]);
    }

    /* Largest scaled immediate offset for a load/store *pair* of dSize
     * bits (pairs start at 32-bit elements, hence the table re-base). */
    static int32 GetMaxPairPIMM(uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k32BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        uint32 alignment = GetImmediateOffsetAlignment(dSize);
        /* alignment is between kAlignmentOf8Bit and kAlignmentOf64Bit */
        DEBUG_ASSERT(alignment >= kOffsetAlignmentOf32Bit, "error val:alignment");
        DEBUG_ASSERT(alignment <= kOffsetAlignmentOf128Bit, "error val:alignment");
        return (kMaxPairPimm[alignment - k2BitSize]);
    }
1252 
    /* True when the immediate offset is not a multiple of the access size.
     * Byte accesses can never be misaligned. */
    bool IsOffsetMisaligned(uint32 dSize) const
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        if (dSize == k8BitSize) {
            return false;
        }
        OfstOperand *ofstOpnd = GetOffsetImmediate();
        if (!ofstOpnd) {
            return false;
        }
        int64 ofstVal = ofstOpnd->GetOffsetValue();
        if (addrMode == kAddrModeBOi) {
            /* offsets inside [kMinSimm32, kMaxSimm32] are never treated as
             * misaligned — presumably encodable unscaled; confirm. */
            if (ofstVal >= kMinSimm32 && ofstVal <= kMaxSimm32) {
                return false;
            }
            return ((static_cast<uint32>(ofstOpnd->GetOffsetValue()) &
                     static_cast<uint32>((1U << static_cast<uint32>(GetImmediateOffsetAlignment(dSize))) - 1)) != 0);
        } else if (addrMode == kAddrModeLo12Li) {
            /* :lo12: relocation — offset must be a multiple of the access
             * size in bytes. */
            uint32 alignByte = (dSize / k8BitSize);
            return ((ofstVal % static_cast<int64>(alignByte)) != k0BitSize);
        }
        return false;
    }

    /* True when 'offset' cannot be encoded as a signed immediate.
     * Pairs additionally require alignment (mask k7BitSize / k3BitSize). */
    static bool IsSIMMOffsetOutOfRange(int64 offset, bool is64bit, bool isLDSTPair)
    {
        if (!isLDSTPair) {
            return (offset < kMinSimm32 || offset > kMaxSimm32);
        }
        if (is64bit) {
            return (offset < kMinSimm64 || offset > kMaxSimm64Pair) || (static_cast<uint64>(offset) & k7BitSize);
        }
        return (offset < kMinSimm32 || offset > kMaxSimm32Pair) || (static_cast<uint64>(offset) & k3BitSize);
    }

    /* True when 'offset' cannot be encoded as an unsigned scaled (PIMM)
     * immediate for an access of dSize bits. */
    static bool IsPIMMOffsetOutOfRange(int32 offset, uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        return (offset < 0 || offset > GetMaxPIMM(dSize));
    }
1297 
    /* Lexicographic order over (addrMode, base, index, offset, symbol,
     * size, extend). Pointer fields compare by address, which yields an
     * arbitrary but stable ordering — suitable for map keys only. */
    bool operator<(const MemOperand &opnd) const
    {
        if (addrMode != opnd.addrMode) {
            return addrMode < opnd.addrMode;
        }
        if (GetBaseRegister() != opnd.GetBaseRegister()) {
            return GetBaseRegister() < opnd.GetBaseRegister();
        }
        if (GetIndexRegister() != opnd.GetIndexRegister()) {
            return GetIndexRegister() < opnd.GetIndexRegister();
        }
        if (GetOffsetOperand() != opnd.GetOffsetOperand()) {
            return GetOffsetOperand() < opnd.GetOffsetOperand();
        }
        if (GetSymbol() != opnd.GetSymbol()) {
            return GetSymbol() < opnd.GetSymbol();
        }
        if (GetSize() != opnd.GetSize()) {
            return GetSize() < opnd.GetSize();
        }
        if (extend != opnd.extend) {
            return extend < opnd.extend;
        }
        return false;
    }

    /* Identity-style equality: sub-operands are compared by pointer,
     * not by value. Consistent with operator< above. */
    bool operator==(const MemOperand &opnd) const
    {
        return (GetSize() == opnd.GetSize()) && (addrMode == opnd.addrMode) && (extend == opnd.extend) &&
               (GetBaseRegister() == opnd.GetBaseRegister()) && (GetIndexRegister() == opnd.GetIndexRegister()) &&
               (GetSymbol() == opnd.GetSymbol()) && (GetOffsetOperand() == opnd.GetOffsetOperand()) &&
               (IsVolatile() == opnd.IsVolatile());
    }
1331 
    /* Vary kind of the offset (stack-frame-relative adjustment state);
     * kNotVary when there is no offset operand. */
    VaryType GetMemVaryType() const
    {
        Operand *ofstOpnd = GetOffsetOperand();
        if (ofstOpnd != nullptr) {
            auto *opnd = static_cast<OfstOperand *>(ofstOpnd);
            return opnd->GetVary();
        }
        return kNotVary;
    }

    void SetAddrMode(AArch64AddressingMode val)
    {
        addrMode = val;
    }

    /* True for base + (possibly extended) register-offset addressing. */
    bool IsExtendedRegisterMode() const
    {
        return addrMode == kAddrModeBOrX;
    }

    /* Replace the extension kind while preserving the current shift
     * amount (re-encoded as a one-hot bit, see ExtendInfo). */
    void UpdateExtend(ExtendInfo flag)
    {
        extend = flag | (1U << ShiftAmount());
    }

    bool SignedExtend() const
    {
        return IsExtendedRegisterMode() && ((extend & kSignExtend) != 0);
    }

    bool UnsignedExtend() const
    {
        return IsExtendedRegisterMode() && !SignedExtend();
    }
1366 
ShiftAmount()1367     uint32 ShiftAmount() const
1368     {
1369         uint32 scale = extend & 0xF;
1370         /* 8 is 1 << 3, 4 is 1 << 2, 2 is 1 << 1, 1 is 1 << 0 */
1371         return (scale == 8) ? 3 : ((scale == 4) ? 2 : ((scale == 2) ? 1 : 0));
1372     }
1373 
    /* True when an extend/shift suffix must be printed: suppression flag
     * is clear and at least one ExtendInfo bit (mask 0x3F) is set. */
    bool ShouldEmitExtend() const
    {
        return !noExtend && ((extend & 0x3F) != 0);
    }

    IndexingOption GetIndexOpt() const
    {
        return idxOpt;
    }

    void SetIndexOpt(IndexingOption newidxOpt)
    {
        idxOpt = newidxOpt;
    }

    /* When true, the extend suffix is suppressed at emit time. */
    bool GetNoExtend() const
    {
        return noExtend;
    }

    void SetNoExtend(bool val)
    {
        noExtend = val;
    }

    uint32 GetExtend() const
    {
        return extend;
    }

    void SetExtend(uint32 val)
    {
        extend = val;
    }

    void SetVolatile(bool flag)
    {
        isVolatile = flag;
    }

    bool IsIntactIndexed() const
    {
        return idxOpt == kIntact;
    }

    bool IsPostIndexed() const
    {
        return idxOpt == kPostIndex;
    }

    bool IsPreIndexed() const
    {
        return idxOpt == kPreIndex;
    }

    bool IsVolatile() const
    {
        return isVolatile;
    }
1433 
GetExtendAsString()1434     std::string GetExtendAsString() const
1435     {
1436         if (GetIndexRegister()->GetSize() == k64BitSize) {
1437             return std::string("LSL");
1438         }
1439         return ((extend & kSignExtend) != 0) ? std::string("SXTW") : std::string("UXTW");
1440     }
1441 
1442     /* Return true if given operand has the same base reg and offset with this. */
1443     bool Equals(Operand &op) const override;
1444     bool Equals(const MemOperand &op) const;
1445     bool Less(const Operand &right) const override;
1446 
1447 private:
1448     RegOperand *baseOpnd = nullptr;   /* base register */
1449     RegOperand *indexOpnd = nullptr;  /* index register */
1450     ImmOperand *offsetOpnd = nullptr; /* offset immediate */
1451     ImmOperand *scaleOpnd = nullptr;
1452     const MIRSymbol *symbol; /* AddrMode_Literal */
1453     uint32 memoryOrder = 0;
1454     uint8 accessSize = 0; /* temp, must be set right before use everytime. */
1455     AArch64AddressingMode addrMode = kAddrModeBOi;
1456     uint32 extend = false;           /* used with offset register ; AddrMode_B_OR_X */
1457     IndexingOption idxOpt = kIntact; /* used with offset immediate ; AddrMode_B_OI */
1458     bool noExtend = false;
1459     bool isStackMem = false;
1460     bool isStackArgMem = false;
1461     bool isVolatile = false;  // based on mem info from ME
1462 };
1463 
class LabelOperand : public OperandVisitable<LabelOperand> {
public:
    /* 'parent' names the function owning the label; 'labIdx' identifies
     * the label within that function. orderID starts at the -1u sentinel
     * ("not yet emitted"). */
    LabelOperand(const char *parent, LabelIdx labIdx, MemPool &mp)
        : OperandVisitable(kOpdBBAddress, 0), labelIndex(labIdx), parentFunc(parent, &mp), orderID(-1u)
    {
    }

    ~LabelOperand() override = default;
    using OperandVisitable<LabelOperand>::OperandVisitable;

    /* Shallow copy: all members are value-like. */
    LabelOperand *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<LabelOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<LabelOperand>(*this);
    }

    bool IsLabelOpnd() const override
    {
        return true;
    }

    LabelIdx GetLabelIndex() const
    {
        return labelIndex;
    }

    const MapleString &GetParentFunc() const
    {
        return parentFunc;
    }

    /* Order in which this label was defined during code emission. */
    LabelIDOrder GetLabelOrder() const
    {
        return orderID;
    }

    void SetLabelOrder(LabelIDOrder idx)
    {
        orderID = idx;
    }

    void Dump() const override;

    /* Total order: operand kind, then owning function name (strcmp),
     * then label index. */
    bool Less(const Operand &right) const override
    {
        if (&right == this) {
            return false;
        }

        /* For different type. */
        if (opndKind != right.GetKind()) {
            return opndKind < right.GetKind();
        }

        auto *rightOpnd = static_cast<const LabelOperand *>(&right);

        int32 nRes = strcmp(parentFunc.c_str(), rightOpnd->parentFunc.c_str());
        if (nRes == 0) {
            return labelIndex < rightOpnd->labelIndex;
        } else {
            return nRes < 0;
        }
    }

    /* NOTE(review): unlike Less(), equality ignores parentFunc and
     * compares only the label index — confirm labels are never compared
     * across functions here. */
    bool Equals(Operand &operand) const override
    {
        if (!operand.IsLabel()) {
            return false;
        }
        auto &op = static_cast<LabelOperand &>(operand);
        return ((&op == this) || (op.GetLabelIndex() == labelIndex));
    }

protected:
    LabelIdx labelIndex;
    const MapleString parentFunc;

private:
    /* this index records the order this label is defined during code emit. */
    LabelIDOrder orderID = -1u;
};
1549 
1550 class ListOperand : public OperandVisitable<ListOperand> {
1551 public:
ListOperand(MapleAllocator & allocator)1552     explicit ListOperand(MapleAllocator &allocator)
1553         : OperandVisitable(Operand::kOpdList, 0), opndList(allocator.Adapter())
1554     {
1555     }
1556 
1557     ~ListOperand() override = default;
1558 
1559     using OperandVisitable<ListOperand>::OperandVisitable;
1560 
CloneTree(MapleAllocator & allocator)1561     ListOperand *CloneTree(MapleAllocator &allocator) const override
1562     {
1563         auto *listOpnd = allocator.GetMemPool()->New<ListOperand>(allocator);
1564         for (auto regOpnd : opndList) {
1565             listOpnd->PushOpnd(*regOpnd->CloneTree(allocator));
1566         }
1567         return listOpnd;
1568     }
1569 
Clone(MemPool & memPool)1570     Operand *Clone(MemPool &memPool) const override
1571     {
1572         return memPool.Clone<ListOperand>(*this);
1573     }
1574 
PushOpnd(RegOperand & opnd)1575     void PushOpnd(RegOperand &opnd)
1576     {
1577         opndList.push_back(&opnd);
1578     }
1579 
GetOperands()1580     MapleList<RegOperand *> &GetOperands()
1581     {
1582         return opndList;
1583     }
1584 
GetOperands()1585     const MapleList<RegOperand *> &GetOperands() const
1586     {
1587         return opndList;
1588     }
1589 
Dump()1590     void Dump() const override
1591     {
1592         for (auto it = opndList.begin(); it != opndList.end();) {
1593             (*it)->Dump();
1594             LogInfo::MapleLogger() << (++it == opndList.end() ? "" : " ,");
1595         }
1596     }
1597 
Less(const Operand & right)1598     bool Less(const Operand &right) const override
1599     {
1600         /* For different type. */
1601         if (opndKind != right.GetKind()) {
1602             return opndKind < right.GetKind();
1603         }
1604 
1605         DEBUG_ASSERT(false, "We don't need to compare list operand.");
1606         return false;
1607     }
1608 
Equals(Operand & operand)1609     bool Equals(Operand &operand) const override
1610     {
1611         if (!operand.IsList()) {
1612             return false;
1613         }
1614         auto &op = static_cast<ListOperand &>(operand);
1615         return (&op == this);
1616     }
1617 
1618 protected:
1619     MapleList<RegOperand *> opndList;
1620 };
1621 
1622 /* representing for global variables address */
1623 class StImmOperand : public OperandVisitable<StImmOperand> {
1624 public:
StImmOperand(const MIRSymbol & symbol,int64 offset,int32 relocs)1625     StImmOperand(const MIRSymbol &symbol, int64 offset, int32 relocs)
1626         : OperandVisitable(kOpdStImmediate, 0), symbol(&symbol), offset(offset), relocs(relocs)
1627     {
1628     }
1629 
1630     ~StImmOperand() override = default;
1631     using OperandVisitable<StImmOperand>::OperandVisitable;
1632 
CloneTree(MapleAllocator & allocator)1633     StImmOperand *CloneTree(MapleAllocator &allocator) const override
1634     {
1635         // const MIRSymbol is not changed in cg, so we can do shallow copy
1636         return allocator.GetMemPool()->New<StImmOperand>(*this);
1637     }
1638 
Clone(MemPool & memPool)1639     Operand *Clone(MemPool &memPool) const override
1640     {
1641         return memPool.Clone<StImmOperand>(*this);
1642     }
1643 
GetSymbol()1644     const MIRSymbol *GetSymbol() const
1645     {
1646         return symbol;
1647     }
1648 
GetName()1649     const std::string &GetName() const
1650     {
1651         return symbol->GetName();
1652     }
1653 
GetOffset()1654     int64 GetOffset() const
1655     {
1656         return offset;
1657     }
1658 
SetOffset(int64 newOffset)1659     void SetOffset(int64 newOffset)
1660     {
1661         offset = newOffset;
1662     }
1663 
GetRelocs()1664     int32 GetRelocs() const
1665     {
1666         return relocs;
1667     }
1668 
1669     bool operator==(const StImmOperand &opnd) const
1670     {
1671         return (symbol == opnd.symbol && offset == opnd.offset && relocs == opnd.relocs);
1672     }
1673 
1674     bool operator<(const StImmOperand &opnd) const
1675     {
1676         return (symbol < opnd.symbol || (symbol == opnd.symbol && offset < opnd.offset) ||
1677                 (symbol == opnd.symbol && offset == opnd.offset && relocs < opnd.relocs));
1678     }
1679 
1680     bool Less(const Operand &right) const override;
1681 
Dump()1682     void Dump() const override
1683     {
1684         CHECK_FATAL(false, "dont run here");
1685     }
1686 
1687 private:
1688     const MIRSymbol *symbol;
1689     int64 offset;
1690     int32 relocs;
1691 };
1692 
1693 class ExtendShiftOperand : public OperandVisitable<ExtendShiftOperand> {
1694 public:
1695     /* if and only if at least one register is WSP, ARM Recommends use of the LSL
1696      * operator name rathe than UXTW */
1697     enum ExtendOp : uint8 {
1698         kUndef,
1699         kUXTB,
1700         kUXTH,
1701         kUXTW, /* equal to lsl in 32bits */
1702         kUXTX, /* equal to lsl in 64bits */
1703         kSXTB,
1704         kSXTH,
1705         kSXTW,
1706         kSXTX,
1707     };
1708 
ExtendShiftOperand(ExtendOp op,uint32 amt,int32 bitLen)1709     ExtendShiftOperand(ExtendOp op, uint32 amt, int32 bitLen)
1710         : OperandVisitable(Operand::kOpdExtend, bitLen), extendOp(op), shiftAmount(amt)
1711     {
1712     }
1713 
1714     ~ExtendShiftOperand() override = default;
1715     using OperandVisitable<ExtendShiftOperand>::OperandVisitable;
1716 
CloneTree(MapleAllocator & allocator)1717     ExtendShiftOperand *CloneTree(MapleAllocator &allocator) const override
1718     {
1719         return allocator.GetMemPool()->New<ExtendShiftOperand>(*this);
1720     }
1721 
Clone(MemPool & memPool)1722     Operand *Clone(MemPool &memPool) const override
1723     {
1724         return memPool.Clone<ExtendShiftOperand>(*this);
1725     }
1726 
GetShiftAmount()1727     uint32 GetShiftAmount() const
1728     {
1729         return shiftAmount;
1730     }
1731 
GetExtendOp()1732     ExtendOp GetExtendOp() const
1733     {
1734         return extendOp;
1735     }
1736 
GetValue()1737     uint32 GetValue() const
1738     {
1739         return shiftAmount;
1740     }
1741 
1742     bool Less(const Operand &right) const override;
1743 
Dump()1744     void Dump() const override
1745     {
1746         CHECK_FATAL(false, "dont run here");
1747     }
1748 
1749 private:
1750     ExtendOp extendOp;
1751     uint32 shiftAmount;
1752 };
1753 
1754 class BitShiftOperand : public OperandVisitable<BitShiftOperand> {
1755 public:
1756     enum ShiftOp : uint8 {
1757         kUndef,
1758         kLSL, /* logical shift left */
1759         kLSR, /* logical shift right */
1760         kASR, /* arithmetic shift right */
1761     };
1762 
1763     /* bitlength is equal to 5 or 6 */
BitShiftOperand(ShiftOp op,uint32 amt,int32 bitLen)1764     BitShiftOperand(ShiftOp op, uint32 amt, int32 bitLen)
1765         : OperandVisitable(Operand::kOpdShift, bitLen), shiftOp(op), shiftAmount(amt)
1766     {
1767     }
1768 
1769     ~BitShiftOperand() override = default;
1770     using OperandVisitable<BitShiftOperand>::OperandVisitable;
1771 
CloneTree(MapleAllocator & allocator)1772     BitShiftOperand *CloneTree(MapleAllocator &allocator) const override
1773     {
1774         return allocator.GetMemPool()->New<BitShiftOperand>(*this);
1775     }
1776 
Clone(MemPool & memPool)1777     Operand *Clone(MemPool &memPool) const override
1778     {
1779         return memPool.Clone<BitShiftOperand>(*this);
1780     }
1781 
Less(const Operand & right)1782     bool Less(const Operand &right) const override
1783     {
1784         if (&right == this) {
1785             return false;
1786         }
1787 
1788         /* For different type. */
1789         if (GetKind() != right.GetKind()) {
1790             return GetKind() < right.GetKind();
1791         }
1792 
1793         const BitShiftOperand *rightOpnd = static_cast<const BitShiftOperand *>(&right);
1794 
1795         /* The same type. */
1796         if (shiftOp != rightOpnd->shiftOp) {
1797             return shiftOp < rightOpnd->shiftOp;
1798         }
1799         return shiftAmount < rightOpnd->shiftAmount;
1800     }
1801 
GetShiftAmount()1802     uint32 GetShiftAmount() const
1803     {
1804         return shiftAmount;
1805     }
1806 
GetShiftOp()1807     ShiftOp GetShiftOp() const
1808     {
1809         return shiftOp;
1810     }
1811 
GetValue()1812     uint32 GetValue() const
1813     {
1814         return GetShiftAmount();
1815     }
1816 
Dump()1817     void Dump() const override
1818     {
1819         CHECK_FATAL(false, "dont run here");
1820     }
1821 
1822 private:
1823     ShiftOp shiftOp;
1824     uint32 shiftAmount;
1825 };
1826 
1827 class CommentOperand : public OperandVisitable<CommentOperand> {
1828 public:
CommentOperand(const char * str,MemPool & memPool)1829     CommentOperand(const char *str, MemPool &memPool) : OperandVisitable(Operand::kOpdString, 0), comment(str, &memPool)
1830     {
1831     }
1832 
CommentOperand(const std::string & str,MemPool & memPool)1833     CommentOperand(const std::string &str, MemPool &memPool)
1834         : OperandVisitable(Operand::kOpdString, 0), comment(str, &memPool)
1835     {
1836     }
1837 
1838     ~CommentOperand() override = default;
1839     using OperandVisitable<CommentOperand>::OperandVisitable;
1840 
GetComment()1841     const MapleString &GetComment() const
1842     {
1843         return comment;
1844     }
1845 
CloneTree(MapleAllocator & allocator)1846     CommentOperand *CloneTree(MapleAllocator &allocator) const override
1847     {
1848         return allocator.GetMemPool()->New<CommentOperand>(*this);
1849     }
1850 
Clone(MemPool & memPool)1851     Operand *Clone(MemPool &memPool) const override
1852     {
1853         return memPool.Clone<CommentOperand>(*this);
1854     }
1855 
IsCommentOpnd()1856     bool IsCommentOpnd() const override
1857     {
1858         return true;
1859     }
1860 
Less(const Operand & right)1861     bool Less(const Operand &right) const override
1862     {
1863         /* For different type. */
1864         return GetKind() < right.GetKind();
1865     }
1866 
Dump()1867     void Dump() const override
1868     {
1869         LogInfo::MapleLogger() << "# ";
1870         if (!comment.empty()) {
1871             LogInfo::MapleLogger() << comment;
1872         }
1873     }
1874 
1875 private:
1876     const MapleString comment;
1877 };
1878 
1879 using StringOperand = CommentOperand;
1880 
1881 class ListConstraintOperand : public OperandVisitable<ListConstraintOperand> {
1882 public:
ListConstraintOperand(MapleAllocator & allocator)1883     explicit ListConstraintOperand(MapleAllocator &allocator)
1884         : OperandVisitable(Operand::kOpdString, 0), stringList(allocator.Adapter()) {};
1885 
1886     ~ListConstraintOperand() override = default;
1887     using OperandVisitable<ListConstraintOperand>::OperandVisitable;
1888 
Dump()1889     void Dump() const override
1890     {
1891         for (auto *str : stringList) {
1892             LogInfo::MapleLogger() << "(" << str->GetComment().c_str() << ")";
1893         }
1894     }
1895 
CloneTree(MapleAllocator & allocator)1896     ListConstraintOperand *CloneTree(MapleAllocator &allocator) const override
1897     {
1898         auto *constraintOpnd = allocator.GetMemPool()->New<ListConstraintOperand>(allocator);
1899         for (auto stringOpnd : stringList) {
1900             constraintOpnd->stringList.emplace_back(stringOpnd->CloneTree(allocator));
1901         }
1902         return constraintOpnd;
1903     }
1904 
Clone(MemPool & memPool)1905     Operand *Clone(MemPool &memPool) const override
1906     {
1907         return memPool.Clone<ListConstraintOperand>(*this);
1908     }
1909 
Less(const Operand & right)1910     bool Less(const Operand &right) const override
1911     {
1912         /* For different type. */
1913         if (opndKind != right.GetKind()) {
1914             return opndKind < right.GetKind();
1915         }
1916 
1917         DEBUG_ASSERT(false, "We don't need to compare list operand.");
1918         return false;
1919     }
1920 
1921     MapleVector<StringOperand *> stringList;
1922 };
1923 
1924 /* for cg ssa analysis */
1925 class PhiOperand : public OperandVisitable<PhiOperand> {
1926 public:
PhiOperand(MapleAllocator & allocator)1927     explicit PhiOperand(MapleAllocator &allocator) : OperandVisitable(Operand::kOpdPhi, 0), phiList(allocator.Adapter())
1928     {
1929     }
1930 
1931     ~PhiOperand() override = default;
1932     using OperandVisitable<PhiOperand>::OperandVisitable;
1933 
CloneTree(MapleAllocator & allocator)1934     PhiOperand *CloneTree(MapleAllocator &allocator) const override
1935     {
1936         auto *phiOpnd = allocator.GetMemPool()->New<PhiOperand>(allocator);
1937         for (auto phiPair : phiList) {
1938             phiOpnd->InsertOpnd(phiPair.first, *phiPair.second->CloneTree(allocator));
1939         }
1940         return phiOpnd;
1941     }
1942 
Clone(MemPool & memPool)1943     Operand *Clone(MemPool &memPool) const override
1944     {
1945         return memPool.Clone<PhiOperand>(*this);
1946     }
1947 
Dump()1948     void Dump() const override
1949     {
1950         CHECK_FATAL(false, "NIY");
1951     }
1952 
InsertOpnd(uint32 bbId,RegOperand & phiParam)1953     void InsertOpnd(uint32 bbId, RegOperand &phiParam)
1954     {
1955         DEBUG_ASSERT(!phiList.count(bbId), "cannot insert duplicate operand");
1956         (void)phiList.emplace(std::pair(bbId, &phiParam));
1957     }
1958 
UpdateOpnd(uint32 bbId,uint32 newId,RegOperand & phiParam)1959     void UpdateOpnd(uint32 bbId, uint32 newId, RegOperand &phiParam)
1960     {
1961         (void)phiList.emplace(std::pair(newId, &phiParam));
1962         phiList.erase(bbId);
1963     }
1964 
GetOperands()1965     MapleMap<uint32, RegOperand *> &GetOperands()
1966     {
1967         return phiList;
1968     }
1969 
1970     uint32 GetLeastCommonValidBit() const;
1971 
1972     bool IsRedundancy() const;
1973 
Less(const Operand & right)1974     bool Less(const Operand &right) const override
1975     {
1976         /* For different type. */
1977         if (opndKind != right.GetKind()) {
1978             return opndKind < right.GetKind();
1979         }
1980         DEBUG_ASSERT(false, "We don't need to compare list operand.");
1981         return false;
1982     }
1983 
Equals(Operand & operand)1984     bool Equals(Operand &operand) const override
1985     {
1986         if (!operand.IsPhi()) {
1987             return false;
1988         }
1989         auto &op = static_cast<PhiOperand &>(operand);
1990         return (&op == this);
1991     }
1992 
1993 protected:
1994     MapleMap<uint32, RegOperand *> phiList; /* ssa-operand && BBId */
1995 };
1996 
1997 /* Use StImmOperand instead? */
1998 class FuncNameOperand : public OperandVisitable<FuncNameOperand> {
1999 public:
FuncNameOperand(const MIRSymbol & fsym)2000     explicit FuncNameOperand(const MIRSymbol &fsym) : OperandVisitable(kOpdBBAddress, 0), symbol(&fsym) {}
2001 
~FuncNameOperand()2002     ~FuncNameOperand() override
2003     {
2004         symbol = nullptr;
2005     }
2006     using OperandVisitable<FuncNameOperand>::OperandVisitable;
2007 
GetName()2008     const std::string &GetName() const
2009     {
2010         return symbol->GetName();
2011     }
2012 
IsFuncNameOpnd()2013     bool IsFuncNameOpnd() const override
2014     {
2015         return true;
2016     }
2017 
GetFunctionSymbol()2018     const MIRSymbol *GetFunctionSymbol() const
2019     {
2020         return symbol;
2021     }
2022 
SetFunctionSymbol(const MIRSymbol & fsym)2023     void SetFunctionSymbol(const MIRSymbol &fsym)
2024     {
2025         symbol = &fsym;
2026     }
2027 
CloneTree(MapleAllocator & allocator)2028     FuncNameOperand *CloneTree(MapleAllocator &allocator) const override
2029     {
2030         // const MIRSymbol is not changed in cg, so we can do shallow copy
2031         return allocator.GetMemPool()->New<FuncNameOperand>(*this);
2032     }
2033 
Clone(MemPool & memPool)2034     Operand *Clone(MemPool &memPool) const override
2035     {
2036         return memPool.New<FuncNameOperand>(*this);
2037     }
2038 
Less(const Operand & right)2039     bool Less(const Operand &right) const override
2040     {
2041         if (&right == this) {
2042             return false;
2043         }
2044         /* For different type. */
2045         if (GetKind() != right.GetKind()) {
2046             return GetKind() < right.GetKind();
2047         }
2048 
2049         auto *rightOpnd = static_cast<const FuncNameOperand *>(&right);
2050 
2051         return static_cast<const void *>(symbol) < static_cast<const void *>(rightOpnd->symbol);
2052     }
2053 
Dump()2054     void Dump() const override
2055     {
2056         LogInfo::MapleLogger() << GetName();
2057     }
2058 
2059 private:
2060     const MIRSymbol *symbol;
2061 };
2062 
namespace operand {
/* Property bit masks or-ed into OpndDesc::property. Each operand class
 * owns a distinct byte of the 64-bit mask, so the groups never collide. */
/* bit 0-7 for common */
enum CommOpndDescProp : maple::uint64 { kIsDef = 1ULL, kIsUse = (1ULL << 1), kIsVector = (1ULL << 2) };

/* bit 8-15 for reg */
enum RegOpndDescProp : maple::uint64 {
    kInt = (1ULL << 8),
    kFloat = (1ULL << 9),
    kRegTyCc = (1ULL << 10),
    kRegTyVary = (1ULL << 11),
};

/* bit 16-23 for imm (none defined yet; reserved) */
enum ImmOpndDescProp : maple::uint64 {};

/* bit 24-31 for mem */
enum MemOpndDescProp : maple::uint64 {
    kMemLow12 = (1ULL << 24),
    kLiteralLow12 = kMemLow12, /* same bit, interpreted per operand kind */
    kIsLoadLiteral = (1ULL << 25)
};
}  // namespace operand
2085 
2086 class OpndDesc {
2087 public:
OpndDesc(Operand::OperandType t,maple::uint64 p,maple::uint32 s)2088     OpndDesc(Operand::OperandType t, maple::uint64 p, maple::uint32 s) : opndType(t), property(p), size(s) {}
2089     virtual ~OpndDesc() = default;
2090 
GetOperandType()2091     Operand::OperandType GetOperandType() const
2092     {
2093         return opndType;
2094     }
2095 
GetSize()2096     maple::uint32 GetSize() const
2097     {
2098         return size;
2099     }
2100 
IsImm()2101     bool IsImm() const
2102     {
2103         return opndType == Operand::kOpdImmediate;
2104     }
2105 
IsRegister()2106     bool IsRegister() const
2107     {
2108         return opndType == Operand::kOpdRegister;
2109     }
2110 
IsMem()2111     bool IsMem() const
2112     {
2113         return opndType == Operand::kOpdMem;
2114     }
2115 
IsRegDef()2116     bool IsRegDef() const
2117     {
2118         return opndType == Operand::kOpdRegister && (property & operand::kIsDef);
2119     }
2120 
IsRegUse()2121     bool IsRegUse() const
2122     {
2123         return opndType == Operand::kOpdRegister && (property & operand::kIsUse);
2124     }
2125 
IsDef()2126     bool IsDef() const
2127     {
2128         return (property & operand::kIsDef) != 0;
2129     }
2130 
IsUse()2131     bool IsUse() const
2132     {
2133         return (property & operand::kIsUse) != 0;
2134     }
2135 
IsMemLow12()2136     bool IsMemLow12() const
2137     {
2138         return IsMem() && (property & operand::kMemLow12);
2139     }
2140 
IsLiteralLow12()2141     bool IsLiteralLow12() const
2142     {
2143         return opndType == Operand::kOpdStImmediate && (property & operand::kLiteralLow12);
2144     }
2145 
IsLoadLiteral()2146     bool IsLoadLiteral() const
2147     {
2148         return (property & operand::kIsLoadLiteral) != 0;
2149     }
2150 
2151 #define DEFINE_MOP(op, ...) static const OpndDesc op;
2152 #include "operand.def"
2153 #undef DEFINE_MOP
2154 
2155 private:
2156     Operand::OperandType opndType;
2157     maple::uint64 property;
2158     maple::uint32 size;
2159 };
2160 
// Wraps a condition code (see maplebe::ConditionCode) as an operand.
class CondOperand : public OperandVisitable<CondOperand> {
public:
    explicit CondOperand(maplebe::ConditionCode cc) : OperandVisitable(Operand::kOpdCond, k4ByteSize), cc(cc) {}

    ~CondOperand() override = default;
    using OperandVisitable<CondOperand>::OperandVisitable;

    CondOperand *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<CondOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.New<CondOperand>(cc);
    }

    ConditionCode GetCode() const
    {
        return cc;
    }

    // Implemented out of line (see the corresponding .cpp).
    bool Less(const Operand &right) const override;

    void Dump() const override
    {
        CHECK_FATAL(false, "dont run here");
    }

    // Textual names for the condition codes, indexed by ConditionCode.
    static const char *ccStrs[kCcLast];

private:
    ConditionCode cc;
};
2195 
2196 class OpndDumpVisitor : public OperandVisitorBase,
2197                         public OperandVisitors<RegOperand, ImmOperand, MemOperand, LabelOperand, FuncNameOperand,
2198                                                ListOperand, StImmOperand, CondOperand, CommentOperand, BitShiftOperand,
2199                                                ExtendShiftOperand, PhiOperand> {
2200 public:
OpndDumpVisitor(const OpndDesc & operandDesc)2201     explicit OpndDumpVisitor(const OpndDesc &operandDesc) : opndDesc(&operandDesc) {}
~OpndDumpVisitor()2202     virtual ~OpndDumpVisitor()
2203     {
2204         opndDesc = nullptr;
2205     }
2206 
2207 protected:
DumpOpndPrefix()2208     virtual void DumpOpndPrefix()
2209     {
2210         LogInfo::MapleLogger() << " (opnd:";
2211     }
DumpOpndSuffix()2212     virtual void DumpOpndSuffix()
2213     {
2214         LogInfo::MapleLogger() << " )";
2215     }
DumpSize(const Operand & opnd)2216     void DumpSize(const Operand &opnd) const
2217     {
2218         LogInfo::MapleLogger() << " [size:" << opnd.GetSize() << "]";
2219     }
DumpReferenceInfo(const Operand & opnd)2220     void DumpReferenceInfo(const Operand &opnd) const
2221     {
2222         if (opnd.IsReference()) {
2223             LogInfo::MapleLogger() << "[is_ref]";
2224         }
2225     }
GetOpndDesc()2226     const OpndDesc *GetOpndDesc() const
2227     {
2228         return opndDesc;
2229     }
2230 
2231 private:
2232     const OpndDesc *opndDesc;
2233 };
2234 } /* namespace maplebe */
2235 
2236 #endif /* MAPLEBE_INCLUDE_CG_OPERAND_H */
2237