• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef MAPLEBE_INCLUDE_CG_OPERAND_H
17 #define MAPLEBE_INCLUDE_CG_OPERAND_H
18 
19 #include "becommon.h"
20 #include "cg_option.h"
21 #include "aarch64/aarch64_imm_valid.h"
22 #include "visitor_common.h"
23 
24 /* maple_ir */
25 #include "mir_symbol.h"
26 #include "prim_types.h" /* for PrimType */
27 #include "types_def.h"  /* need uint8 etc */
28 
29 /* Mempool */
30 #include "memlayout.h"
31 #include "mempool_allocator.h" /* MapleList */
32 
33 namespace maplebe {
34 class OpndDesc;
35 class Emitter;
36 class FuncEmitInfo;
37 
38 bool IsMoveWidableImmediate(uint64 val, uint32 bitLen);
39 bool BetterUseMOVZ(uint64 val);
40 
/* Machine operator (opcode) identifier; concrete values come from the target's md tables. */
using MOperator = uint32;

/* Register class of a RegOperand. */
enum RegType : maple::uint8 {
    kRegTyUndef,
    kRegTyInt,
    kRegTyFloat,
    kRegTyCc,    /* condition-code (flags) register */
    kRegTyX87,
    kRegTyVary,  /* class not yet fixed; see RegOperand::IsOfVary() */
    kRegTyFpsc,
    kRegTyIndex,
    kRegTyLast,  /* sentinel: one past the last valid class */
};
53 
/*
 * Abstract base class of every code-generator operand (register, immediate,
 * memory reference, label, ...). Stores only the discriminating kind and the
 * operand width in bits; concrete subclasses add their own payload and must
 * implement CloneTree/Clone/Dump/Less/Accept.
 */
class Operand {
public:
    /*
     * Kind discriminator. NOTE: the relative order matters — IsImmediate()
     * below relies on kOpdFPImmediate, kOpdStImmediate and kOpdOffset being
     * declared contiguously in this order.
     */
    enum OperandType : uint8 {
        kOpdRegister,
        kOpdImmediate,
        kOpdMem,
        kOpdCond, /*  for condition code */
        kOpdPhi,  /*  for phi operand */
        kOpdFPImmediate,
        kOpdStImmediate, /* use the symbol name as the offset */
        kOpdOffset,      /* for the offset operand in MemOperand */
        kOpdBBAddress,
        kOpdList,     /*  for list operand */
        kOpdShift,    /*  for imm shift operand */
        kOpdRegShift, /*  for reg shift operand */
        kOpdExtend,   /*  for extend operand */
        kOpdString,   /*  for comments */
        kOpdUndef
    };

    /* size is the operand width in bits (see the `size` member below). */
    Operand(OperandType type, uint32 size) : opndKind(type), size(size) {}
    virtual ~Operand() = default;

    /* Operand width in bits. */
    uint32 GetSize() const
    {
        return size;
    }

    void SetSize(uint32 sz)
    {
        size = sz;
    }

    /* True if the operand carries a GC-reference value (set externally via SetIsReference). */
    bool IsReference() const
    {
        return isReference;
    }

    void SetIsReference(bool isRef)
    {
        isReference = isRef;
    }

    OperandType GetKind() const
    {
        return opndKind;
    }

    /* Plain integer immediate, including offset immediates. */
    bool IsIntImmediate() const
    {
        return opndKind == kOpdImmediate || opndKind == kOpdOffset;
    }

    /* Any constant immediate: integer, offset or floating-point. */
    bool IsConstImmediate() const
    {
        return opndKind == kOpdImmediate || opndKind == kOpdOffset || opndKind == kOpdFPImmediate;
    }

    bool IsOfstImmediate() const
    {
        return opndKind == kOpdOffset;
    }

    bool IsStImmediate() const
    {
        return opndKind == kOpdStImmediate;
    }

    /*
     * Covers kOpdImmediate plus the contiguous range
     * kOpdFPImmediate..kOpdOffset (FP, symbol-form and offset immediates) —
     * this depends on the declaration order of OperandType above.
     */
    bool IsImmediate() const
    {
        return (kOpdFPImmediate <= opndKind && opndKind <= kOpdOffset) || opndKind == kOpdImmediate;
    }

    bool IsRegister() const
    {
        return opndKind == kOpdRegister;
    }

    bool IsList() const
    {
        return opndKind == kOpdList;
    }

    bool IsPhi() const
    {
        return opndKind == kOpdPhi;
    }

    bool IsMemoryAccessOperand() const
    {
        return opndKind == kOpdMem;
    }

    bool IsLabel() const
    {
        return opndKind == kOpdBBAddress;
    }

    /* Overridden by targets that model a hardware zero register (e.g. AArch64 xzr/wzr). */
    virtual bool IsZeroRegister() const
    {
        return false;
    };

    bool IsConditionCode() const
    {
        return opndKind == kOpdCond;
    }

    bool IsOpdShift() const
    {
        return opndKind == kOpdShift;
    }

    bool IsRegShift() const
    {
        return opndKind == kOpdRegShift;
    }

    bool IsOpdExtend() const
    {
        return opndKind == kOpdExtend;
    }

    virtual bool IsLabelOpnd() const
    {
        return false;
    }

    virtual bool IsFuncNameOpnd() const
    {
        return false;
    }

    virtual bool IsCommentOpnd() const
    {
        return false;
    }

    // Custom deep copy
    virtual Operand *CloneTree(MapleAllocator &allocator) const = 0;
    virtual Operand *Clone(MemPool &memPool) const = 0;

    /*
     * A simple implementation here.
     * Each subclass can elaborate on demand.
     * NOTE: the base version is identity equality (same object) plus matching
     * kind/size; two distinct but equal-valued operands compare unequal unless
     * the subclass overrides this.
     */
    virtual bool Equals(Operand &op) const
    {
        return BasicEquals(op) && (&op == this);
    }

    /* Kind and bit-size match; shared precondition for the subclass Equals() overrides. */
    bool BasicEquals(const Operand &op) const
    {
        return opndKind == op.GetKind() && size == op.GetSize();
    }

    virtual void Dump() const = 0;

    /* Strict weak ordering used by operand containers; subclasses order within their kind. */
    virtual bool Less(const Operand &right) const = 0;

    /* Visitor double-dispatch entry; see OperandVisitable below. */
    virtual void Accept(OperandVisitorBase &v) = 0;

protected:
    OperandType opndKind; /* operand type */
    uint32 size;          /* size in bits */
    uint64 flag = 0;      /* operand property; not read/written in this header — TODO confirm users */
    bool isReference = false;
};
222 
/* RegOperand */
/* Bit flags describing which half of a 64-bit register a def writes. */
enum RegOperandState : uint32 { kRegOpndNone = 0, kRegOpndSetLow32 = 0x1, kRegOpndSetHigh32 = 0x2 };
225 
226 template <typename VisitableTy>
227 class OperandVisitable : public Operand {
228 public:
229     using Operand::Operand;
Accept(OperandVisitorBase & v)230     void Accept(OperandVisitorBase &v) override
231     {
232         if (OperandVisitor<VisitableTy> *typeV = dynamic_cast<OperandVisitor<VisitableTy> *>(&v)) {
233             typeV->Visit(static_cast<VisitableTy *>(this));
234         } else {
235             /* the type which has no implements */
236         }
237     }
238 };
239 
240 class RegOperand : public OperandVisitable<RegOperand> {
241 public:
242     RegOperand(regno_t regNum, uint32 size, RegType type, uint32 flg = 0)
OperandVisitable(kOpdRegister,size)243         : OperandVisitable(kOpdRegister, size), regNO(regNum), regType(type), validBitsNum(size), regFlag(flg)
244     {
245     }
246 
247     ~RegOperand() override = default;
248     using OperandVisitable<RegOperand>::OperandVisitable;
249 
CloneTree(MapleAllocator & allocator)250     RegOperand *CloneTree(MapleAllocator &allocator) const override
251     {
252         return allocator.GetMemPool()->New<RegOperand>(*this);
253     }
254 
Clone(MemPool & memPool)255     Operand *Clone(MemPool &memPool) const override
256     {
257         return memPool.Clone<RegOperand>(*this);
258     }
259 
SetValidBitsNum(uint32 validNum)260     void SetValidBitsNum(uint32 validNum)
261     {
262         validBitsNum = validNum;
263     }
264 
GetValidBitsNum()265     uint32 GetValidBitsNum() const
266     {
267         return validBitsNum;
268     }
269 
IsOfIntClass()270     bool IsOfIntClass() const
271     {
272         return regType == kRegTyInt;
273     }
274 
IsOfFloatOrSIMDClass()275     bool IsOfFloatOrSIMDClass() const
276     {
277         return regType == kRegTyFloat;
278     }
279 
IsOfCC()280     bool IsOfCC() const
281     {
282         return regType == kRegTyCc;
283     }
284 
IsOfVary()285     bool IsOfVary() const
286     {
287         return regType == kRegTyVary;
288     }
289 
GetRegisterType()290     RegType GetRegisterType() const
291     {
292         return regType;
293     }
294 
SetRegisterType(RegType newTy)295     void SetRegisterType(RegType newTy)
296     {
297         regType = newTy;
298     }
299 
IsBBLocalReg()300     virtual bool IsBBLocalReg() const
301     {
302         return isBBLocal;
303     }
304 
SetRegNotBBLocal()305     void SetRegNotBBLocal()
306     {
307         isBBLocal = false;
308     }
309 
GetRegisterNumber()310     regno_t GetRegisterNumber() const
311     {
312         return regNO;
313     }
314 
SetRegisterNumber(regno_t regNum)315     void SetRegisterNumber(regno_t regNum)
316     {
317         regNO = regNum;
318     }
319 
Dump()320     void Dump() const override
321     {
322         LogInfo::MapleLogger() << "reg ";
323         LogInfo::MapleLogger() << "size : " << GetSize();
324         LogInfo::MapleLogger() << " NO_" << GetRegisterNumber();
325         if (IsReference()) {
326             LogInfo::MapleLogger() << " is_ref";
327         }
328     };
329 
Less(const Operand & right)330     bool Less(const Operand &right) const override
331     {
332         if (&right == this) {
333             return false;
334         }
335 
336         /* For different type. */
337         if (opndKind != right.GetKind()) {
338             return opndKind < right.GetKind();
339         }
340 
341         auto *rightOpnd = static_cast<const RegOperand *>(&right);
342 
343         /* The same type. */
344         return regNO < rightOpnd->regNO;
345     }
346 
Less(const RegOperand & right)347     bool Less(const RegOperand &right) const
348     {
349         return regNO < right.regNO;
350     }
351 
RegNumEqual(const RegOperand & right)352     bool RegNumEqual(const RegOperand &right) const
353     {
354         return regNO == right.GetRegisterNumber();
355     }
356 
RegCompare(const RegOperand & right)357     int32 RegCompare(const RegOperand &right) const
358     {
359         return (regNO - right.GetRegisterNumber());
360     }
361 
Equals(Operand & operand)362     bool Equals(Operand &operand) const override
363     {
364         if (!operand.IsRegister()) {
365             return false;
366         }
367         auto &op = static_cast<RegOperand &>(operand);
368         if (&op == this) {
369             return true;
370         }
371         return (BasicEquals(op) && regNO == op.GetRegisterNumber() && regType == op.GetRegisterType() &&
372                 IsBBLocalReg() == op.IsBBLocalReg());
373     }
374 
IsSameRegNO(const Operand & firstOpnd,const Operand & secondOpnd)375     static bool IsSameRegNO(const Operand &firstOpnd, const Operand &secondOpnd)
376     {
377         if (!firstOpnd.IsRegister() || !secondOpnd.IsRegister()) {
378             return false;
379         }
380         auto &firstReg = static_cast<const RegOperand &>(firstOpnd);
381         auto &secondReg = static_cast<const RegOperand &>(secondOpnd);
382         return firstReg.RegNumEqual(secondReg);
383     }
384 
IsSameReg(const Operand & firstOpnd,const Operand & secondOpnd)385     static bool IsSameReg(const Operand &firstOpnd, const Operand &secondOpnd)
386     {
387         if (firstOpnd.GetSize() != secondOpnd.GetSize()) {
388             return false;
389         }
390         return IsSameRegNO(firstOpnd, secondOpnd);
391     }
392 
SetOpndSSAForm()393     void SetOpndSSAForm()
394     {
395         isSSAForm = true;
396     }
397 
SetOpndOutOfSSAForm()398     void SetOpndOutOfSSAForm()
399     {
400         isSSAForm = false;
401     }
402 
IsSSAForm()403     bool IsSSAForm() const
404     {
405         return isSSAForm;
406     }
407 
SetRefField(bool newIsRefField)408     void SetRefField(bool newIsRefField)
409     {
410         isRefField = newIsRefField;
411     }
412 
IsPhysicalRegister()413     bool IsPhysicalRegister() const
414     {
415         constexpr uint32 maxPhysicalRegisterNumber = 100;
416         return GetRegisterNumber() > 0 && GetRegisterNumber() < maxPhysicalRegisterNumber && !IsOfCC();
417     }
418 
IsVirtualRegister()419     bool IsVirtualRegister() const
420     {
421         return !IsPhysicalRegister();
422     }
423 
IsBBLocalVReg()424     bool IsBBLocalVReg() const
425     {
426         return IsVirtualRegister() && IsBBLocalReg();
427     }
428 
SetIF64Vec()429     void SetIF64Vec()
430     {
431         if64Vec = true;
432     }
433 
GetIF64Vec()434     bool GetIF64Vec() const
435     {
436         return if64Vec;
437     }
438 
SetVecLanePosition(int32 pos)439     void SetVecLanePosition(int32 pos)
440     {
441         vecLane = static_cast<int16>(pos);
442     }
443 
GetVecLanePosition()444     int32 GetVecLanePosition() const
445     {
446         return vecLane;
447     }
448 
SetVecLaneSize(uint32 size)449     void SetVecLaneSize(uint32 size)
450     {
451         vecLaneSize = static_cast<uint16>(size);
452     }
453 
GetVecLaneSize()454     uint32 GetVecLaneSize() const
455     {
456         return vecLaneSize;
457     }
458 
SetVecElementSize(uint32 size)459     void SetVecElementSize(uint32 size)
460     {
461         vecElementSize = size;
462     }
463 
GetVecElementSize()464     uint64 GetVecElementSize() const
465     {
466         return vecElementSize;
467     }
468 
SetHigh8Bit()469     void SetHigh8Bit()
470     {
471         isHigh8Bit = true;
472     }
473 
IsHigh8Bit()474     bool IsHigh8Bit()
475     {
476         return isHigh8Bit;
477     }
478 
SetBaseRefOpnd(RegOperand & regOpnd)479     void SetBaseRefOpnd(RegOperand &regOpnd)
480     {
481         baseRefOpnd = &regOpnd;
482     }
483 
GetBaseRefOpnd()484     const RegOperand *GetBaseRefOpnd() const
485     {
486         return baseRefOpnd;
487     }
488 
GetBaseRefOpnd()489     RegOperand *GetBaseRefOpnd()
490     {
491         return baseRefOpnd;
492     }
493 
494     bool operator==(const RegOperand &o) const;
495 
496     bool operator<(const RegOperand &o) const;
497 
498 protected:
499     regno_t regNO;
500     RegType regType;
501 
502     /*
503      * used for EBO(-O1), it can recognize the registers whose use and def are in
504      * different BB. It is true by default. Sometime it should be false such as
505      * when handle intrinsiccall for target
506      * aarch64(AArch64CGFunc::SelectIntrinsicCall).
507      */
508     bool isBBLocal = true;
509     uint32 validBitsNum;
510     /* use for SSA analysis */
511     bool isSSAForm = false;
512     bool isRefField = false;
513     uint32 regFlag = 0;
514     int16 vecLane = -1;        /* -1 for whole reg, 0 to 15 to specify each lane one at a time */
515     uint16 vecLaneSize = 0;    /* Number of lanes */
516     uint64 vecElementSize = 0; /* size of vector element in each lane */
517     bool if64Vec = false;      /* operand returning 64x1's int value in FP/Simd register */
518     bool isHigh8Bit = false;
519     RegOperand *baseRefOpnd = nullptr;
520 }; /* class RegOperand */
521 
/* Whether an immediate still needs stack-frame-size adjustment (see ImmOperand::isVary). */
enum VaryType : uint8 {
    kNotVary = 0,
    kUnAdjustVary, /* not yet adjusted */
    kAdjustVary,   /* already adjusted */
};
527 
528 class ImmOperand : public OperandVisitable<ImmOperand> {
529 public:
530     ImmOperand(int64 val, uint32 size, bool isSigned, VaryType isVar = kNotVary, bool isFloat = false)
OperandVisitable(kOpdImmediate,size)531         : OperandVisitable(kOpdImmediate, size), value(val), isSigned(isSigned), isVary(isVar), isFmov(isFloat)
532     {
533     }
534     ImmOperand(OperandType type, int64 val, uint32 size, bool isSigned, VaryType isVar = kNotVary, bool isFloat = false)
OperandVisitable(type,size)535         : OperandVisitable(type, size), value(val), isSigned(isSigned), isVary(isVar), isFmov(isFloat)
536     {
537     }
538     ImmOperand(const MIRSymbol &symbol, int64 val, int32 relocs, bool isSigned, VaryType isVar = kNotVary,
539                bool isFloat = false)
540         : OperandVisitable(kOpdStImmediate, 0),
541           value(val),
542           isSigned(isSigned),
543           isVary(isVar),
544           isFmov(isFloat),
545           symbol(&symbol),
546           relocs(relocs)
547     {
548     }
549     ~ImmOperand() override = default;
550     using OperandVisitable<ImmOperand>::OperandVisitable;
551 
CloneTree(MapleAllocator & allocator)552     ImmOperand *CloneTree(MapleAllocator &allocator) const override
553     {
554         // const MIRSymbol is not changed in cg, so we can do shallow copy
555         return allocator.GetMemPool()->New<ImmOperand>(*this);
556     }
557 
Clone(MemPool & memPool)558     Operand *Clone(MemPool &memPool) const override
559     {
560         return memPool.Clone<ImmOperand>(*this);
561     }
562 
GetSymbol()563     const MIRSymbol *GetSymbol() const
564     {
565         return symbol;
566     }
567 
GetName()568     const std::string &GetName() const
569     {
570         return symbol->GetName();
571     }
572 
GetRelocs()573     int32 GetRelocs() const
574     {
575         return relocs;
576     }
577 
IsInBitSize(uint8 size,uint8 nLowerZeroBits)578     bool IsInBitSize(uint8 size, uint8 nLowerZeroBits) const
579     {
580         return IsBitSizeImmediate(static_cast<uint64>(value), size, nLowerZeroBits);
581     }
582 
IsBitmaskImmediate()583     bool IsBitmaskImmediate() const
584     {
585         DEBUG_ASSERT(!IsZero(), " 0 is reserved for bitmask immediate");
586         DEBUG_ASSERT(!IsAllOnes(), " -1 is reserved for bitmask immediate");
587         return maplebe::aarch64::IsBitmaskImmediate(static_cast<uint64>(value), static_cast<uint32>(size));
588     }
589 
IsBitmaskImmediate(uint32 destSize)590     bool IsBitmaskImmediate(uint32 destSize) const
591     {
592         DEBUG_ASSERT(!IsZero(), " 0 is reserved for bitmask immediate");
593         DEBUG_ASSERT(!IsAllOnes(), " -1 is reserved for bitmask immediate");
594         return maplebe::aarch64::IsBitmaskImmediate(static_cast<uint64>(value), static_cast<uint32>(destSize));
595     }
596 
IsSingleInstructionMovable()597     bool IsSingleInstructionMovable() const
598     {
599         return (IsMoveWidableImmediate(static_cast<uint64>(value), static_cast<uint32>(size)) ||
600                 IsMoveWidableImmediate(~static_cast<uint64>(value), static_cast<uint32>(size)) || IsBitmaskImmediate());
601     }
602 
IsSingleInstructionMovable(uint32 destSize)603     bool IsSingleInstructionMovable(uint32 destSize) const
604     {
605         return (IsMoveWidableImmediate(static_cast<uint64>(value), static_cast<uint32>(destSize)) ||
606                 IsMoveWidableImmediate(~static_cast<uint64>(value), static_cast<uint32>(destSize)) ||
607                 IsBitmaskImmediate(destSize));
608     }
609 
GetValue()610     int64 GetValue() const
611     {
612         return value;
613     }
614 
SetValue(int64 val)615     void SetValue(int64 val)
616     {
617         value = val;
618     }
619 
SetVary(VaryType flag)620     void SetVary(VaryType flag)
621     {
622         isVary = flag;
623     }
624 
IsZero()625     bool IsZero() const
626     {
627         return value == 0;
628     }
629 
GetVary()630     VaryType GetVary() const
631     {
632         return isVary;
633     }
634 
IsOne()635     bool IsOne() const
636     {
637         return value == 1;
638     }
639 
IsSignedValue()640     bool IsSignedValue() const
641     {
642         return isSigned;
643     }
644 
SetSigned()645     void SetSigned()
646     {
647         isSigned = true;
648     }
649 
SetSigned(bool flag)650     void SetSigned(bool flag)
651     {
652         isSigned = flag;
653     }
654 
IsInBitSizeRot(uint8 size)655     bool IsInBitSizeRot(uint8 size) const
656     {
657         return IsInBitSizeRot(size, value);
658     }
659 
IsInBitSizeRot(uint8 size,int64 val)660     static bool IsInBitSizeRot(uint8 size, int64 val)
661     {
662         /* to tell if the val is in a rotate window of size */
663 #if __GNU_C__ || __clang__
664         if (val == 0) {
665             return true;
666         }
667         int32 start = __builtin_ctzll(static_cast<uint64>(val));
668         int32 end = static_cast<int32>(sizeof(val) * kBitsPerByte - __builtin_clzll(static_cast<uint64>(val)) - 1);
669         return (size >= end - start + 1);
670 #else
671         uint8 start = 0;
672         uint8 end = 0;
673         bool isFound = false;
674         CHECK_FATAL(val > 0, "do not perform bit operator operations on signed integers");
675         for (uint32 i = 0; i < k64BitSize; ++i) {
676             /* check whether the ith bit of val is 1 or not */
677             if (((static_cast<uint64>(val) >> i) & 0x1) == 0x1) {
678                 if (!isFound) {
679                     start = i;
680                     end = i;
681                     isFound = true;
682                 } else {
683                     end = i;
684                 }
685             }
686         }
687         return !isFound || (size >= (end - start) + 1);
688 #endif
689     }
690 
IsInValueRange(int32 lowVal,int32 highVal,int32 val)691     static bool IsInValueRange(int32 lowVal, int32 highVal, int32 val)
692     {
693         return val >= lowVal && val <= highVal;
694     }
695 
IsNegative()696     bool IsNegative() const
697     {
698         return isSigned && value < 0;
699     }
700 
Add(int64 delta)701     void Add(int64 delta)
702     {
703         value += delta;
704     }
705 
Negate()706     void Negate()
707     {
708         value = -value;
709     }
710 
BitwiseNegate()711     void BitwiseNegate()
712     {
713         value = ~(static_cast<uint64>(value)) & ((1ULL << size) - 1UL);
714     }
715 
DivideByPow2(int32 shift)716     void DivideByPow2(int32 shift)
717     {
718         value = (static_cast<uint64>(value)) >> shift;
719     }
720 
ModuloByPow2(int32 shift)721     void ModuloByPow2(int32 shift)
722     {
723         value = (static_cast<uint64>(value)) & ((1ULL << shift) - 1UL);
724     }
725 
IsAllOnes()726     bool IsAllOnes() const
727     {
728         return value == -1;
729     }
730 
IsAllOnes32bit()731     bool IsAllOnes32bit() const
732     {
733         return value == 0x0ffffffffLL;
734     }
735 
736     bool operator<(const ImmOperand &iOpnd) const
737     {
738         return value < iOpnd.value || (value == iOpnd.value && isSigned < iOpnd.isSigned) ||
739                (value == iOpnd.value && isSigned == iOpnd.isSigned && size < iOpnd.GetSize());
740     }
741 
742     bool operator==(const ImmOperand &iOpnd) const
743     {
744         return (value == iOpnd.value && isSigned == iOpnd.isSigned && size == iOpnd.GetSize());
745     }
746 
747     void Dump() const override;
748 
Less(const Operand & right)749     bool Less(const Operand &right) const override
750     {
751         if (&right == this) {
752             return false;
753         }
754 
755         /* For different type. */
756         if (opndKind != right.GetKind()) {
757             return opndKind < right.GetKind();
758         }
759 
760         auto *rightOpnd = static_cast<const ImmOperand *>(&right);
761 
762         /* The same type. */
763         if (isSigned != rightOpnd->isSigned) {
764             return isSigned;
765         }
766 
767         if (isVary != rightOpnd->isVary) {
768             return isVary;
769         }
770 
771         return value < rightOpnd->value;
772     }
773 
Equals(Operand & operand)774     bool Equals(Operand &operand) const override
775     {
776         if (!operand.IsImmediate()) {
777             return false;
778         }
779         auto &op = static_cast<ImmOperand &>(operand);
780         if (&op == this) {
781             return true;
782         }
783         return (BasicEquals(op) && value == op.GetValue() && isSigned == op.IsSignedValue());
784     }
785 
ValueEquals(const ImmOperand & op)786     bool ValueEquals(const ImmOperand &op) const
787     {
788         if (&op == this) {
789             return true;
790         }
791         return (value == op.GetValue() && isSigned == op.IsSignedValue());
792     }
IsFmov()793     bool IsFmov() const
794     {
795         return isFmov;
796     }
797 
798 protected:
799     int64 value;
800     bool isSigned;
801     VaryType isVary;
802     bool isFmov = false;
803     const MIRSymbol *symbol; /* for Immediate in symbol form */
804     int32 relocs;
805 };
806 
/*
 * Offset operand used inside MemOperand: a pure symbol offset, a pure
 * immediate offset, or symbol + immediate. Always constructed signed with
 * kind kOpdOffset.
 *
 * NOTE(review): `symbol` and `relocs` below shadow the identically named
 * protected members of ImmOperand, and GetSymbol()/GetRelocs() are
 * non-virtual, so base-class accessors see different state than these —
 * confirm this hiding is intentional.
 */
class OfstOperand : public ImmOperand {
public:
    enum OfstType : uint8 {
        kSymbolOffset,
        kImmediateOffset,
        kSymbolImmediateOffset,
    };

    /* only for symbol offset */
    OfstOperand(const MIRSymbol &mirSymbol, uint32 size, int32 relocs)
        : ImmOperand(kOpdOffset, 0, size, true, kNotVary, false),
          offsetType(kSymbolOffset),
          symbol(&mirSymbol),
          relocs(relocs)
    {
    }
    /* only for Immediate offset */
    OfstOperand(int64 val, uint32 size, VaryType isVar = kNotVary)
        : ImmOperand(kOpdOffset, static_cast<int64>(val), size, true, isVar, false),
          offsetType(kImmediateOffset),
          symbol(nullptr),
          relocs(0)
    {
    }
    /* for symbol and Immediate offset */
    OfstOperand(const MIRSymbol &mirSymbol, int64 val, uint32 size, int32 relocs, VaryType isVar = kNotVary)
        : ImmOperand(kOpdOffset, val, size, true, isVar, false),
          offsetType(kSymbolImmediateOffset),
          symbol(&mirSymbol),
          relocs(relocs)
    {
    }

    ~OfstOperand() override
    {
        symbol = nullptr;
    }

    OfstOperand *CloneTree(MapleAllocator &allocator) const override
    {
        // const MIRSymbol is not changed in cg, so we can do shallow copy
        return allocator.GetMemPool()->New<OfstOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<OfstOperand>(*this);
    }

    bool IsSymOffset() const
    {
        return offsetType == kSymbolOffset;
    }
    bool IsImmOffset() const
    {
        return offsetType == kImmediateOffset;
    }
    bool IsSymAndImmOffset() const
    {
        return offsetType == kSymbolImmediateOffset;
    }

    /* Hides (non-virtual) ImmOperand::GetSymbol; returns this class's symbol. */
    const MIRSymbol *GetSymbol() const
    {
        return symbol;
    }

    /* Precondition: symbol form (symbol != nullptr). */
    const std::string &GetSymbolName() const
    {
        return symbol->GetName();
    }

    /* The immediate part of the offset (inherited ImmOperand value). */
    int64 GetOffsetValue() const
    {
        return GetValue();
    }

    void SetOffsetValue(int32 offVal)
    {
        SetValue(static_cast<int64>(offVal));
    }

    void AdjustOffset(int32 delta)
    {
        Add(static_cast<int64>(delta));
    }

    bool operator==(const OfstOperand &opnd) const
    {
        return (offsetType == opnd.offsetType && symbol == opnd.symbol && ImmOperand::operator==(opnd) &&
                relocs == opnd.relocs);
    }

    /* Orders by offset type, then symbol pointer, then immediate value. */
    bool operator<(const OfstOperand &opnd) const
    {
        return (offsetType < opnd.offsetType || (offsetType == opnd.offsetType && symbol < opnd.symbol) ||
                (offsetType == opnd.offsetType && symbol == opnd.symbol && GetValue() < opnd.GetValue()));
    }

    void Dump() const override
    {
        if (IsImmOffset()) {
            LogInfo::MapleLogger() << "ofst:" << GetValue();
        } else {
            LogInfo::MapleLogger() << GetSymbolName();
            LogInfo::MapleLogger() << "+offset:" << GetValue();
        }
    }

private:
    OfstType offsetType;
    const MIRSymbol *symbol; /* shadows ImmOperand::symbol — see class note */
    int32 relocs;            /* shadows ImmOperand::relocs — see class note */
};
921 
922 /*
923  * Table C1-6 A64 Load/Store addressing modes
924  * |         Offset
925  * Addressing Mode    | Immediate     | Register             | Extended Register
926  *
927  * Base register only | [base{,#0}]   | -                    | -
928  * (no offset)        | B_OI_NONE     |                      |
929  *                   imm=0
930  *
931  * Base plus offset   | [base{,#imm}] | [base,Xm{,LSL #imm}] | [base,Wm,(S|U)XTW
932  * {#imm}] B_OI_NONE     | B_OR_X               | B_OR_X imm=0,1 (0,3)        |
933  * imm=00,01,10,11 (0/2,s/u)
934  *
935  * Pre-indexed        | [base, #imm]! | -                    | -
936  *
937  * Post-indexed       | [base], #imm  | [base], Xm(a)        | -
938  *
939  * Literal            | label         | -                    | -
940  * (PC-relative)
941  *
942  * a) The post-indexed by register offset mode can be used with the SIMD
943  * Load/Store structure instructions described in Load/Store Vector on page
944  * C3-154. Otherwise the post-indexed by register offset mode is not available.
945  */
946 class MemOperand : public OperandVisitable<MemOperand> {
947 public:
948     enum AArch64AddressingMode : uint8 {
949         kAddrModeUndef,
950         /* AddrMode_BO, base, offset. EA = [base] + offset */
951         kAddrModeBOi, /* INTACT: EA = [base]+immediate */
952         /*
953          * PRE: base += immediate, EA = [base]
954          * POST: EA = [base], base += immediate
955          */
956         kAddrModeBOrX,    /* EA = [base]+Extend([offreg/idxreg]), OR=Wn/Xn */
957         kAddrModeLiteral, /* AArch64 insruction LDR takes literal and */
958         /*
959          * "calculates an address from the PC value and an immediate offset,
960          * loads a word from memory, and writes it to a register."
961          */
962         kAddrModeLo12Li,  // EA = [base] + #:lo12:Label+immediate. (Example: [x0,
963                           // #:lo12:__Label300+456]
964         kLiteral,         /* xxx_l mode: label */
965         // X86 scale Type
966         kScale,
967     };
968     /*
969      * ARMv8-A A64 ISA Overview by Matteo Franchin @ ARM
970      * (presented at 64-bit terminal platform on ARM. Sep. 2015) p.14
971      * o Address to load from/store to is a 64-bit base register + an optional
972      * offset LDR X0, [X1] ; Load from address held in X1 STR X0, [X1] ; Store to
973      * address held in X1
974      *
975      * o Offset can be an immediate or a register
976      *   LDR X0, [X1, #8]  ; Load from address [X1 + 8 bytes]
977      *   LDR X0, [X1, #-8] ; Load with negative offset
978      *   LDR X0, [X1, X2]  ; Load from address [X1 + X2]
979      *
980      * o A Wn register offset needs to be extended to 64 bits
981      *  LDR X0, [X1, W2, SXTW] ; Sign-extend offset in W2
982      *   LDR X0, [X1, W2, UXTW] ; Zero-extend offset in W2
983      *
984      * o Both Xn and Wn register offsets can include an optional left-shift
985      *   LDR X0, [X1, W2, UXTW #2] ; Zero-extend offset in W2 & left-shift by 2
986      *   LDR X0, [X1, X2, LSL #2]  ; Left-shift offset in X2 by 2
987      *
988      * p.15
989      * Addressing Modes                       Analogous C Code
990      *                                       int *intptr = ... // X1
991      *                                       int out; // W0
992      * o Simple: X1 is not changed
993      *   LDR W0, [X1]                        out = *intptr;
994      * o Offset: X1 is not changed
995      *   LDR W0, [X1, #4]                    out = intptr[1];
996      * o Pre-indexed: X1 changed before load
997      *   LDR W0, [X1, #4]! =|ADD X1,X1,#4    out = *(++intptr);
998      * |LDR W0,[X1]
999      * o Post-indexed: X1 changed after load
1000      *   LDR W0, [X1], #4  =|LDR W0,[X1]     out = *(intptr++);
1001      * |ADD X1,X1,#4
1002      */
    /* Encodes the extend/shift information carried by a register-offset
     * addressing mode (kAddrModeBOrX).  The low nibble is a one-hot shift
     * amount (1 << shift, see the ctor taking `shift` and ShiftAmount());
     * the high bits select sign- vs zero-extension of a Wn index register. */
    enum ExtendInfo : uint8 {
        kShiftZero = 0x1,       /* one-hot: shift by 0 */
        kShiftOne = 0x2,        /* one-hot: shift by 1 */
        kShiftTwo = 0x4,        /* one-hot: shift by 2 */
        kShiftThree = 0x8,      /* one-hot: shift by 3 */
        kUnsignedExtend = 0x10, /* zero-extend the index register (UXTW) */
        kSignExtend = 0x20      /* sign-extend the index register (SXTW) */
    };

    /* Base-register write-back behaviour of a load/store. */
    enum IndexingOption : uint8 {
        kIntact,    /* base register stays the same */
        kPreIndex,  /* base register gets changed before load */
        kPostIndex, /* base register gets changed after load */
    };
1017 
    /* Size-only form: an empty kOpdMem operand of `size` bits; every
     * sub-operand stays null.
     * NOTE(review): single-argument and not explicit -- allows implicit
     * conversion from uint32; confirm callers rely on this before tightening. */
    MemOperand(uint32 size) : OperandVisitable(Operand::kOpdMem, size) {}
    /* Symbol-addressed form; registers and offset remain unset. */
    MemOperand(uint32 size, const MIRSymbol &mirSymbol) : OperandVisitable(Operand::kOpdMem, size), symbol(&mirSymbol)
    {
    }

    /* Base register + immediate offset; only kAddrModeBOi is accepted. */
    MemOperand(uint32 size, RegOperand &baseOp, ImmOperand &ofstOp, AArch64AddressingMode mode = kAddrModeBOi)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&baseOp),
          offsetOpnd(&ofstOp),
          symbol(nullptr),
          addrMode(mode)
    {
        DEBUG_ASSERT((mode == kAddrModeBOi), "check mode!");
    }

    /* General base + index (+ scale) + offset (+ symbol) form; the scale
     * operand presumably serves the X86 kScale addressing -- confirm. */
    MemOperand(uint32 size, RegOperand *baseOp, RegOperand *indexOp, ImmOperand *ofstOp, const MIRSymbol *mirSymbol,
               ImmOperand *scaleOp = nullptr)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(baseOp),
          indexOpnd(indexOp),
          offsetOpnd(ofstOp),
          scaleOpnd(scaleOp),
          symbol(mirSymbol)
    {
    }

    /* Base + OfstOperand in kAddrModeBOi, with optional pre/post write-back. */
    MemOperand(RegOperand *base, OfstOperand *offset, uint32 size, IndexingOption idxOpt = kIntact)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(base),
          indexOpnd(nullptr),
          offsetOpnd(offset),
          symbol(nullptr),
          addrMode(kAddrModeBOi),
          extend(0),
          idxOpt(idxOpt),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* Explicit addressing-mode form: mandatory base, optional index,
     * offset and symbol. */
    MemOperand(AArch64AddressingMode mode, uint32 size, RegOperand &base, RegOperand *index, ImmOperand *offset,
               const MIRSymbol *sym)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&base),
          indexOpnd(index),
          offsetOpnd(offset),
          symbol(sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* Like the previous form but with a mandatory index register, a mandatory
     * symbol, and an explicit noExtend flag. */
    MemOperand(AArch64AddressingMode mode, uint32 size, RegOperand &base, RegOperand &index, ImmOperand *offset,
               const MIRSymbol &sym, bool noExtend)
        : OperandVisitable(Operand::kOpdMem, size),
          baseOpnd(&base),
          indexOpnd(&index),
          offsetOpnd(offset),
          symbol(&sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(noExtend),
          isStackMem(false)
    {
    }

    /* Register-offset form: encodes the extension kind plus a one-hot shift
     * amount into `extend` (see ExtendInfo / ShiftAmount()). */
    MemOperand(AArch64AddressingMode mode, uint32 dSize, RegOperand &baseOpnd, RegOperand &indexOpnd, uint32 shift,
               bool isSigned = false)
        : OperandVisitable(Operand::kOpdMem, dSize),
          baseOpnd(&baseOpnd),
          indexOpnd(&indexOpnd),
          offsetOpnd(nullptr),
          symbol(nullptr),
          addrMode(mode),
          extend((isSigned ? kSignExtend : kUnsignedExtend) | (1U << shift)),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
    }

    /* Literal-pool form; asserts the mode really is kAddrModeLiteral. */
    MemOperand(AArch64AddressingMode mode, uint32 dSize, const MIRSymbol &sym)
        : OperandVisitable(Operand::kOpdMem, dSize),
          baseOpnd(nullptr),
          indexOpnd(nullptr),
          offsetOpnd(nullptr),
          symbol(&sym),
          addrMode(mode),
          extend(0),
          idxOpt(kIntact),
          noExtend(false),
          isStackMem(false)
    {
        DEBUG_ASSERT(mode == kAddrModeLiteral,
                     "This constructor version is supposed to be used with "
                     "AddrMode_Literal only");
    }

    /* Copy constructor: a SHALLOW copy -- sub-operand pointers are shared
     * with the source; use CloneTree() for a deep copy.  Marked explicit,
     * so copies must always be spelled out at the call site. */
    explicit MemOperand(const MemOperand &memOpnd)
        : OperandVisitable(Operand::kOpdMem, memOpnd.GetSize()),
          baseOpnd(memOpnd.baseOpnd),
          indexOpnd(memOpnd.indexOpnd),
          offsetOpnd(memOpnd.offsetOpnd),
          scaleOpnd(memOpnd.scaleOpnd),
          symbol(memOpnd.symbol),
          memoryOrder(memOpnd.memoryOrder),
          addrMode(memOpnd.addrMode),
          extend(memOpnd.extend),
          idxOpt(memOpnd.idxOpt),
          noExtend(memOpnd.noExtend),
          isStackMem(memOpnd.isStackMem),
          isStackArgMem(memOpnd.isStackArgMem),
          isVolatile(memOpnd.isVolatile)
    {
    }

    MemOperand &operator=(const MemOperand &memOpnd) = default;

    ~MemOperand() override = default;
    using OperandVisitable<MemOperand>::OperandVisitable;
1143 
    /* Deep clone: copies this operand into the allocator's mempool and then
     * recursively clones the base, index and offset sub-operands.
     * NOTE(review): scaleOpnd and symbol are only shallow-copied by the copy
     * constructor -- confirm deep cloning is intentionally not needed for them. */
    MemOperand *CloneTree(MapleAllocator &allocator) const override
    {
        auto *memOpnd = allocator.GetMemPool()->New<MemOperand>(*this);
        if (baseOpnd != nullptr) {
            memOpnd->SetBaseRegister(*baseOpnd->CloneTree(allocator));
        }
        if (indexOpnd != nullptr) {
            memOpnd->SetIndexRegister(*indexOpnd->CloneTree(allocator));
        }
        if (offsetOpnd != nullptr) {
            memOpnd->SetOffsetOperand(*offsetOpnd->CloneTree(allocator));
        }
        return memOpnd;
    }
1158 
    /* Shallow clone into the given mempool (sub-operands remain shared). */
    MemOperand *Clone(MemPool &memPool) const override
    {
        return memPool.Clone<MemOperand>(*this);
    }

    /* Intentionally a no-op for memory operands. */
    void Dump() const override {};
1165 
    /* Base register of the address computation; may be null. */
    RegOperand *GetBaseRegister() const
    {
        return baseOpnd;
    }

    void SetBaseRegister(RegOperand &regOpnd)
    {
        baseOpnd = &regOpnd;
    }

    /* Index register (register-offset modes); may be null. */
    RegOperand *GetIndexRegister() const
    {
        return indexOpnd;
    }

    void SetIndexRegister(RegOperand &regOpnd)
    {
        indexOpnd = &regOpnd;
    }

    /* Immediate offset; may be null. */
    ImmOperand *GetOffsetOperand() const
    {
        return offsetOpnd;
    }

    void SetOffsetOperand(ImmOperand &oftOpnd)
    {
        offsetOpnd = &oftOpnd;
    }

    /* Scale immediate (X86 kScale addressing); may be null. */
    const ImmOperand *GetScaleOperand() const
    {
        return scaleOpnd;
    }

    void SetScaleOperand(ImmOperand &scaOpnd)
    {
        scaleOpnd = &scaOpnd;
    }

    /* Symbol for symbol/literal addressing; may be null. */
    const MIRSymbol *GetSymbol() const
    {
        return symbol;
    }

    /* Accumulates (ORs in) memory-ordering flags; never clears existing ones. */
    void SetMemoryOrdering(uint32 memOrder)
    {
        memoryOrder |= memOrder;
    }

    bool HasMemoryOrdering(uint32 memOrder) const
    {
        return (memoryOrder & memOrder) != 0;
    }

    /* accessSize is transient state: set it right before every use. */
    void SetAccessSize(uint8 size)
    {
        accessSize = size;
    }

    uint8 GetAccessSize() const
    {
        return accessSize;
    }

    AArch64AddressingMode GetAddrMode() const
    {
        return addrMode;
    }

    /* NOTE(review): unchecked dereference -- only valid when symbol != nullptr. */
    const std::string &GetSymbolName() const
    {
        return GetSymbol()->GetName();
    }

    /* Whether this operand addresses the stack frame. */
    bool IsStackMem() const
    {
        return isStackMem;
    }

    void SetStackMem(bool isStack)
    {
        isStackMem = isStack;
    }

    /* Whether this operand addresses a stack-passed argument slot. */
    bool IsStackArgMem() const
    {
        return isStackArgMem;
    }

    void SetStackArgMem(bool isStackArg)
    {
        isStackArgMem = isStackArg;
    }
1260 
    /* Defined out of line; returns the offset sub-operand as a generic Operand. */
    Operand *GetOffset() const;

    /* Offset viewed as an OfstOperand.
     * NOTE(review): unchecked downcast -- assumes the stored offset (if any)
     * really is an OfstOperand; confirm at the call sites. */
    OfstOperand *GetOffsetImmediate() const
    {
        return static_cast<OfstOperand *>(GetOffsetOperand());
    }
1267 
    /* Returns N where alignment == 2^N, i.e. log2 of the access size in
     * bytes.  dSize must be a power of two in [8, 128] bits. */
    static uint32 GetImmediateOffsetAlignment(uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        /* dSize==8: 0, dSize==16 : 1, dSize==32: 2, dSize==64: 3 */
        return __builtin_ctz(dSize) - kBaseOffsetAlignment;
    }
1277 
    /* Largest positive scaled (pimm) immediate offset encodable for a
     * dSize-bit access; sizes above 64 bits are clamped to 64. */
    static int32 GetMaxPIMM(uint32 dSize)
    {
        dSize = dSize > k64BitSize ? k64BitSize : dSize;
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        uint32 alignment = GetImmediateOffsetAlignment(dSize);
        /* alignment is between kOffsetAlignmentOf8Bit and kOffsetAlignmentOf128Bit */
        DEBUG_ASSERT(alignment >= kOffsetAlignmentOf8Bit, "error val:alignment");
        DEBUG_ASSERT(alignment <= kOffsetAlignmentOf128Bit, "error val:alignment");
        return (kMaxPimm[alignment]);
    }
1290 
    /* Largest positive scaled immediate offset for a load/store *pair* of
     * dSize-bit accesses.  The pair table starts at the 32-bit alignment
     * entry, hence the `alignment - k2BitSize` rebase (k2BitSize presumably
     * equals kOffsetAlignmentOf32Bit == 2 -- confirm). */
    static int32 GetMaxPairPIMM(uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k32BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        uint32 alignment = GetImmediateOffsetAlignment(dSize);
        /* alignment is between kOffsetAlignmentOf32Bit and kOffsetAlignmentOf128Bit */
        DEBUG_ASSERT(alignment >= kOffsetAlignmentOf32Bit, "error val:alignment");
        DEBUG_ASSERT(alignment <= kOffsetAlignmentOf128Bit, "error val:alignment");
        return (kMaxPairPimm[alignment - k2BitSize]);
    }
1302 
    /* Whether the immediate offset violates the natural alignment required
     * for a dSize-bit access under the current addressing mode.  Byte
     * accesses and operands with no immediate offset are never misaligned.
     * NOTE(review): in kAddrModeBOi, any offset inside [kMinSimm32, kMaxSimm32]
     * is reported as aligned -- presumably because it stays encodable via an
     * unscaled-offset (LDUR/STUR-style) form; confirm against the emitter. */
    bool IsOffsetMisaligned(uint32 dSize) const
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        if (dSize == k8BitSize) {
            return false;
        }
        OfstOperand *ofstOpnd = GetOffsetImmediate();
        if (!ofstOpnd) {
            return false;
        }
        int64 ofstVal = ofstOpnd->GetOffsetValue();
        if (addrMode == kAddrModeBOi) {
            if (ofstVal >= kMinSimm32 && ofstVal <= kMaxSimm32) {
                return false;
            }
            /* misaligned iff the offset has bits below the access alignment set */
            return ((static_cast<uint32>(ofstOpnd->GetOffsetValue()) &
                     static_cast<uint32>((1U << static_cast<uint32>(GetImmediateOffsetAlignment(dSize))) - 1)) != 0);
        } else if (addrMode == kAddrModeLo12Li) {
            /* :lo12: relocation requires byte-size alignment of the offset */
            uint32 alignByte = (dSize / k8BitSize);
            return ((ofstVal % static_cast<int64>(alignByte)) != k0BitSize);
        }
        return false;
    }
1328 
    /* True if `offset` cannot be encoded as the signed immediate of a
     * load/store; isLDSTPair selects the (narrower) LDP/STP ranges.  Pair
     * offsets must additionally have their low bits clear -- the code masks
     * with k7BitSize / k3BitSize (presumably 7 and 3) for 64-/32-bit pairs. */
    static bool IsSIMMOffsetOutOfRange(int64 offset, bool is64bit, bool isLDSTPair)
    {
        if (!isLDSTPair) {
            return (offset < kMinSimm32 || offset > kMaxSimm32);
        }
        if (is64bit) {
            return (offset < kMinSimm64 || offset > kMaxSimm64Pair) || (static_cast<uint64>(offset) & k7BitSize);
        }
        return (offset < kMinSimm32 || offset > kMaxSimm32Pair) || (static_cast<uint64>(offset) & k3BitSize);
    }
1339 
    /* True if `offset` cannot be encoded as an unsigned scaled (pimm)
     * immediate for a dSize-bit access: it must lie in [0, GetMaxPIMM(dSize)]. */
    static bool IsPIMMOffsetOutOfRange(int32 offset, uint32 dSize)
    {
        DEBUG_ASSERT(dSize >= k8BitSize, "error val:dSize");
        DEBUG_ASSERT(dSize <= k128BitSize, "error val:dSize");
        DEBUG_ASSERT((dSize & (dSize - 1)) == 0, "error val:dSize");
        return (offset < 0 || offset > GetMaxPIMM(dSize));
    }
1347 
1348     bool operator<(const MemOperand &opnd) const
1349     {
1350         if (addrMode != opnd.addrMode) {
1351             return addrMode < opnd.addrMode;
1352         }
1353         if (GetBaseRegister() != opnd.GetBaseRegister()) {
1354             return GetBaseRegister() < opnd.GetBaseRegister();
1355         }
1356         if (GetIndexRegister() != opnd.GetIndexRegister()) {
1357             return GetIndexRegister() < opnd.GetIndexRegister();
1358         }
1359         if (GetOffsetOperand() != opnd.GetOffsetOperand()) {
1360             return GetOffsetOperand() < opnd.GetOffsetOperand();
1361         }
1362         if (GetSymbol() != opnd.GetSymbol()) {
1363             return GetSymbol() < opnd.GetSymbol();
1364         }
1365         if (GetSize() != opnd.GetSize()) {
1366             return GetSize() < opnd.GetSize();
1367         }
1368         if (extend != opnd.extend) {
1369             return extend < opnd.extend;
1370         }
1371         return false;
1372     }
1373 
1374     bool operator==(const MemOperand &opnd) const
1375     {
1376         return (GetSize() == opnd.GetSize()) && (addrMode == opnd.addrMode) && (extend == opnd.extend) &&
1377                (GetBaseRegister() == opnd.GetBaseRegister()) && (GetIndexRegister() == opnd.GetIndexRegister()) &&
1378                (GetSymbol() == opnd.GetSymbol()) && (GetOffsetOperand() == opnd.GetOffsetOperand()) &&
1379                (IsVolatile() == opnd.IsVolatile());
1380     }
1381 
GetMemVaryType()1382     VaryType GetMemVaryType() const
1383     {
1384         Operand *ofstOpnd = GetOffsetOperand();
1385         if (ofstOpnd != nullptr) {
1386             auto *opnd = static_cast<OfstOperand *>(ofstOpnd);
1387             return opnd->GetVary();
1388         }
1389         return kNotVary;
1390     }
1391 
    void SetAddrMode(AArch64AddressingMode val)
    {
        addrMode = val;
    }

    /* True for the base + (extended) register-offset addressing mode. */
    bool IsExtendedRegisterMode() const
    {
        return addrMode == kAddrModeBOrX;
    }

    /* Replaces the extension kind while preserving the current shift amount,
     * re-encoded as a one-hot bit in the low nibble (see ExtendInfo). */
    void UpdateExtend(ExtendInfo flag)
    {
        extend = flag | (1U << ShiftAmount());
    }

    bool SignedExtend() const
    {
        return IsExtendedRegisterMode() && ((extend & kSignExtend) != 0);
    }

    /* In register-offset mode, anything not sign-extended counts as
     * unsigned-extended. */
    bool UnsignedExtend() const
    {
        return IsExtendedRegisterMode() && !SignedExtend();
    }

    /* Decodes the one-hot shift encoding in the low nibble of `extend`. */
    uint32 ShiftAmount() const
    {
        uint32 scale = extend & 0xF;
        /* 8 is 1 << 3, 4 is 1 << 2, 2 is 1 << 1, 1 is 1 << 0 */
        return (scale == 8) ? 3 : ((scale == 4) ? 2 : ((scale == 2) ? 1 : 0));
    }
1423 
    /* True when an extend/shift suffix must be emitted for this operand
     * (any of the six ExtendInfo bits set and extension not suppressed). */
    bool ShouldEmitExtend() const
    {
        return !noExtend && ((extend & 0x3F) != 0);
    }

    IndexingOption GetIndexOpt() const
    {
        return idxOpt;
    }

    void SetIndexOpt(IndexingOption newidxOpt)
    {
        idxOpt = newidxOpt;
    }

    bool GetNoExtend() const
    {
        return noExtend;
    }

    void SetNoExtend(bool val)
    {
        noExtend = val;
    }

    uint32 GetExtend() const
    {
        return extend;
    }

    void SetExtend(uint32 val)
    {
        extend = val;
    }

    void SetVolatile(bool flag)
    {
        isVolatile = flag;
    }

    /* No base-register write-back. */
    bool IsIntactIndexed() const
    {
        return idxOpt == kIntact;
    }

    /* Base register updated after the access. */
    bool IsPostIndexed() const
    {
        return idxOpt == kPostIndex;
    }

    /* Base register updated before the access. */
    bool IsPreIndexed() const
    {
        return idxOpt == kPreIndex;
    }

    bool IsVolatile() const
    {
        return isVolatile;
    }

    /* Assembly spelling of the extend operator for this operand: "LSL" for a
     * 64-bit index register, otherwise "SXTW"/"UXTW" by the sign bit.
     * NOTE(review): dereferences GetIndexRegister() without a null check --
     * only call when an index register is present. */
    std::string GetExtendAsString() const
    {
        if (GetIndexRegister()->GetSize() == k64BitSize) {
            return std::string("LSL");
        }
        return ((extend & kSignExtend) != 0) ? std::string("SXTW") : std::string("UXTW");
    }

    /* Return true if given operand has the same base reg and offset with this. */
    bool Equals(Operand &op) const override;
    bool Equals(const MemOperand &op) const;
    bool Less(const Operand &right) const override;
1496 
1497 private:
1498     RegOperand *baseOpnd = nullptr;   /* base register */
1499     RegOperand *indexOpnd = nullptr;  /* index register */
1500     ImmOperand *offsetOpnd = nullptr; /* offset immediate */
1501     ImmOperand *scaleOpnd = nullptr;
1502     const MIRSymbol *symbol; /* AddrMode_Literal */
1503     uint32 memoryOrder = 0;
1504     uint8 accessSize = 0; /* temp, must be set right before use everytime. */
1505     AArch64AddressingMode addrMode = kAddrModeBOi;
1506     uint32 extend = false;           /* used with offset register ; AddrMode_B_OR_X */
1507     IndexingOption idxOpt = kIntact; /* used with offset immediate ; AddrMode_B_OI */
1508     bool noExtend = false;
1509     bool isStackMem = false;
1510     bool isStackArgMem = false;
1511     bool isVolatile = false;  // based on mem info from ME
1512 };
1513 
1514 class LabelOperand : public OperandVisitable<LabelOperand> {
1515 public:
LabelOperand(const char * parent,LabelIdx labIdx,MemPool & mp)1516     LabelOperand(const char *parent, LabelIdx labIdx, MemPool &mp)
1517         : OperandVisitable(kOpdBBAddress, 0), labelIndex(labIdx), parentFunc(parent, &mp), orderID(-1u)
1518     {
1519     }
1520 
1521     ~LabelOperand() override = default;
1522     using OperandVisitable<LabelOperand>::OperandVisitable;
1523 
CloneTree(MapleAllocator & allocator)1524     LabelOperand *CloneTree(MapleAllocator &allocator) const override
1525     {
1526         return allocator.GetMemPool()->New<LabelOperand>(*this);
1527     }
1528 
Clone(MemPool & memPool)1529     Operand *Clone(MemPool &memPool) const override
1530     {
1531         return memPool.Clone<LabelOperand>(*this);
1532     }
1533 
IsLabelOpnd()1534     bool IsLabelOpnd() const override
1535     {
1536         return true;
1537     }
1538 
GetLabelIndex()1539     LabelIdx GetLabelIndex() const
1540     {
1541         return labelIndex;
1542     }
1543 
GetParentFunc()1544     const MapleString &GetParentFunc() const
1545     {
1546         return parentFunc;
1547     }
1548 
GetLabelOrder()1549     LabelIDOrder GetLabelOrder() const
1550     {
1551         return orderID;
1552     }
1553 
SetLabelOrder(LabelIDOrder idx)1554     void SetLabelOrder(LabelIDOrder idx)
1555     {
1556         orderID = idx;
1557     }
1558 
1559     void Dump() const override;
1560 
Less(const Operand & right)1561     bool Less(const Operand &right) const override
1562     {
1563         if (&right == this) {
1564             return false;
1565         }
1566 
1567         /* For different type. */
1568         if (opndKind != right.GetKind()) {
1569             return opndKind < right.GetKind();
1570         }
1571 
1572         auto *rightOpnd = static_cast<const LabelOperand *>(&right);
1573 
1574         int32 nRes = strcmp(parentFunc.c_str(), rightOpnd->parentFunc.c_str());
1575         if (nRes == 0) {
1576             return labelIndex < rightOpnd->labelIndex;
1577         } else {
1578             return nRes < 0;
1579         }
1580     }
1581 
Equals(Operand & operand)1582     bool Equals(Operand &operand) const override
1583     {
1584         if (!operand.IsLabel()) {
1585             return false;
1586         }
1587         auto &op = static_cast<LabelOperand &>(operand);
1588         return ((&op == this) || (op.GetLabelIndex() == labelIndex));
1589     }
1590 
1591 protected:
1592     LabelIdx labelIndex;
1593     const MapleString parentFunc;
1594 
1595 private:
1596     /* this index records the order this label is defined during code emit. */
1597     LabelIDOrder orderID = -1u;
1598 };
1599 
1600 class ListOperand : public OperandVisitable<ListOperand> {
1601 public:
ListOperand(MapleAllocator & allocator)1602     explicit ListOperand(MapleAllocator &allocator)
1603         : OperandVisitable(Operand::kOpdList, 0), opndList(allocator.Adapter())
1604     {
1605     }
1606 
1607     ~ListOperand() override = default;
1608 
1609     using OperandVisitable<ListOperand>::OperandVisitable;
1610 
CloneTree(MapleAllocator & allocator)1611     ListOperand *CloneTree(MapleAllocator &allocator) const override
1612     {
1613         auto *listOpnd = allocator.GetMemPool()->New<ListOperand>(allocator);
1614         for (auto regOpnd : opndList) {
1615             listOpnd->PushOpnd(*regOpnd->CloneTree(allocator));
1616         }
1617         return listOpnd;
1618     }
1619 
Clone(MemPool & memPool)1620     Operand *Clone(MemPool &memPool) const override
1621     {
1622         return memPool.Clone<ListOperand>(*this);
1623     }
1624 
PushOpnd(RegOperand & opnd)1625     void PushOpnd(RegOperand &opnd)
1626     {
1627         opndList.push_back(&opnd);
1628     }
1629 
GetOperands()1630     MapleList<RegOperand *> &GetOperands()
1631     {
1632         return opndList;
1633     }
1634 
GetOperands()1635     const MapleList<RegOperand *> &GetOperands() const
1636     {
1637         return opndList;
1638     }
1639 
Dump()1640     void Dump() const override
1641     {
1642         for (auto it = opndList.begin(); it != opndList.end();) {
1643             (*it)->Dump();
1644             LogInfo::MapleLogger() << (++it == opndList.end() ? "" : " ,");
1645         }
1646     }
1647 
Less(const Operand & right)1648     bool Less(const Operand &right) const override
1649     {
1650         /* For different type. */
1651         if (opndKind != right.GetKind()) {
1652             return opndKind < right.GetKind();
1653         }
1654 
1655         DEBUG_ASSERT(false, "We don't need to compare list operand.");
1656         return false;
1657     }
1658 
Equals(Operand & operand)1659     bool Equals(Operand &operand) const override
1660     {
1661         if (!operand.IsList()) {
1662             return false;
1663         }
1664         auto &op = static_cast<ListOperand &>(operand);
1665         return (&op == this);
1666     }
1667 
1668 protected:
1669     MapleList<RegOperand *> opndList;
1670 };
1671 
1672 /* representing for global variables address */
1673 class StImmOperand : public OperandVisitable<StImmOperand> {
1674 public:
StImmOperand(const MIRSymbol & symbol,int64 offset,int32 relocs)1675     StImmOperand(const MIRSymbol &symbol, int64 offset, int32 relocs)
1676         : OperandVisitable(kOpdStImmediate, 0), symbol(&symbol), offset(offset), relocs(relocs)
1677     {
1678     }
1679 
1680     ~StImmOperand() override = default;
1681     using OperandVisitable<StImmOperand>::OperandVisitable;
1682 
CloneTree(MapleAllocator & allocator)1683     StImmOperand *CloneTree(MapleAllocator &allocator) const override
1684     {
1685         // const MIRSymbol is not changed in cg, so we can do shallow copy
1686         return allocator.GetMemPool()->New<StImmOperand>(*this);
1687     }
1688 
Clone(MemPool & memPool)1689     Operand *Clone(MemPool &memPool) const override
1690     {
1691         return memPool.Clone<StImmOperand>(*this);
1692     }
1693 
GetSymbol()1694     const MIRSymbol *GetSymbol() const
1695     {
1696         return symbol;
1697     }
1698 
GetName()1699     const std::string &GetName() const
1700     {
1701         return symbol->GetName();
1702     }
1703 
GetOffset()1704     int64 GetOffset() const
1705     {
1706         return offset;
1707     }
1708 
SetOffset(int64 newOffset)1709     void SetOffset(int64 newOffset)
1710     {
1711         offset = newOffset;
1712     }
1713 
GetRelocs()1714     int32 GetRelocs() const
1715     {
1716         return relocs;
1717     }
1718 
1719     bool operator==(const StImmOperand &opnd) const
1720     {
1721         return (symbol == opnd.symbol && offset == opnd.offset && relocs == opnd.relocs);
1722     }
1723 
1724     bool operator<(const StImmOperand &opnd) const
1725     {
1726         return (symbol < opnd.symbol || (symbol == opnd.symbol && offset < opnd.offset) ||
1727                 (symbol == opnd.symbol && offset == opnd.offset && relocs < opnd.relocs));
1728     }
1729 
1730     bool Less(const Operand &right) const override;
1731 
Dump()1732     void Dump() const override
1733     {
1734         CHECK_FATAL(false, "dont run here");
1735     }
1736 
1737 private:
1738     const MIRSymbol *symbol;
1739     int64 offset;
1740     int32 relocs;
1741 };
1742 
1743 class ExtendShiftOperand : public OperandVisitable<ExtendShiftOperand> {
1744 public:
1745     /* if and only if at least one register is WSP, ARM Recommends use of the LSL
1746      * operator name rathe than UXTW */
1747     enum ExtendOp : uint8 {
1748         kUndef,
1749         kUXTB,
1750         kUXTH,
1751         kUXTW, /* equal to lsl in 32bits */
1752         kUXTX, /* equal to lsl in 64bits */
1753         kSXTB,
1754         kSXTH,
1755         kSXTW,
1756         kSXTX,
1757     };
1758 
ExtendShiftOperand(ExtendOp op,uint32 amt,int32 bitLen)1759     ExtendShiftOperand(ExtendOp op, uint32 amt, int32 bitLen)
1760         : OperandVisitable(Operand::kOpdExtend, bitLen), extendOp(op), shiftAmount(amt)
1761     {
1762     }
1763 
1764     ~ExtendShiftOperand() override = default;
1765     using OperandVisitable<ExtendShiftOperand>::OperandVisitable;
1766 
CloneTree(MapleAllocator & allocator)1767     ExtendShiftOperand *CloneTree(MapleAllocator &allocator) const override
1768     {
1769         return allocator.GetMemPool()->New<ExtendShiftOperand>(*this);
1770     }
1771 
Clone(MemPool & memPool)1772     Operand *Clone(MemPool &memPool) const override
1773     {
1774         return memPool.Clone<ExtendShiftOperand>(*this);
1775     }
1776 
GetShiftAmount()1777     uint32 GetShiftAmount() const
1778     {
1779         return shiftAmount;
1780     }
1781 
GetExtendOp()1782     ExtendOp GetExtendOp() const
1783     {
1784         return extendOp;
1785     }
1786 
GetValue()1787     uint32 GetValue() const
1788     {
1789         return shiftAmount;
1790     }
1791 
1792     bool Less(const Operand &right) const override;
1793 
Dump()1794     void Dump() const override
1795     {
1796         CHECK_FATAL(false, "dont run here");
1797     }
1798 
1799 private:
1800     ExtendOp extendOp;
1801     uint32 shiftAmount;
1802 };
1803 
1804 class BitShiftOperand : public OperandVisitable<BitShiftOperand> {
1805 public:
1806     enum ShiftOp : uint8 {
1807         kUndef,
1808         kLSL, /* logical shift left */
1809         kLSR, /* logical shift right */
1810         kASR, /* arithmetic shift right */
1811     };
1812 
1813     /* bitlength is equal to 5 or 6 */
BitShiftOperand(ShiftOp op,uint32 amt,int32 bitLen)1814     BitShiftOperand(ShiftOp op, uint32 amt, int32 bitLen)
1815         : OperandVisitable(Operand::kOpdShift, bitLen), shiftOp(op), shiftAmount(amt)
1816     {
1817     }
1818 
1819     ~BitShiftOperand() override = default;
1820     using OperandVisitable<BitShiftOperand>::OperandVisitable;
1821 
CloneTree(MapleAllocator & allocator)1822     BitShiftOperand *CloneTree(MapleAllocator &allocator) const override
1823     {
1824         return allocator.GetMemPool()->New<BitShiftOperand>(*this);
1825     }
1826 
Clone(MemPool & memPool)1827     Operand *Clone(MemPool &memPool) const override
1828     {
1829         return memPool.Clone<BitShiftOperand>(*this);
1830     }
1831 
Less(const Operand & right)1832     bool Less(const Operand &right) const override
1833     {
1834         if (&right == this) {
1835             return false;
1836         }
1837 
1838         /* For different type. */
1839         if (GetKind() != right.GetKind()) {
1840             return GetKind() < right.GetKind();
1841         }
1842 
1843         const BitShiftOperand *rightOpnd = static_cast<const BitShiftOperand *>(&right);
1844 
1845         /* The same type. */
1846         if (shiftOp != rightOpnd->shiftOp) {
1847             return shiftOp < rightOpnd->shiftOp;
1848         }
1849         return shiftAmount < rightOpnd->shiftAmount;
1850     }
1851 
GetShiftAmount()1852     uint32 GetShiftAmount() const
1853     {
1854         return shiftAmount;
1855     }
1856 
GetShiftOp()1857     ShiftOp GetShiftOp() const
1858     {
1859         return shiftOp;
1860     }
1861 
GetValue()1862     uint32 GetValue() const
1863     {
1864         return GetShiftAmount();
1865     }
1866 
Dump()1867     void Dump() const override
1868     {
1869         CHECK_FATAL(false, "dont run here");
1870     }
1871 
1872 private:
1873     ShiftOp shiftOp;
1874     uint32 shiftAmount;
1875 };
1876 
1877 class CommentOperand : public OperandVisitable<CommentOperand> {
1878 public:
CommentOperand(const char * str,MemPool & memPool)1879     CommentOperand(const char *str, MemPool &memPool) : OperandVisitable(Operand::kOpdString, 0), comment(str, &memPool)
1880     {
1881     }
1882 
CommentOperand(const std::string & str,MemPool & memPool)1883     CommentOperand(const std::string &str, MemPool &memPool)
1884         : OperandVisitable(Operand::kOpdString, 0), comment(str, &memPool)
1885     {
1886     }
1887 
1888     ~CommentOperand() override = default;
1889     using OperandVisitable<CommentOperand>::OperandVisitable;
1890 
GetComment()1891     const MapleString &GetComment() const
1892     {
1893         return comment;
1894     }
1895 
CloneTree(MapleAllocator & allocator)1896     CommentOperand *CloneTree(MapleAllocator &allocator) const override
1897     {
1898         return allocator.GetMemPool()->New<CommentOperand>(*this);
1899     }
1900 
Clone(MemPool & memPool)1901     Operand *Clone(MemPool &memPool) const override
1902     {
1903         return memPool.Clone<CommentOperand>(*this);
1904     }
1905 
IsCommentOpnd()1906     bool IsCommentOpnd() const override
1907     {
1908         return true;
1909     }
1910 
Less(const Operand & right)1911     bool Less(const Operand &right) const override
1912     {
1913         /* For different type. */
1914         return GetKind() < right.GetKind();
1915     }
1916 
Dump()1917     void Dump() const override
1918     {
1919         LogInfo::MapleLogger() << "# ";
1920         if (!comment.empty()) {
1921             LogInfo::MapleLogger() << comment;
1922         }
1923     }
1924 
1925 private:
1926     const MapleString comment;
1927 };
1928 
1929 using StringOperand = CommentOperand;
1930 
1931 class ListConstraintOperand : public OperandVisitable<ListConstraintOperand> {
1932 public:
ListConstraintOperand(MapleAllocator & allocator)1933     explicit ListConstraintOperand(MapleAllocator &allocator)
1934         : OperandVisitable(Operand::kOpdString, 0), stringList(allocator.Adapter()) {};
1935 
1936     ~ListConstraintOperand() override = default;
1937     using OperandVisitable<ListConstraintOperand>::OperandVisitable;
1938 
Dump()1939     void Dump() const override
1940     {
1941         for (auto *str : stringList) {
1942             LogInfo::MapleLogger() << "(" << str->GetComment().c_str() << ")";
1943         }
1944     }
1945 
CloneTree(MapleAllocator & allocator)1946     ListConstraintOperand *CloneTree(MapleAllocator &allocator) const override
1947     {
1948         auto *constraintOpnd = allocator.GetMemPool()->New<ListConstraintOperand>(allocator);
1949         for (auto stringOpnd : stringList) {
1950             constraintOpnd->stringList.emplace_back(stringOpnd->CloneTree(allocator));
1951         }
1952         return constraintOpnd;
1953     }
1954 
Clone(MemPool & memPool)1955     Operand *Clone(MemPool &memPool) const override
1956     {
1957         return memPool.Clone<ListConstraintOperand>(*this);
1958     }
1959 
Less(const Operand & right)1960     bool Less(const Operand &right) const override
1961     {
1962         /* For different type. */
1963         if (opndKind != right.GetKind()) {
1964             return opndKind < right.GetKind();
1965         }
1966 
1967         DEBUG_ASSERT(false, "We don't need to compare list operand.");
1968         return false;
1969     }
1970 
1971     MapleVector<StringOperand *> stringList;
1972 };
1973 
1974 /* for cg ssa analysis */
1975 class PhiOperand : public OperandVisitable<PhiOperand> {
1976 public:
PhiOperand(MapleAllocator & allocator)1977     explicit PhiOperand(MapleAllocator &allocator) : OperandVisitable(Operand::kOpdPhi, 0), phiList(allocator.Adapter())
1978     {
1979     }
1980 
1981     ~PhiOperand() override = default;
1982     using OperandVisitable<PhiOperand>::OperandVisitable;
1983 
CloneTree(MapleAllocator & allocator)1984     PhiOperand *CloneTree(MapleAllocator &allocator) const override
1985     {
1986         auto *phiOpnd = allocator.GetMemPool()->New<PhiOperand>(allocator);
1987         for (auto phiPair : phiList) {
1988             phiOpnd->InsertOpnd(phiPair.first, *phiPair.second->CloneTree(allocator));
1989         }
1990         return phiOpnd;
1991     }
1992 
Clone(MemPool & memPool)1993     Operand *Clone(MemPool &memPool) const override
1994     {
1995         return memPool.Clone<PhiOperand>(*this);
1996     }
1997 
Dump()1998     void Dump() const override
1999     {
2000         CHECK_FATAL(false, "NIY");
2001     }
2002 
InsertOpnd(uint32 bbId,RegOperand & phiParam)2003     void InsertOpnd(uint32 bbId, RegOperand &phiParam)
2004     {
2005         DEBUG_ASSERT(!phiList.count(bbId), "cannot insert duplicate operand");
2006         (void)phiList.emplace(std::pair(bbId, &phiParam));
2007     }
2008 
UpdateOpnd(uint32 bbId,uint32 newId,RegOperand & phiParam)2009     void UpdateOpnd(uint32 bbId, uint32 newId, RegOperand &phiParam)
2010     {
2011         (void)phiList.emplace(std::pair(newId, &phiParam));
2012         phiList.erase(bbId);
2013     }
2014 
GetOperands()2015     MapleMap<uint32, RegOperand *> &GetOperands()
2016     {
2017         return phiList;
2018     }
2019 
2020     uint32 GetLeastCommonValidBit() const;
2021 
2022     bool IsRedundancy() const;
2023 
Less(const Operand & right)2024     bool Less(const Operand &right) const override
2025     {
2026         /* For different type. */
2027         if (opndKind != right.GetKind()) {
2028             return opndKind < right.GetKind();
2029         }
2030         DEBUG_ASSERT(false, "We don't need to compare list operand.");
2031         return false;
2032     }
2033 
Equals(Operand & operand)2034     bool Equals(Operand &operand) const override
2035     {
2036         if (!operand.IsPhi()) {
2037             return false;
2038         }
2039         auto &op = static_cast<PhiOperand &>(operand);
2040         return (&op == this);
2041     }
2042 
2043 protected:
2044     MapleMap<uint32, RegOperand *> phiList; /* ssa-operand && BBId */
2045 };
2046 
2047 /* Use StImmOperand instead? */
2048 class FuncNameOperand : public OperandVisitable<FuncNameOperand> {
2049 public:
FuncNameOperand(const MIRSymbol & fsym)2050     explicit FuncNameOperand(const MIRSymbol &fsym) : OperandVisitable(kOpdBBAddress, 0), symbol(&fsym) {}
2051 
~FuncNameOperand()2052     ~FuncNameOperand() override
2053     {
2054         symbol = nullptr;
2055     }
2056     using OperandVisitable<FuncNameOperand>::OperandVisitable;
2057 
GetName()2058     const std::string &GetName() const
2059     {
2060         return symbol->GetName();
2061     }
2062 
IsFuncNameOpnd()2063     bool IsFuncNameOpnd() const override
2064     {
2065         return true;
2066     }
2067 
GetFunctionSymbol()2068     const MIRSymbol *GetFunctionSymbol() const
2069     {
2070         return symbol;
2071     }
2072 
SetFunctionSymbol(const MIRSymbol & fsym)2073     void SetFunctionSymbol(const MIRSymbol &fsym)
2074     {
2075         symbol = &fsym;
2076     }
2077 
CloneTree(MapleAllocator & allocator)2078     FuncNameOperand *CloneTree(MapleAllocator &allocator) const override
2079     {
2080         // const MIRSymbol is not changed in cg, so we can do shallow copy
2081         return allocator.GetMemPool()->New<FuncNameOperand>(*this);
2082     }
2083 
Clone(MemPool & memPool)2084     Operand *Clone(MemPool &memPool) const override
2085     {
2086         return memPool.New<FuncNameOperand>(*this);
2087     }
2088 
Less(const Operand & right)2089     bool Less(const Operand &right) const override
2090     {
2091         if (&right == this) {
2092             return false;
2093         }
2094         /* For different type. */
2095         if (GetKind() != right.GetKind()) {
2096             return GetKind() < right.GetKind();
2097         }
2098 
2099         auto *rightOpnd = static_cast<const FuncNameOperand *>(&right);
2100 
2101         return static_cast<const void *>(symbol) < static_cast<const void *>(rightOpnd->symbol);
2102     }
2103 
Dump()2104     void Dump() const override
2105     {
2106         LogInfo::MapleLogger() << GetName();
2107     }
2108 
2109 private:
2110     const MIRSymbol *symbol;
2111 };
2112 
/* Property bit masks stored in OpndDesc::property. Each operand kind owns a
 * byte-sized range so the flags can share one 64-bit word. */
namespace operand {
/* bit 0-7 for common */
enum CommOpndDescProp : maple::uint64 { kIsDef = 1ULL, kIsUse = (1ULL << 1), kIsVector = (1ULL << 2) };

/* bit 8-15 for reg */
enum RegOpndDescProp : maple::uint64 {
    kInt = (1ULL << 8),
    kFloat = (1ULL << 9),
    kRegTyCc = (1ULL << 10),
    kRegTyVary = (1ULL << 11),
};

/* bit 16-23 for imm */
/* No immediate-specific properties defined yet; the bit range is reserved. */
enum ImmOpndDescProp : maple::uint64 {};

/* bit 24-31 for mem */
enum MemOpndDescProp : maple::uint64 {
    kMemLow12 = (1ULL << 24),
    kLiteralLow12 = kMemLow12, /* same bit, reinterpreted for kOpdStImmediate */
    kIsLoadLiteral = (1ULL << 25)
};
}  // namespace operand
2135 
2136 class OpndDesc {
2137 public:
OpndDesc(Operand::OperandType t,maple::uint64 p,maple::uint32 s)2138     OpndDesc(Operand::OperandType t, maple::uint64 p, maple::uint32 s) : opndType(t), property(p), size(s) {}
2139     virtual ~OpndDesc() = default;
2140 
GetOperandType()2141     Operand::OperandType GetOperandType() const
2142     {
2143         return opndType;
2144     }
2145 
GetSize()2146     maple::uint32 GetSize() const
2147     {
2148         return size;
2149     }
2150 
IsImm()2151     bool IsImm() const
2152     {
2153         return opndType == Operand::kOpdImmediate;
2154     }
2155 
IsRegister()2156     bool IsRegister() const
2157     {
2158         return opndType == Operand::kOpdRegister;
2159     }
2160 
IsMem()2161     bool IsMem() const
2162     {
2163         return opndType == Operand::kOpdMem;
2164     }
2165 
IsRegDef()2166     bool IsRegDef() const
2167     {
2168         return opndType == Operand::kOpdRegister && (property & operand::kIsDef);
2169     }
2170 
IsRegUse()2171     bool IsRegUse() const
2172     {
2173         return opndType == Operand::kOpdRegister && (property & operand::kIsUse);
2174     }
2175 
IsDef()2176     bool IsDef() const
2177     {
2178         return (property & operand::kIsDef) != 0;
2179     }
2180 
IsUse()2181     bool IsUse() const
2182     {
2183         return (property & operand::kIsUse) != 0;
2184     }
2185 
IsMemLow12()2186     bool IsMemLow12() const
2187     {
2188         return IsMem() && (property & operand::kMemLow12);
2189     }
2190 
IsLiteralLow12()2191     bool IsLiteralLow12() const
2192     {
2193         return opndType == Operand::kOpdStImmediate && (property & operand::kLiteralLow12);
2194     }
2195 
IsLoadLiteral()2196     bool IsLoadLiteral() const
2197     {
2198         return (property & operand::kIsLoadLiteral) != 0;
2199     }
2200 
IsVectorOperand()2201     bool IsVectorOperand() const
2202     {
2203         return (property & operand::kIsVector);
2204     }
2205 
2206 #define DEFINE_MOP(op, ...) static const OpndDesc op;
2207 #include "operand.def"
2208 #undef DEFINE_MOP
2209 
2210 private:
2211     Operand::OperandType opndType;
2212     maple::uint64 property;
2213     maple::uint32 size;
2214 };
2215 
/// Condition-code operand for conditional instructions.
class CondOperand : public OperandVisitable<CondOperand> {
public:
    explicit CondOperand(maplebe::ConditionCode cc) : OperandVisitable(Operand::kOpdCond, k4ByteSize), cc(cc) {}

    ~CondOperand() override = default;
    using OperandVisitable<CondOperand>::OperandVisitable;

    CondOperand *CloneTree(MapleAllocator &allocator) const override
    {
        return allocator.GetMemPool()->New<CondOperand>(*this);
    }

    Operand *Clone(MemPool &memPool) const override
    {
        return memPool.New<CondOperand>(cc);
    }

    /// The wrapped condition code.
    ConditionCode GetCode() const
    {
        return cc;
    }

    // Defined out of line (not in this header).
    bool Less(const Operand &right) const override;

    /// Condition operands are printed by the emitter, never dumped
    /// standalone; reaching here is a bug.
    void Dump() const override
    {
        CHECK_FATAL(false, "dont run here");
    }

    /* Printable names for condition codes, indexed by ConditionCode. */
    static const char *ccStrs[kCcLast];

private:
    ConditionCode cc;
};
2250 
/// Base visitor for dumping operands of every concrete kind; subclasses
/// provide the per-kind Visit overloads declared via OperandVisitors.
class OpndDumpVisitor : public OperandVisitorBase,
                        public OperandVisitors<RegOperand, ImmOperand, MemOperand, LabelOperand, FuncNameOperand,
                                               ListOperand, StImmOperand, CondOperand, CommentOperand, BitShiftOperand,
                                               ExtendShiftOperand, PhiOperand> {
public:
    explicit OpndDumpVisitor(const OpndDesc &operandDesc) : opndDesc(&operandDesc) {}
    // NOTE(review): spelled `virtual` rather than `override`; whether
    // OperandVisitorBase declares a virtual destructor is not visible here
    // -- confirm in visitor_common.h before changing.
    virtual ~OpndDumpVisitor()
    {
        opndDesc = nullptr;
    }

protected:
    /// Opening delimiter written before an operand's dump output.
    virtual void DumpOpndPrefix()
    {
        LogInfo::MapleLogger() << " (opnd:";
    }
    /// Closing delimiter written after an operand's dump output.
    virtual void DumpOpndSuffix()
    {
        LogInfo::MapleLogger() << " )";
    }
    /// Prints the operand's bit width.
    void DumpSize(const Operand &opnd) const
    {
        LogInfo::MapleLogger() << " [size:" << opnd.GetSize() << "]";
    }
    /// Tags operands for which Operand::IsReference() holds.
    void DumpReferenceInfo(const Operand &opnd) const
    {
        if (opnd.IsReference()) {
            LogInfo::MapleLogger() << "[is_ref]";
        }
    }
    const OpndDesc *GetOpndDesc() const
    {
        return opndDesc;
    }

private:
    const OpndDesc *opndDesc;  // non-owning; caller keeps the OpndDesc alive
};
2289 } /* namespace maplebe */
2290 
2291 #endif /* MAPLEBE_INCLUDE_CG_OPERAND_H */
2292