/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "aarch64_ebo.h"
#include "aarch64_cg.h"
#include "mpl_logging.h"
#include "aarch64_utils.h"

namespace maplebe {
using namespace maple;
#define EBO_DUMP CG_DEBUG_FUNC(*cgFunc)

enum AArch64Ebo::ExtOpTable : uint8 { AND, SXTB, SXTH, SXTW, ZXTB, ZXTH, ZXTW, ExtTableSize };

namespace {

using PairMOperator = MOperator[2];

constexpr uint8 insPairsNum = 5;

PairMOperator extInsnPairTable[ExtTableSize][insPairsNum] = {
    /* {origMop, newMop} */
    {{MOP_wldrb, MOP_wldrb},
     {MOP_wldrsh, MOP_wldrb},
     {MOP_wldrh, MOP_wldrb},
     {MOP_xldrsw, MOP_wldrb},
     {MOP_wldr, MOP_wldrb}}, /* AND */
    {{MOP_wldrb, MOP_wldrsb},
     {MOP_wldr, MOP_wldrsb},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef}}, /* SXTB */
    {{MOP_wldrh, MOP_wldrsh},
     {MOP_wldrb, MOP_wldrb},
     {MOP_wldrsb, MOP_wldrsb},
     {MOP_wldrsh, MOP_wldrsh},
     {MOP_undef, MOP_undef}}, /* SXTH */
    {{MOP_wldrh, MOP_wldrh},
     {MOP_wldrsh, MOP_wldrsh},
     {MOP_wldrb, MOP_wldrb},
     {MOP_wldrsb, MOP_wldrsb},
     {MOP_wldr, MOP_xldrsw}}, /* SXTW */
    {{MOP_wldrb, MOP_wldrb},
     {MOP_wldrsb, MOP_wldrb},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef}}, /* ZXTB */
    {{MOP_wldrh, MOP_wldrh},
     {MOP_wldrb, MOP_wldrb},
     {MOP_wldr, MOP_wldrh},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef}}, /* ZXTH */
    {{MOP_wldr, MOP_wldr},
     {MOP_wldrh, MOP_wldrh},
     {MOP_wldrb, MOP_wldrb},
     {MOP_undef, MOP_undef},
     {MOP_undef, MOP_undef}} /* ZXTW */
};
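
/*
 * Rows are indexed by the extension applied to a loaded value; each pair maps
 * the load that produced the value (origMop) to the extending load that makes
 * the extension redundant (newMop). Illustrative sketch for the SXTH row:
 *   ldrh  w1, [x0]   // MOP_wldrh
 *   sxth  w2, w1
 *   ==>
 *   ldrsh w1, [x0]   // MOP_wldrsh; the sxth then degenerates to a mov
 * (see CombineExtensionAndLoad below)
 */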

}  // anonymous namespace

MOperator AArch64Ebo::ExtLoadSwitchBitSize(MOperator lowMop) const
{
    switch (lowMop) {
        case MOP_wldrsb:
            return MOP_xldrsb;
        case MOP_wldrsh:
            return MOP_xldrsh;
        default:
            break;
    }
    return lowMop;
}

bool AArch64Ebo::IsFmov(const Insn &insn) const
{
    return ((insn.GetMachineOpcode() >= MOP_xvmovsr) && (insn.GetMachineOpcode() <= MOP_xvmovrd));
}

bool AArch64Ebo::IsAdd(const Insn &insn) const
{
    return ((insn.GetMachineOpcode() >= MOP_xaddrrr) && (insn.GetMachineOpcode() <= MOP_ssub));
}

bool AArch64Ebo::IsInvalidReg(const RegOperand &opnd) const
{
    return (opnd.GetRegisterNumber() == AArch64reg::kRinvalid);
}

bool AArch64Ebo::IsZeroRegister(const Operand &opnd) const
{
    if (!opnd.IsRegister()) {
        return false;
    }
    const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
    return regOpnd->GetRegisterNumber() == RZR;
}

bool AArch64Ebo::IsConstantImmOrReg(const Operand &opnd) const
{
    if (opnd.IsConstImmediate()) {
        return true;
    }
    return IsZeroRegister(opnd);
}

bool AArch64Ebo::IsClinitCheck(const Insn &insn) const
{
    MOperator mOp = insn.GetMachineOpcode();
    return ((mOp == MOP_clinit) || (mOp == MOP_clinit_tail));
}

bool AArch64Ebo::IsDecoupleStaticOp(Insn &insn) const
{
    if (insn.GetMachineOpcode() == MOP_lazy_ldr_static) {
        Operand *opnd1 = &insn.GetOperand(kInsnSecondOpnd);
        CHECK_FATAL(opnd1 != nullptr, "opnd1 is null!");
        auto *stImmOpnd = static_cast<StImmOperand *>(opnd1);
        return StringUtils::StartsWith(stImmOpnd->GetName(), namemangler::kDecoupleStaticValueStr);
    }
    return false;
}

static bool IsYieldPoint(Insn &insn)
{
    /*
     * It is a yieldpoint if loading from a dedicated
     * register holding the polling page address:
     * ldr  wzr, [RYP]
     */
    if (insn.IsLoad() && !insn.IsLoadLabel()) {
        auto mem = static_cast<MemOperand *>(insn.GetMemOpnd());
        return (mem != nullptr && mem->GetBaseRegister() != nullptr &&
                mem->GetBaseRegister()->GetRegisterNumber() == RYP);
    }
    return false;
}

/* return true if insn is globally needed */
bool AArch64Ebo::IsGlobalNeeded(Insn &insn) const
{
    /* Calls may have side effects. */
    if (insn.IsCall()) {
        return true;
    }

    /* Intrinsic calls should not be removed. */
    if (insn.IsSpecialIntrinsic()) {
        return true;
    }

    /* Clinit checks should not be removed. */
    if (IsClinitCheck(insn)) {
        return true;
    }

    /* Yieldpoints should not be removed by the optimizer. */
    if (cgFunc->GetCG()->GenYieldPoint() && IsYieldPoint(insn)) {
        return true;
    }

    std::set<uint32> defRegs = insn.GetDefRegs();
    for (auto defRegNo : defRegs) {
        if (defRegNo == RZR || defRegNo == RSP || (defRegNo == RFP && CGOptions::UseFramePointer())) {
            return true;
        }
    }
    return false;
}

/* on aarch64, resOp will not be both defined and used at the same time */
bool AArch64Ebo::ResIsNotDefAndUse(Insn &insn) const
{
    (void)insn;
    return true;
}

/* Return true if opnd is live out of bb. */
bool AArch64Ebo::LiveOutOfBB(const Operand &opnd, const BB &bb) const
{
    CHECK_FATAL(opnd.IsRegister(), "expect register here.");
    /* when optimize_level < 2, live range analysis is not performed. */
    if (live == nullptr) {
        return false;
    }
    return bb.GetLiveOut()->TestBit(static_cast<const RegOperand *>(&opnd)->GetRegisterNumber());
}

bool AArch64Ebo::IsLastAndBranch(BB &bb, Insn &insn) const
{
    return (bb.GetLastInsn() == &insn) && insn.IsBranch();
}

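/*
 * Illustrative case for IsSameRedefine (a sketch, not exhaustive): within one
 * bb, a register redefined with the identical immediate is redundant:
 *   mov w1, #5    // sameInfo->insn
 *   ...           // w1 not redefined in between
 *   mov w1, #5    // insn: same mop, same immediate -> fold into the first def
 */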
bool AArch64Ebo::IsSameRedefine(BB &bb, Insn &insn, OpndInfo &opndInfo) const
{
    MOperator mOp = insn.GetMachineOpcode();
    if (!(mOp == MOP_wmovri32 || mOp == MOP_xmovri64 || mOp == MOP_wsfmovri || mOp == MOP_xdfmovri)) {
        return false;
    }
    OpndInfo *sameInfo = opndInfo.same;
    if (sameInfo == nullptr || sameInfo->insn == nullptr || sameInfo->bb != &bb ||
        sameInfo->insn->GetMachineOpcode() != mOp) {
        return false;
    }
    Insn *prevInsn = sameInfo->insn;
    if (!prevInsn->GetOperand(kInsnSecondOpnd).IsImmediate()) {
        return false;
    }
    auto &sameOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
    auto &opnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
    if (sameOpnd.GetValue() == opnd.GetValue()) {
        sameInfo->refCount += opndInfo.refCount;
        return true;
    }
    return false;
}

const RegOperand &AArch64Ebo::GetRegOperand(const Operand &opnd) const
{
    CHECK_FATAL(opnd.IsRegister(), "aarch64 should not have regShiftOp! opnd is not a register!");
    const auto &res = static_cast<const RegOperand &>(opnd);
    return res;
}

/* Create information for localOpnd from its defining insn currentInsn. */
OpndInfo *AArch64Ebo::OperandInfoDef(BB &currentBB, Insn &currentInsn, Operand &localOpnd)
{
    int32 hashVal = localOpnd.IsRegister() ? -1 : ComputeOpndHash(localOpnd);
    OpndInfo *opndInfoPrev = GetOpndInfo(localOpnd, hashVal);
    OpndInfo *opndInfo = GetNewOpndInfo(currentBB, &currentInsn, localOpnd, hashVal);
    if (localOpnd.IsMemoryAccessOperand()) {
        MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(opndInfo);
        MemOperand *mem = static_cast<MemOperand *>(&localOpnd);
        Operand *base = mem->GetBaseRegister();
        Operand *offset = mem->GetOffset();
        if (base != nullptr && base->IsRegister()) {
            memInfo->SetBaseInfo(*OperandInfoUse(currentBB, *base));
        }
        if (offset != nullptr && offset->IsRegister()) {
            CHECK_FATAL(OperandInfoUse(currentBB, *offset) != nullptr, "nullptr check");
            memInfo->SetOffsetInfo(*OperandInfoUse(currentBB, *offset));
        }
    }
    opndInfo->same = opndInfoPrev;
    if (opndInfoPrev != nullptr) {
        opndInfoPrev->redefined = true;
        if (opndInfoPrev->bb == &currentBB) {
            opndInfoPrev->redefinedInBB = true;
            opndInfoPrev->redefinedInsn = &currentInsn;
        }
        UpdateOpndInfo(localOpnd, *opndInfoPrev, opndInfo, hashVal);
    } else {
        SetOpndInfo(localOpnd, opndInfo, hashVal);
    }
    return opndInfo;
}

void AArch64Ebo::DefineClinitSpecialRegisters(InsnInfo &insnInfo)
{
    Insn *insn = insnInfo.insn;
    CHECK_FATAL(insn != nullptr, "nullptr of currInsnInfo");
    RegOperand &phyOpnd1 = a64CGFunc->GetOrCreatePhysicalRegisterOperand(R16, k64BitSize, kRegTyInt);
    OpndInfo *opndInfo = OperandInfoDef(*insn->GetBB(), *insn, phyOpnd1);
    opndInfo->insnInfo = &insnInfo;

    RegOperand &phyOpnd2 = a64CGFunc->GetOrCreatePhysicalRegisterOperand(R17, k64BitSize, kRegTyInt);
    opndInfo = OperandInfoDef(*insn->GetBB(), *insn, phyOpnd2);
    opndInfo->insnInfo = &insnInfo;
}

void AArch64Ebo::BuildCallerSaveRegisters()
{
    callerSaveRegTable.clear();
    RegOperand &phyOpndR0 = a64CGFunc->GetOrCreatePhysicalRegisterOperand(R0, k64BitSize, kRegTyInt);
    RegOperand &phyOpndV0 = a64CGFunc->GetOrCreatePhysicalRegisterOperand(V0, k64BitSize, kRegTyFloat);
    callerSaveRegTable.emplace_back(&phyOpndR0);
    callerSaveRegTable.emplace_back(&phyOpndV0);
    for (uint32 i = R1; i <= R18; i++) {
        RegOperand &phyOpnd =
            a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(i), k64BitSize, kRegTyInt);
        callerSaveRegTable.emplace_back(&phyOpnd);
    }
    for (uint32 i = V1; i <= V7; i++) {
        RegOperand &phyOpnd =
            a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(i), k64BitSize, kRegTyFloat);
        callerSaveRegTable.emplace_back(&phyOpnd);
    }
    for (uint32 i = V16; i <= V31; i++) {
        RegOperand &phyOpnd =
            a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(i), k64BitSize, kRegTyFloat);
        callerSaveRegTable.emplace_back(&phyOpnd);
    }
    CHECK_FATAL(callerSaveRegTable.size() < kMaxCallerSaveReg,
                "number of elements in callerSaveRegTable must be less than 45!");
}

void AArch64Ebo::DefineAsmRegisters(InsnInfo &insnInfo)
{
    Insn *insn = insnInfo.insn;
    DEBUG_ASSERT(insn->GetMachineOpcode() == MOP_asm, "insn should be an asm insn.");
    ListOperand &outList =
        const_cast<ListOperand &>(static_cast<const ListOperand &>(insn->GetOperand(kAsmOutputListOpnd)));
    for (auto opnd : outList.GetOperands()) {
        OpndInfo *opndInfo = OperandInfoDef(*insn->GetBB(), *insn, *opnd);
        opndInfo->insnInfo = &insnInfo;
    }
    ListOperand &clobberList =
        const_cast<ListOperand &>(static_cast<const ListOperand &>(insn->GetOperand(kAsmClobberListOpnd)));
    for (auto opnd : clobberList.GetOperands()) {
        OpndInfo *opndInfo = OperandInfoDef(*insn->GetBB(), *insn, *opnd);
        opndInfo->insnInfo = &insnInfo;
    }
    ListOperand &inList =
        const_cast<ListOperand &>(static_cast<const ListOperand &>(insn->GetOperand(kAsmInputListOpnd)));
    for (auto opnd : inList.GetOperands()) {
        OperandInfoUse(*(insn->GetBB()), *opnd);
    }
}

void AArch64Ebo::DefineCallerSaveRegisters(InsnInfo &insnInfo)
{
    Insn *insn = insnInfo.insn;
    if (insn->IsAsmInsn()) {
        DefineAsmRegisters(insnInfo);
        return;
    }
    DEBUG_ASSERT(insn->IsCall() || insn->IsTailCall(), "insn should be a call insn.");
    if (CGOptions::DoIPARA()) {
        auto *targetOpnd = insn->GetCallTargetOperand();
        CHECK_FATAL(targetOpnd != nullptr, "target is null in Insn::IsCallToFunctionThatNeverReturns");
        if (targetOpnd->IsFuncNameOpnd()) {
            FuncNameOperand *target = static_cast<FuncNameOperand *>(targetOpnd);
            const MIRSymbol *funcSt = target->GetFunctionSymbol();
            DEBUG_ASSERT(funcSt->GetSKind() == kStFunc, "funcSt must be a function name symbol");
            MIRFunction *func = funcSt->GetFunction();
            if (func != nullptr && func->IsReferedRegsValid()) {
                for (auto preg : func->GetReferedRegs()) {
                    if (AArch64Abi::IsCalleeSavedReg(static_cast<AArch64reg>(preg))) {
                        continue;
                    }
                    RegOperand *opnd = &a64CGFunc->GetOrCreatePhysicalRegisterOperand(
                        static_cast<AArch64reg>(preg), k64BitSize,
                        AArch64isa::IsFPSIMDRegister(static_cast<AArch64reg>(preg)) ? kRegTyFloat : kRegTyInt);
                    OpndInfo *opndInfo = OperandInfoDef(*insn->GetBB(), *insn, *opnd);
                    opndInfo->insnInfo = &insnInfo;
                }
                return;
            }
        }
    }
    for (auto opnd : callerSaveRegTable) {
        OpndInfo *opndInfo = OperandInfoDef(*insn->GetBB(), *insn, *opnd);
        opndInfo->insnInfo = &insnInfo;
    }
}

void AArch64Ebo::DefineReturnUseRegister(Insn &insn)
{
    if (insn.GetMachineOpcode() != MOP_xret) {
        return;
    }
    /* Mark the scalar callee-saved registers, FP and LR as used. */
    for (uint32 i = R19; i <= R30; i++) {
        RegOperand &phyOpnd =
            a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(i), k64BitSize, kRegTyInt);
        OperandInfoUse(*insn.GetBB(), phyOpnd);
    }

    /* Mark SP as used. */
    RegOperand &phyOpndSP =
        a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(RSP), k64BitSize, kRegTyInt);
    OperandInfoUse(*insn.GetBB(), phyOpndSP);

    /* Mark the FP callee-saved registers as used. */
    for (uint32 i = V8; i <= V15; i++) {
        RegOperand &phyOpnd =
            a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(i), k64BitSize, kRegTyFloat);
        OperandInfoUse(*insn.GetBB(), phyOpnd);
    }
}

void AArch64Ebo::DefineCallUseSpecialRegister(Insn &insn)
{
    if (insn.GetMachineOpcode() == MOP_asm) {
        return;
    }
    AArch64reg fpRegNO = RFP;
    if (!beforeRegAlloc && cgFunc->UseFP()) {
        fpRegNO = R29;
    }
    /* Mark FP and LR as used. */
    RegOperand &phyOpndFP = a64CGFunc->GetOrCreatePhysicalRegisterOperand(fpRegNO, k64BitSize, kRegTyInt);
    OperandInfoUse(*insn.GetBB(), phyOpndFP);
    RegOperand &phyOpndLR =
        a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(RLR), k64BitSize, kRegTyInt);
    OperandInfoUse(*insn.GetBB(), phyOpndLR);

    /* Mark SP as used. */
    RegOperand &phyOpndSP =
        a64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(RSP), k64BitSize, kRegTyInt);
    OperandInfoUse(*insn.GetBB(), phyOpndSP);
}

/* return true if op1 == op2 */
bool AArch64Ebo::OperandEqSpecial(const Operand &op1, const Operand &op2) const
{
    switch (op1.GetKind()) {
        case Operand::kOpdRegister: {
            const RegOperand &reg1 = static_cast<const RegOperand &>(op1);
            const RegOperand &reg2 = static_cast<const RegOperand &>(op2);
            return reg1 == reg2;
        }
        case Operand::kOpdImmediate: {
            const ImmOperand &imm1 = static_cast<const ImmOperand &>(op1);
            const ImmOperand &imm2 = static_cast<const ImmOperand &>(op2);
            return imm1 == imm2;
        }
        case Operand::kOpdOffset: {
            const OfstOperand &ofst1 = static_cast<const OfstOperand &>(op1);
            const OfstOperand &ofst2 = static_cast<const OfstOperand &>(op2);
            return ofst1 == ofst2;
        }
        case Operand::kOpdStImmediate: {
            const StImmOperand &stImm1 = static_cast<const StImmOperand &>(op1);
            const StImmOperand &stImm2 = static_cast<const StImmOperand &>(op2);
            return stImm1 == stImm2;
        }
        case Operand::kOpdMem: {
            const MemOperand &mem1 = static_cast<const MemOperand &>(op1);
            const MemOperand &mem2 = static_cast<const MemOperand &>(op2);
            if (mem1.GetAddrMode() == mem2.GetAddrMode()) {
                DEBUG_ASSERT(mem1.GetBaseRegister() != nullptr, "nullptr check");
                DEBUG_ASSERT(mem2.GetBaseRegister() != nullptr, "nullptr check");
            }
            return ((mem1.GetAddrMode() == mem2.GetAddrMode()) &&
                    OperandEqual(*(mem1.GetBaseRegister()), *(mem2.GetBaseRegister())) &&
                    OperandEqual(*(mem1.GetIndexRegister()), *(mem2.GetIndexRegister())) &&
                    OperandEqual(*(mem1.GetOffsetOperand()), *(mem2.GetOffsetOperand())) &&
                    (mem1.GetSymbol() == mem2.GetSymbol()) && (mem1.GetSize() == mem2.GetSize()));
        }
        default: {
            return false;
        }
    }
}

int32 AArch64Ebo::GetOffsetVal(const MemOperand &memOpnd) const
{
    OfstOperand *offset = memOpnd.GetOffsetImmediate();
    int32 val = 0;
    if (offset != nullptr) {
        val += static_cast<int>(offset->GetOffsetValue());

        if (offset->IsSymOffset() || offset->IsSymAndImmOffset()) {
            val += static_cast<int>(offset->GetSymbol()->GetStIdx().Idx());
        }
    }
    return val;
}

ConditionCode AArch64Ebo::GetReverseCond(const CondOperand &cond) const
{
    switch (cond.GetCode()) {
        case CC_NE:
            return CC_EQ;
        case CC_EQ:
            return CC_NE;
        case CC_LT:
            return CC_GE;
        case CC_GE:
            return CC_LT;
        case CC_GT:
            return CC_LE;
        case CC_LE:
            return CC_GT;
        default:
            CHECK_FATAL(0, "Not supported yet.");
    }
    return kCcLast;
}

/* return true if cond is one of NE/EQ/LT/GE/GT/LE */
bool AArch64Ebo::CheckCondCode(const CondOperand &cond) const
{
    switch (cond.GetCode()) {
        case CC_NE:
        case CC_EQ:
        case CC_LT:
        case CC_GE:
        case CC_GT:
        case CC_LE:
            return true;
        default:
            return false;
    }
}

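/*
 * Sketch of the folding below: when both sources of a logical op are known
 * non-negative constants, the op collapses to a single move, e.g.
 *   mov w1, #0xF0
 *   mov w2, #0x0F
 *   orr w0, w1, w2
 *   ==>
 *   mov w0, #0xFF
 */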
bool AArch64Ebo::SimplifyBothConst(BB &bb, Insn &insn, const ImmOperand &immOperand0, const ImmOperand &immOperand1,
                                   uint32 opndSize) const
{
    MOperator mOp = insn.GetMachineOpcode();
    int64 val = 0;
    /* simplifying negative constants is not supported yet */
    if (immOperand0.GetValue() < 0 || immOperand1.GetValue() < 0) {
        return false;
    }
    uint64 opndValue0 = static_cast<uint64>(immOperand0.GetValue());
    uint64 opndValue1 = static_cast<uint64>(immOperand1.GetValue());
    switch (mOp) {
        case MOP_weorrri12:
        case MOP_weorrrr:
        case MOP_xeorrri13:
        case MOP_xeorrrr:
            val = static_cast<int64>(opndValue0 ^ opndValue1);
            break;
        case MOP_wandrri12:
        case MOP_waddrri24:
        case MOP_wandrrr:
        case MOP_xandrri13:
        case MOP_xandrrr:
            val = static_cast<int64>(opndValue0 & opndValue1);
            break;
        case MOP_wiorrri12:
        case MOP_wiorrrr:
        case MOP_xiorrri13:
        case MOP_xiorrrr:
            val = static_cast<int64>(opndValue0 | opndValue1);
            break;
        default:
            return false;
    }
    Operand *res = &insn.GetOperand(kInsnFirstOpnd);
    ImmOperand *immOperand = &a64CGFunc->CreateImmOperand(val, opndSize, false);
    if (!immOperand->IsSingleInstructionMovable()) {
        DEBUG_ASSERT(res->IsRegister(), " expect a register operand");
        static_cast<AArch64CGFunc *>(cgFunc)->SplitMovImmOpndInstruction(val, *(static_cast<RegOperand *>(res)), &insn);
        bb.RemoveInsn(insn);
    } else {
        MOperator newmOp = opndSize == k64BitSize ? MOP_xmovri64 : MOP_wmovri32;
        Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newmOp, *res, *immOperand);
        if (!VERIFY_INSN(&newInsn)) {  // insn would need to be split, so we do not implement the opt.
            return false;
        }
        bb.ReplaceInsn(insn, newInsn);
    }
    return true;
}

bool AArch64Ebo::OperandLiveAfterInsn(const RegOperand &regOpnd, Insn &insn) const
{
    for (Insn *nextInsn = insn.GetNext(); nextInsn != nullptr; nextInsn = nextInsn->GetNext()) {
        if (!nextInsn->IsMachineInstruction()) {
            continue;
        }
        CHECK_FATAL(nextInsn->GetOperandSize() >= 1, "value overflow");
        int32 lastOpndId = static_cast<int32>(nextInsn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = nextInsn->GetOperand(static_cast<uint32>(i));
            if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr && base->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
            }

            if (!opnd.IsRegister()) {
                continue;
            }
            auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
            if (tmpRegOpnd.GetRegisterNumber() != regOpnd.GetRegisterNumber()) {
                continue;
            }
            auto *regProp = nextInsn->GetDesc()->opndMD[static_cast<uint32>(i)];
            bool isUse = regProp->IsUse();
            /* if the register is redefined without a use, there is no need to check live-out. */
            return isUse;
        }
    }
    return LiveOutOfBB(regOpnd, *insn.GetBB());
}

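/*
 * For a register-offset memory operand with a shift, the shift amount must
 * equal log2 of the rewritten access size. Illustrative sketch:
 *   ldrh w1, [x0, x2, lsl #1]   // 2-byte access, shift 1: valid
 *   ldr  w1, [x0, x2, lsl #2]   // rewriting to a 2-byte ldrh would keep lsl #2 -> rejected
 */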
bool AArch64Ebo::ValidPatternForCombineExtAndLoad(OpndInfo *prevOpndInfo, Insn *insn, MOperator newMop,
                                                  MOperator oldMop, const RegOperand &opnd)
{
    if (newMop == oldMop) {
        return true;
    }
    if (prevOpndInfo == nullptr || prevOpndInfo->refCount > 1) {
        return false;
    }
    if (OperandLiveAfterInsn(opnd, *insn)) {
        return false;
    }
    Insn *prevInsn = prevOpndInfo->insn;
    MemOperand *memOpnd = static_cast<MemOperand *>(prevInsn->GetMemOpnd());
    DEBUG_ASSERT(!prevInsn->IsStorePair(), "do not do this opt for str pair");
    DEBUG_ASSERT(!prevInsn->IsLoadPair(), "do not do this opt for ldr pair");
    CHECK_NULL_FATAL(memOpnd);
    if (memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi &&
        !a64CGFunc->IsOperandImmValid(newMop, prevInsn->GetMemOpnd(), kInsnSecondOpnd)) {
        return false;
    }
    uint32 shiftAmount = memOpnd->ShiftAmount();
    if (shiftAmount == 0) {
        return true;
    }
    const InsnDesc *md = &AArch64CG::kMd[newMop];
    uint32 memSize = md->GetOperandSize() / k8BitSize;
    uint32 validShiftAmount = memSize == 8 ? 3 : memSize == 4 ? 2 : memSize == 2 ? 1 : 0;
    if (shiftAmount != validShiftAmount) {
        return false;
    }
    return true;
}

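/*
 * Sketch of the combine below, driven by extInsnPairTable (SXTW row shown):
 *   ldr   w1, [x0]    // prevInsn: MOP_wldr
 *   sxtw  x2, w1      // insn
 *   ==>
 *   ldrsw x1, [x0]    // prevInsn rewritten to MOP_xldrsw, dest widened to 64 bits
 *   mov   x2, x1      // insn rewritten to a plain move
 */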
bool AArch64Ebo::CombineExtensionAndLoad(Insn *insn, const MapleVector<OpndInfo *> &origInfos, ExtOpTable idx,
                                         bool is64bits)
{
    if (!beforeRegAlloc) {
        return false;
    }
    OpndInfo *opndInfo = origInfos[kInsnSecondOpnd];
    if (opndInfo == nullptr) {
        return false;
    }
    Insn *prevInsn = opndInfo->insn;
    if (prevInsn == nullptr) {
        return false;
    }

    MOperator prevMop = prevInsn->GetMachineOpcode();
    DEBUG_ASSERT(prevMop != MOP_undef, "Invalid opcode of instruction!");
    PairMOperator *begin = &extInsnPairTable[idx][0];
    PairMOperator *end = &extInsnPairTable[idx][insPairsNum];
    auto pairIt = std::find_if(begin, end, [prevMop](const PairMOperator insPair) { return prevMop == insPair[0]; });
    if (pairIt == end) {
        return false;
    }

    auto &res = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
    OpndInfo *prevOpndInfo = GetOpndInfo(res, -1);
    MOperator newPreMop = (*pairIt)[1];
    DEBUG_ASSERT(newPreMop != MOP_undef, "Invalid opcode of instruction!");
    if (!ValidPatternForCombineExtAndLoad(prevOpndInfo, insn, newPreMop, prevMop, res)) {
        return false;
    }
    auto *newMemOp = GetOrCreateMemOperandForNewMOP(*cgFunc, *prevInsn, newPreMop);
    if (newMemOp == nullptr) {
        return false;
    }
    prevInsn->SetMemOpnd(newMemOp);
    if (is64bits && idx <= SXTW && idx >= SXTB) {
        newPreMop = ExtLoadSwitchBitSize(newPreMop);
        auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
        prevDstOpnd.SetSize(k64BitSize);
        prevDstOpnd.SetValidBitsNum(k64BitSize);
    }
    prevInsn->SetMOP(AArch64CG::kMd[newPreMop]);
    MOperator movOp = is64bits ? MOP_xmovrr : MOP_wmovrr;
    if (insn->GetMachineOpcode() == MOP_wandrri12 || insn->GetMachineOpcode() == MOP_xandrri13) {
        Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(movOp, insn->GetOperand(kInsnFirstOpnd),
                                                            insn->GetOperand(kInsnSecondOpnd));
        insn->GetBB()->ReplaceInsn(*insn, newInsn);
    } else {
        insn->SetMOP(AArch64CG::kMd[movOp]);
    }
    return true;
}

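/*
 * Sketch of the mul+add fusion below:
 *   mul  w1, w2, w3   // prevInsn
 *   add  w0, w1, w4   // insn
 *   ==>
 *   madd w0, w2, w3, w4   // w0 = w2 * w3 + w4
 */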
bool AArch64Ebo::CombineMultiplyAdd(Insn *insn, const Insn *prevInsn, InsnInfo *insnInfo, Operand *addOpnd,
                                    bool is64bits, bool isFp) const
{
    /* don't use a register if it was redefined. */
    OpndInfo *opndInfo1 = insnInfo->origOpnd[kInsnSecondOpnd];
    OpndInfo *opndInfo2 = insnInfo->origOpnd[kInsnThirdOpnd];
    if (((opndInfo1 != nullptr) && opndInfo1->redefined) || ((opndInfo2 != nullptr) && opndInfo2->redefined)) {
        return false;
    }
    Operand &res = insn->GetOperand(kInsnFirstOpnd);
    Operand &opnd1 = prevInsn->GetOperand(kInsnSecondOpnd);
    Operand &opnd2 = prevInsn->GetOperand(kInsnThirdOpnd);
    /* may overflow */
    if ((prevInsn->GetOperand(kInsnFirstOpnd).GetSize() == k32BitSize) && is64bits) {
        return false;
    }
    MOperator mOp = isFp ? (is64bits ? MOP_dmadd : MOP_smadd) : (is64bits ? MOP_xmaddrrrr : MOP_wmaddrrrr);
    insn->GetBB()->ReplaceInsn(*insn, cgFunc->GetInsnBuilder()->BuildInsn(mOp, res, opnd1, opnd2, *addOpnd));
    return true;
}

bool AArch64Ebo::CheckCanDoMadd(Insn *insn, OpndInfo *opndInfo, int32 pos, bool is64bits, bool isFp)
{
    if ((opndInfo == nullptr) || (opndInfo->insn == nullptr)) {
        return false;
    }
    if (!cgFunc->GetMirModule().IsCModule()) {
        return false;
    }
    Insn *insn1 = opndInfo->insn;
    InsnInfo *insnInfo = opndInfo->insnInfo;
    if (insnInfo == nullptr) {
        return false;
    }
    Operand &addOpnd = insn->GetOperand(static_cast<uint32>(pos));
    MOperator opc1 = insn1->GetMachineOpcode();
    if ((isFp && ((opc1 == MOP_xvmuld) || (opc1 == MOP_xvmuls))) ||
        (!isFp && ((opc1 == MOP_xmulrrr) || (opc1 == MOP_wmulrrr)))) {
        return CombineMultiplyAdd(insn, insn1, insnInfo, &addOpnd, is64bits, isFp);
    }
    return false;
}

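/*
 * Sketch of the mul+sub fusion below:
 *   mul  w1, w2, w3   // insn1
 *   sub  w0, w4, w1   // insn
 *   ==>
 *   msub w0, w2, w3, w4   // w0 = w4 - w2 * w3
 */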
bool AArch64Ebo::CombineMultiplySub(Insn *insn, OpndInfo *opndInfo, bool is64bits, bool isFp) const
{
    if ((opndInfo == nullptr) || (opndInfo->insn == nullptr)) {
        return false;
    }
    if (!cgFunc->GetMirModule().IsCModule()) {
        return false;
    }
    Insn *insn1 = opndInfo->insn;
    InsnInfo *insnInfo = opndInfo->insnInfo;
    if (insnInfo == nullptr) {
        return false;
    }
    Operand &subOpnd = insn->GetOperand(kInsnSecondOpnd);
    MOperator opc1 = insn1->GetMachineOpcode();
    if ((isFp && ((opc1 == MOP_xvmuld) || (opc1 == MOP_xvmuls))) ||
        (!isFp && ((opc1 == MOP_xmulrrr) || (opc1 == MOP_wmulrrr)))) {
        /* don't use a register if it was redefined. */
        OpndInfo *opndInfo1 = insnInfo->origOpnd[kInsnSecondOpnd];
        OpndInfo *opndInfo2 = insnInfo->origOpnd[kInsnThirdOpnd];
        if (((opndInfo1 != nullptr) && opndInfo1->redefined) || ((opndInfo2 != nullptr) && opndInfo2->redefined)) {
            return false;
        }
        Operand &res = insn->GetOperand(kInsnFirstOpnd);
        Operand &opnd1 = insn1->GetOperand(kInsnSecondOpnd);
        Operand &opnd2 = insn1->GetOperand(kInsnThirdOpnd);
        /* may overflow */
        if ((insn1->GetOperand(kInsnFirstOpnd).GetSize() == k32BitSize) && is64bits) {
            return false;
        }
        MOperator mOp = isFp ? (is64bits ? MOP_dmsub : MOP_smsub) : (is64bits ? MOP_xmsubrrrr : MOP_wmsubrrrr);
        insn->GetBB()->ReplaceInsn(*insn, cgFunc->GetInsnBuilder()->BuildInsn(mOp, res, opnd1, opnd2, subOpnd));
        return true;
    }
    return false;
}

bool CheckInsnRefField(const Insn &insn, size_t opndIndex)
{
    if (insn.IsAccessRefField() && insn.AccessMem()) {
        Operand &opnd0 = insn.GetOperand(opndIndex);
        if (opnd0.IsRegister()) {
            return true;
        }
    }
    return false;
}

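/*
 * Sketch of the mul+neg fusion below:
 *   mul  w1, w2, w3   // insn1
 *   neg  w0, w1       // insn
 *   ==>
 *   mneg w0, w2, w3   // w0 = -(w2 * w3)
 */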
bool AArch64Ebo::CombineMultiplyNeg(Insn *insn, OpndInfo *opndInfo, bool is64bits, bool isFp) const
{
    if ((opndInfo == nullptr) || (opndInfo->insn == nullptr)) {
        return false;
    }
    if (!cgFunc->GetMirModule().IsCModule()) {
        return false;
    }
    Operand &res = insn->GetOperand(kInsnFirstOpnd);
    Operand &src = insn->GetOperand(kInsnSecondOpnd);
    if (res.GetSize() != src.GetSize()) {
        return false;
    }
    Insn *insn1 = opndInfo->insn;
    InsnInfo *insnInfo = opndInfo->insnInfo;
    CHECK_NULL_FATAL(insnInfo);
    MOperator opc1 = insn1->GetMachineOpcode();
    if ((isFp && ((opc1 == MOP_xvmuld) || (opc1 == MOP_xvmuls))) ||
        (!isFp && ((opc1 == MOP_xmulrrr) || (opc1 == MOP_wmulrrr)))) {
        /* don't use a register if it was redefined. */
        OpndInfo *opndInfo1 = insnInfo->origOpnd[kInsnSecondOpnd];
        OpndInfo *opndInfo2 = insnInfo->origOpnd[kInsnThirdOpnd];
        if (((opndInfo1 != nullptr) && opndInfo1->redefined) || ((opndInfo2 != nullptr) && opndInfo2->redefined)) {
            return false;
        }
        Operand &opnd1 = insn1->GetOperand(kInsnSecondOpnd);
        Operand &opnd2 = insn1->GetOperand(kInsnThirdOpnd);
        MOperator mOp = isFp ? (is64bits ? MOP_dnmul : MOP_snmul) : (is64bits ? MOP_xmnegrrr : MOP_wmnegrrr);
        insn->GetBB()->ReplaceInsn(*insn, cgFunc->GetInsnBuilder()->BuildInsn(mOp, res, opnd1, opnd2));
        return true;
    }
    return false;
}

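/*
 * Sketch of the lsr+and fusion below (the mask must be of the form 2^n - 1):
 *   lsr  w1, w2, #8        // prevInsn
 *   and  w0, w1, #0xFF     // insn
 *   ==>
 *   ubfx w0, w2, #8, #8    // extract 8 bits starting at bit 8
 */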
bool AArch64Ebo::CombineLsrAnd(Insn &insn, const OpndInfo &opndInfo, bool is64bits, bool isFp) const
{
    if (opndInfo.insn == nullptr) {
        return false;
    }
    if (!cgFunc->GetMirModule().IsCModule()) {
        return false;
    }
    AArch64CGFunc *aarchFunc = static_cast<AArch64CGFunc *>(cgFunc);
    Insn *prevInsn = opndInfo.insn;
    InsnInfo *insnInfo = opndInfo.insnInfo;
    if (insnInfo == nullptr) {
        return false;
    }
    MOperator opc1 = prevInsn->GetMachineOpcode();
    if (!isFp && ((opc1 == MOP_xlsrrri6) || (opc1 == MOP_wlsrrri5))) {
        /* don't use a register if it was redefined. */
        OpndInfo *opndInfo1 = insnInfo->origOpnd[kInsnSecondOpnd];
        if ((opndInfo1 != nullptr) && opndInfo1->redefined) {
            return false;
        }
        Operand &res = insn.GetOperand(kInsnFirstOpnd);
        Operand &opnd1 = prevInsn->GetOperand(kInsnSecondOpnd);
        int64 immVal1 = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
        Operand &immOpnd1 = is64bits ? aarchFunc->CreateImmOperand(immVal1, kMaxImmVal6Bits, false)
                                     : aarchFunc->CreateImmOperand(immVal1, kMaxImmVal5Bits, false);
        int64 immVal2 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
        /* width of the low mask: ffs(mask + 1) - 1, e.g. 0xFF -> 8 */
        int64 immV2 = __builtin_ffsll(immVal2 + 1) - 1;
        if (immVal1 + immV2 < k1BitSize || (is64bits && immVal1 + immV2 > k64BitSize) ||
            (!is64bits && immVal1 + immV2 > k32BitSize)) {
            return false;
        }
        Operand &immOpnd2 = is64bits ? aarchFunc->CreateImmOperand(immV2, kMaxImmVal6Bits, false)
                                     : aarchFunc->CreateImmOperand(immV2, kMaxImmVal5Bits, false);
        MOperator mOp = (is64bits ? MOP_xubfxrri6i6 : MOP_wubfxrri5i5);
        insn.GetBB()->ReplaceInsn(insn, cgFunc->GetInsnBuilder()->BuildInsn(mOp, res, opnd1, immOpnd1, immOpnd2));
        return true;
    }
    return false;
}

/*
 * iii. mov w16, v10.s[1]   // FMOV from SIMD   ---> replaceInsn
 *      mov w1, w16          ---> insn
 *      ==>
 *      mov w1, v10.s[1]
 */
bool AArch64Ebo::IsMovToSIMDVmov(Insn &insn, const Insn &replaceInsn) const
{
    if (insn.GetMachineOpcode() == MOP_wmovrr && replaceInsn.GetMachineOpcode() == MOP_xvmovrv) {
        insn.SetMOP(AArch64CG::kMd[replaceInsn.GetMachineOpcode()]);
        return true;
    }
    return false;
}

bool AArch64Ebo::IsPseudoRet(Insn &insn) const
{
    MOperator mop = insn.GetMachineOpcode();
    if (mop == MOP_pseudo_ret_int || mop == MOP_pseudo_ret_float) {
        return true;
    }
    return false;
}

} /* namespace maplebe */