1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "aarch64_peep.h"
17 #include "cg.h"
18 #include "mpl_logging.h"
19 #include "common_utils.h"
20 #include "cg_option.h"
21 #include "aarch64_utils.h"
22 #include "cg_irbuilder.h"
23 #include "aarch64_cg.h"
24 #include "aarch64_mem_reference.h"
25 
26 namespace maplebe {
27 #define CG_PEEP_DUMP CG_DEBUG_FUNC(*cgFunc)
28 namespace {
29 const std::string kMccLoadRef = "MCC_LoadRefField";
30 const std::string kMccLoadRefV = "MCC_LoadVolatileField";
31 const std::string kMccLoadRefS = "MCC_LoadRefStatic";
32 const std::string kMccLoadRefVS = "MCC_LoadVolatileStaticField";
33 const std::string kMccDummy = "MCC_Dummy";
34 
35 const uint32 kSizeOfSextMopTable = 5;
36 const uint32 kSizeOfUextMopTable = 3;
37 
38 MOperator sextMopTable[kSizeOfSextMopTable] = {MOP_xsxtb32, MOP_xsxtb64, MOP_xsxth32, MOP_xsxth64, MOP_xsxtw64};
39 
40 MOperator uextMopTable[kSizeOfUextMopTable] = {MOP_xuxtb32, MOP_xuxth32, MOP_xuxtw64};
41 
42 const std::string GetReadBarrierName(const Insn &insn)
43 {
44     constexpr int32 totalBarrierNamesNum = 5;
45     std::array<std::string, totalBarrierNamesNum> barrierNames = {kMccLoadRef, kMccLoadRefV, kMccLoadRefS,
46                                                                   kMccLoadRefVS, kMccDummy};
47     if (insn.GetMachineOpcode() == MOP_xbl || insn.GetMachineOpcode() == MOP_tail_call_opt_xbl) {
48         auto &op = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
49         const std::string &funcName = op.GetName();
50         for (const std::string &singleBarrierName : barrierNames) {
51             if (funcName == singleBarrierName) {
52                 return singleBarrierName;
53             }
54         }
55     }
56     return "";
57 }
58 
59 MOperator GetLoadOperator(uint32 refSize, bool isVolatile)
60 {
61     if (refSize == k32BitSize) {
62         return isVolatile ? MOP_wldar : MOP_wldr;
63     }
64     return isVolatile ? MOP_xldar : MOP_xldr;
65 }
66 }  // namespace
67 
68 static bool IsZeroRegister(const Operand &opnd)
69 {
70     if (!opnd.IsRegister()) {
71         return false;
72     }
73     const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
74     return regOpnd->GetRegisterNumber() == RZR;
75 }
76 
77 MOperator GetMopUpdateAPSR(MOperator mop, bool &isAddShift)
78 {
79     MOperator newMop = MOP_undef;
80     switch (mop) {
81         case MOP_xaddrrr: {
82             newMop = MOP_xaddsrrr;
83             isAddShift = false;
84             break;
85         }
86         case MOP_xaddrri12: {
87             newMop = MOP_xaddsrri12;
88             isAddShift = false;
89             break;
90         }
91         case MOP_waddrrr: {
92             newMop = MOP_waddsrrr;
93             isAddShift = false;
94             break;
95         }
96         case MOP_waddrri12: {
97             newMop = MOP_waddsrri12;
98             isAddShift = false;
99             break;
100         }
101         case MOP_xaddrrrs: {
102             newMop = MOP_xaddsrrrs;
103             isAddShift = true;
104             break;
105         }
106         case MOP_waddrrrs: {
107             newMop = MOP_waddsrrrs;
108             isAddShift = true;
109             break;
110         }
111         default:
112             break;
113     }
114     return newMop;
115 }
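/*
 * Illustrative sketch (placeholder registers) of what the flag-setting variants returned
 * above enable: an add whose result is only compared against zero afterwards can be folded
 * into the flag-setting form, subject to the extra legality checks done by the pattern
 * that calls this helper, e.g.
 *   add  x1, x2, x3
 *   cmp  x1, #0        ===>   adds x1, x2, x3
 *   b.eq .Label               b.eq .Label
 */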
116 
117 void AArch64CGPeepHole::Run()
118 {
119     bool optSuccess = false;
120     FOR_ALL_BB(bb, cgFunc)
121     {
122         FOR_BB_INSNS_SAFE(insn, bb, nextInsn)
123         {
124             if (!insn->IsMachineInstruction()) {
125                 continue;
126             }
127             if (ssaInfo != nullptr) {
128                 optSuccess = optSuccess || DoSSAOptimize(*bb, *insn);
129             } else {
130                 DoNormalOptimize(*bb, *insn);
131             }
132         }
133     }
134     if (optSuccess) {
135         Run();
136     }
137 }
138 
139 bool AArch64CGPeepHole::DoSSAOptimize(BB &bb, Insn &insn)
140 {
141     MOperator thisMop = insn.GetMachineOpcode();
142     manager = peepMemPool->New<PeepOptimizeManager>(*cgFunc, bb, insn, *ssaInfo);
143     manager->SetOptSuccess(false);
144     switch (thisMop) {
145         case MOP_xandrrr:
146         case MOP_wandrrr: {
147             manager->Optimize<MvnAndToBicPattern>(true);
148             break;
149         }
150         case MOP_wiorrri12:
151         case MOP_xiorrri13: {
152             manager->Optimize<OrrToMovPattern>(true);
153             break;
154         }
155         case MOP_wcbz:
156         case MOP_xcbz:
157         case MOP_wcbnz:
158         case MOP_xcbnz: {
159             manager->Optimize<AndCbzToTbzPattern>(true);
160             manager->Optimize<CsetCbzToBeqPattern>(true);
161             manager->Optimize<OneHoleBranchPattern>(true);
162             break;
163         }
164         case MOP_beq:
165         case MOP_bne: {
166             manager->Optimize<AndCmpBranchesToTbzPattern>(true);
167             manager->Optimize<AndAndCmpBranchesToTstPattern>(true);
168             break;
169         }
170         case MOP_wcsetrc:
171         case MOP_xcsetrc: {
172             manager->Optimize<AndAndCmpBranchesToTstPattern>(true);
173             manager->Optimize<AndCmpBranchesToCsetPattern>(true);
174             manager->Optimize<ContinuousCmpCsetPattern>(true);
175             break;
176         }
177         case MOP_waddrrr:
178         case MOP_xaddrrr: {
179             manager->Optimize<SimplifyMulArithmeticPattern>(true);
180             manager->Optimize<CsetToCincPattern>(true);
181             break;
182         }
183         case MOP_dadd:
184         case MOP_sadd:
185         case MOP_wsubrrr:
186         case MOP_xsubrrr:
187         case MOP_dsub:
188         case MOP_ssub:
189         case MOP_xinegrr:
190         case MOP_winegrr:
191         case MOP_wfnegrr:
192         case MOP_xfnegrr: {
193             manager->Optimize<SimplifyMulArithmeticPattern>(true);
194             break;
195         }
196         case MOP_wandrri12:
197         case MOP_xandrri13: {
198             manager->Optimize<UbfxAndMergetPattern>(true);
199             manager->Optimize<LsrAndToUbfxPattern>(true);
200             manager->Optimize<LslAndToUbfizPattern>(true);
201             manager->Optimize<ElimSpecificExtensionPattern>(true);
202             break;
203         }
204         case MOP_wcselrrrc:
205         case MOP_xcselrrrc: {
206             manager->Optimize<AndAndCmpBranchesToTstPattern>(true);
207             manager->Optimize<CselToCsetPattern>(true);
208             manager->Optimize<CselToCsincRemoveMovPattern>(true);
209             break;
210         }
211         case MOP_wiorrrr:
212         case MOP_xiorrrr:
213         case MOP_wiorrrrs:
214         case MOP_xiorrrrs: {
215             manager->Optimize<LogicShiftAndOrrToExtrPattern>(true);
216             break;
217         }
218         case MOP_bge:
219         case MOP_ble:
220         case MOP_blt:
221         case MOP_bgt: {
222             manager->Optimize<ZeroCmpBranchesToTbzPattern>(true);
223             break;
224         }
225         case MOP_wcmprr:
226         case MOP_xcmprr: {
227             manager->Optimize<NegCmpToCmnPattern>(true);
228             break;
229         }
230         case MOP_xlslrri6: {
231             manager->Optimize<ExtLslToBitFieldInsertPattern>();
232             manager->Optimize<CombineSameArithmeticPattern>(true);
233             manager->Optimize<LslAndToUbfizPattern>(true);
234             break;
235         }
236         case MOP_xsxtb32:
237         case MOP_xsxtb64:
238         case MOP_xsxth32:
239         case MOP_xsxth64:
240         case MOP_xsxtw64:
241         case MOP_xuxtb32:
242         case MOP_xuxth32:
243         case MOP_xuxtw64: {
244             manager->Optimize<ElimSpecificExtensionPattern>(true);
245             break;
246         }
247         case MOP_wlsrrri5:
248         case MOP_xlsrrri6:
249         case MOP_wasrrri5:
250         case MOP_xasrrri6:
251         case MOP_waddrri12:
252         case MOP_xaddrri12:
253         case MOP_wsubrri12:
254         case MOP_xsubrri12: {
255             manager->Optimize<CombineSameArithmeticPattern>(true);
256             break;
257         }
258         case MOP_wlslrri5: {
259             manager->Optimize<CombineSameArithmeticPattern>(true);
260             manager->Optimize<LslAndToUbfizPattern>(true);
261             break;
262         }
263         case MOP_wubfxrri5i5:
264         case MOP_xubfxrri6i6: {
265             manager->Optimize<UbfxAndMergetPattern>(true);
266             manager->Optimize<UbfxAndCbzToTbzPattern>(true);
267             break;
268         }
269         case MOP_xmulrrr:
270         case MOP_wmulrrr: {
271             manager->Optimize<MulImmToShiftPattern>(!cgFunc->IsAfterRegAlloc());
272             break;
273         }
274         case MOP_wcmpri:
275         case MOP_xcmpri: {
276             manager->Optimize<AddCmpZeroPatternSSA>(true);
277             break;
278         }
279         case MOP_wtbz:
280         case MOP_xtbz:
281         case MOP_wtbnz:
282         case MOP_xtbnz: {
283             manager->Optimize<AndTbzPattern>(true);
284             manager->Optimize<CsetCbzToBeqPattern>(true);
285             break;
286         }
287         default:
288             break;
289     }
290     return manager->OptSuccess();
291 }
292 
293 bool ContinuousCmpCsetPattern::CheckCondCode(const CondOperand &condOpnd) const
294 {
295     switch (condOpnd.GetCode()) {
296         case CC_NE:
297         case CC_EQ:
298         case CC_LT:
299         case CC_GE:
300         case CC_GT:
301         case CC_LE:
302             return true;
303         default:
304             return false;
305     }
306 }
307 
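/*
 * Rough shape of the sequence this pattern matches (placeholder registers, not taken from
 * any real input):
 *   cmp   x2, x3
 *   cset  w0, LT          // prevCsetInsn1
 *   cmp   w0, #0          // prevCmpInsn
 *   cset  w1, NE / EQ     // current insn
 * With NE the outer cmp/cset pair is redundant and w1 can simply copy w0; with EQ an extra
 * cset using the reversed condition (GE here) is inserted first and then copied.
 */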
308 bool ContinuousCmpCsetPattern::CheckCondition(Insn &insn)
309 {
310     MOperator curMop = insn.GetMachineOpcode();
311     if (curMop != MOP_wcsetrc && curMop != MOP_xcsetrc) {
312         return false;
313     }
314     auto &condOpnd = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
315     if (condOpnd.GetCode() != CC_NE && condOpnd.GetCode() != CC_EQ) {
316         return false;
317     }
318     reverse = (condOpnd.GetCode() == CC_EQ);
319     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
320     prevCmpInsn = ssaInfo->GetDefInsn(ccReg);
321     if (prevCmpInsn == nullptr) {
322         return false;
323     }
324     MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
325     if (prevCmpMop != MOP_wcmpri && prevCmpMop != MOP_xcmpri) {
326         return false;
327     }
328     if (!static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd)).IsZero()) {
329         return false;
330     }
331     auto &cmpCCReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnFirstOpnd));
332     InsnSet useSet = GetAllUseInsn(cmpCCReg);
333     if (useSet.size() > 1) {
334         return false;
335     }
336     auto &cmpUseReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
337     prevCsetInsn1 = ssaInfo->GetDefInsn(cmpUseReg);
338     if (prevCsetInsn1 == nullptr) {
339         return false;
340     }
341     MOperator prevCsetMop1 = prevCsetInsn1->GetMachineOpcode();
342     if (prevCsetMop1 != MOP_wcsetrc && prevCsetMop1 != MOP_xcsetrc) {
343         return false;
344     }
345     auto &condOpnd1 = static_cast<CondOperand &>(prevCsetInsn1->GetOperand(kInsnSecondOpnd));
346     if (!CheckCondCode(condOpnd1)) {
347         return false;
348     }
349     auto &ccReg1 = static_cast<RegOperand &>(prevCsetInsn1->GetOperand(kInsnThirdOpnd));
350     prevCmpInsn1 = GetDefInsn(ccReg1);
351     if (prevCmpInsn1 == nullptr) {
352         return false;
353     }
354     if (IsCCRegCrossVersion(*prevCsetInsn1, *prevCmpInsn, ccReg1)) {
355         return false;
356     }
357     return true;
358 }
359 
360 void ContinuousCmpCsetPattern::Run(BB &bb, Insn &insn)
361 {
362     if (!CheckCondition(insn)) {
363         return;
364     }
365     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
366     MOperator curMop = insn.GetMachineOpcode();
367     Operand &resOpnd = insn.GetOperand(kInsnFirstOpnd);
368     Insn *newCsetInsn = nullptr;
369     if (reverse) {
370         MOperator prevCsetMop = prevCsetInsn1->GetMachineOpcode();
371         auto &prevCsetCondOpnd = static_cast<CondOperand &>(prevCsetInsn1->GetOperand(kInsnSecondOpnd));
372         CondOperand &newCondOpnd = aarFunc->GetCondOperand(GetReverseBasicCC(prevCsetCondOpnd.GetCode()));
373         regno_t tmpRegNO = 0;
374         auto *tmpDefOpnd = aarFunc->CreateVirtualRegisterOperand(tmpRegNO, resOpnd.GetSize(),
375                                                                  static_cast<RegOperand &>(resOpnd).GetRegisterType());
376         tmpDefOpnd->SetValidBitsNum(k1BitSize);
377         newCsetInsn = &cgFunc->GetInsnBuilder()->BuildInsn(prevCsetMop, *tmpDefOpnd, newCondOpnd,
378                                                            prevCsetInsn1->GetOperand(kInsnThirdOpnd));
379         BB *prevCsetBB = prevCsetInsn1->GetBB();
380         (void)prevCsetBB->InsertInsnAfter(*prevCsetInsn1, *newCsetInsn);
381         /* update ssa info */
382         auto *a64SSAInfo = static_cast<AArch64CGSSAInfo *>(ssaInfo);
383         a64SSAInfo->CreateNewInsnSSAInfo(*newCsetInsn);
384         /* dump pattern info */
385         if (CG_PEEP_DUMP) {
386             std::vector<Insn *> prevs;
387             prevs.emplace_back(prevCmpInsn1);
388             prevs.emplace_back(&insn);
389             DumpAfterPattern(prevs, prevCmpInsn, newCsetInsn);
390         }
391     }
392     MOperator newMop = (curMop == MOP_wcsetrc) ? MOP_wmovrr : MOP_xmovrr;
393     Insn *newInsn = nullptr;
394     if (newCsetInsn == nullptr) {
395         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd),
396                                                        prevCsetInsn1->GetOperand(kInsnFirstOpnd));
397     } else {
398         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd),
399                                                        newCsetInsn->GetOperand(kInsnFirstOpnd));
400     }
401     if (newInsn == nullptr) {
402         return;
403     }
404     bb.ReplaceInsn(insn, *newInsn);
405     /* update ssa info */
406     ssaInfo->ReplaceInsn(insn, *newInsn);
407     optSuccess = true;
408     SetCurrInsn(newInsn);
409     /* dump pattern info */
410     if (CG_PEEP_DUMP) {
411         std::vector<Insn *> prevs;
412         prevs.emplace_back(prevCmpInsn1);
413         prevs.emplace_back(prevCsetInsn1);
414         if (newCsetInsn == nullptr) {
415             (void)prevs.emplace_back(prevCmpInsn);
416         } else {
417             (void)prevs.emplace_back(newCsetInsn);
418         }
419         DumpAfterPattern(prevs, &insn, newInsn);
420     }
421 }
422 
423 bool NegCmpToCmnPattern::CheckCondition(Insn &insn)
424 {
425     MOperator curMop = insn.GetMachineOpcode();
426     if (curMop != MOP_wcmprr && curMop != MOP_xcmprr) {
427         return false;
428     }
429     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
430     prevInsn = ssaInfo->GetDefInsn(useReg);
431     if (prevInsn == nullptr) {
432         return false;
433     }
434     MOperator prevMop = prevInsn->GetMachineOpcode();
435     if (prevMop != MOP_winegrr && prevMop != MOP_xinegrr && prevMop != MOP_winegrrs && prevMop != MOP_xinegrrs) {
436         return false;
437     }
438     // Determine whether an implicit conversion exists.
439     if ((prevMop == MOP_winegrr && curMop == MOP_xcmprr) || (prevMop == MOP_winegrrs && curMop == MOP_xcmprr) ||
440         (prevMop == MOP_xinegrr && curMop == MOP_wcmprr) || (prevMop == MOP_xinegrrs && curMop == MOP_wcmprr)) {
441         return false;
442     }
443     /*
444      * If the source operand of the neg is 0, we cannot do this optimization,
445      * because
446      *   for cmp (subs): NOT(0) plus the carry-in may overflow in the add calculation,
447      *   for cmn (adds): overflow is already handled before addWithCarry is called,
448      * so when the neg source is 0, (neg + cmp) and (cmn) set different C and V condition flags.
449      *
450      * Since we cannot know the register value here, we instead restrict the condition codes to
451      * those that do not depend on the C and V flags.
452      */
453     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
454     InsnSet useInsns = GetAllUseInsn(ccReg);
455     for (auto *useInsn : useInsns) {
456         if (useInsn == nullptr) {
457             continue;
458         }
459         MOperator useMop = useInsn->GetMachineOpcode();
460         if (useInsn->IsCondBranch() && useMop != MOP_beq && useMop != MOP_bne && useMop != MOP_bmi &&
461             useMop != MOP_bpl) {
462             return false;
463         }
464         bool hasUnsupportedCode = false;
465         for (uint32 i = 0; i < useInsn->GetOperandSize(); ++i) {
466             if (useInsn->GetOperand(i).GetKind() == Operand::kOpdCond) {
467                 ConditionCode cond = static_cast<CondOperand &>(useInsn->GetOperand(i)).GetCode();
468                 /* example of why the V flag cannot be relied on:
469                  *  adds xt, x0, x1 (x1 = 0x8000000000000000) -> does not set V
470                  *  ==>
471                  *  neg x1, x1 (0x8000000000000000 negates to itself)
472                  *  subs xt, x0, x1 -> sets V
473                  */
474                 if (cond != CC_EQ && cond != CC_NE && cond != CC_MI && cond != CC_PL) {
475                     hasUnsupportedCode = true;
476                     break;
477                 }
478             }
479         }
480         if (hasUnsupportedCode) {
481             return false;
482         }
483     }
484     return true;
485 }
486 
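/*
 * Minimal sketch of the rewrite performed below (placeholder registers):
 *   neg  x1, x2
 *   cmp  x0, x1        ===>   cmn  x0, x2     // i.e. adds xzr, x0, x2
 * Only users of the flags whose condition codes ignore C and V (EQ/NE/MI/PL) are accepted,
 * as checked above.
 */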
487 void NegCmpToCmnPattern::Run(BB &bb, Insn &insn)
488 {
489     if (!CheckCondition(insn)) {
490         return;
491     }
492     Operand &opnd1 = insn.GetOperand(kInsnSecondOpnd);
493     Operand &opnd2 = prevInsn->GetOperand(kInsnSecondOpnd);
494     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
495     MOperator prevMop = prevInsn->GetMachineOpcode();
496     MOperator currMop = insn.GetMachineOpcode();
497     Insn *newInsn = nullptr;
498     if (prevMop == MOP_winegrr || prevMop == MOP_xinegrr) {
499         MOperator newMop = (currMop == MOP_wcmprr) ? MOP_wcmnrr : MOP_xcmnrr;
500         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, ccReg, opnd1, opnd2));
501     } else {
502         MOperator newMop = (currMop == MOP_wcmprr) ? MOP_wcmnrrs : MOP_xcmnrrs;
503         Operand &shiftOpnd = prevInsn->GetOperand(kInsnThirdOpnd);
504         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, ccReg, opnd1, opnd2, shiftOpnd));
505     }
506     CHECK_FATAL(newInsn != nullptr, "must create newInsn");
507     bb.ReplaceInsn(insn, *newInsn);
508     /* update ssa info */
509     ssaInfo->ReplaceInsn(insn, *newInsn);
510     optSuccess = true;
511     SetCurrInsn(newInsn);
512     /* dump pattern info */
513     if (CG_PEEP_DUMP) {
514         std::vector<Insn *> prevs;
515         prevs.emplace_back(prevInsn);
516         DumpAfterPattern(prevs, &insn, newInsn);
517     }
518 }
519 
520 void LdrCmpPattern::Run(BB &bb, Insn &insn)
521 {
522     if (!CheckCondition(insn)) {
523         return;
524     }
525     bb.RemoveInsn(*ldr2);
526     bb.RemoveInsn(*ldr1);
527     bb.RemoveInsn(insn);
528     bb.RemoveInsn(*bne1);
529     prevLdr1->SetMOP(AArch64CG::kMd[MOP_xldr]);
530     prevLdr2->SetMOP(AArch64CG::kMd[MOP_xldr]);
531     prevCmp->SetMOP(AArch64CG::kMd[MOP_xcmprr]);
532 }
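/*
 * Rough shape of the CFG fragment collapsed above (placeholder registers and labels): two
 * consecutive 32-bit word compares branching to the same label become one 64-bit compare,
 *   prevBB:                            prevBB:
 *     ldr  w0, [x2]                      ldr  x0, [x2]
 *     ldr  w1, [x3]                      ldr  x1, [x3]
 *     cmp  w0, w1                        cmp  x0, x1
 *     bne  .Lfail            ===>        bne  .Lfail
 *   currBB:                            currBB:
 *     ldr  w0, [x2, #4]                  (both loads, the cmp and the bne are removed)
 *     ldr  w1, [x3, #4]
 *     cmp  w0, w1
 *     bne  .Lfail
 */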
533 
534 bool LdrCmpPattern::CheckCondition(Insn &insn)
535 {
536     /* a pattern which breaks the CFG;
537      * it is better suited to the peephole phase that runs after PGO */
538     if (currInsn != &insn) {
539         return false;
540     }
541     if (!SetInsns()) {
542         return false;
543     }
544     if (!CheckInsns()) {
545         return false;
546     }
547     auto &reg0 = static_cast<RegOperand &>(currInsn->GetOperand(kInsnSecondOpnd));
548     auto &reg1 = static_cast<RegOperand &>(currInsn->GetOperand(kInsnThirdOpnd));
549     return !(IfOperandIsLiveAfterInsn(reg0, insn) || IfOperandIsLiveAfterInsn(reg1, insn));
550 }
551 
552 /*
553  * mopSeq:
554  * ldr,ldr,cmp,bne
555  */
556 bool LdrCmpPattern::SetInsns()
557 {
558     if (!IsLdr(currInsn->GetPreviousMachineInsn())) {
559         return false;
560     }
561     ldr2 = currInsn->GetPreviousMachineInsn();
562     if (!IsLdr(ldr2->GetPreviousMachineInsn())) {
563         return false;
564     }
565     ldr1 = ldr2->GetPreviousMachineInsn();
566     /* ldr1 must be firstInsn in currBB */
567     if (currInsn->GetBB()->GetFirstMachineInsn() != ldr1) {
568         return false;
569     }
570     if (!IsBne(currInsn->GetNextMachineInsn())) {
571         return false;
572     }
573     bne1 = currInsn->GetNextMachineInsn();
574     BB *prevBB = currInsn->GetBB()->GetPrev();
575     /* single prev, single pred */
576     const MapleList<BB *> &predBBs = currInsn->GetBB()->GetPreds();
577     if ((prevBB == nullptr) || (predBBs.size() != 1) || (prevBB != *predBBs.begin())) {
578         return false;
579     }
580     if (!IsBne(prevBB->GetLastMachineInsn())) {
581         return false;
582     }
583     bne2 = prevBB->GetLastMachineInsn();
584     if (!IsCmp(bne2->GetPreviousMachineInsn())) {
585         return false;
586     }
587     prevCmp = bne2->GetPreviousMachineInsn();
588     prevLdr2 = prevCmp->GetPreviousMachineInsn();
589     if (prevCmp == nullptr || prevLdr2 == nullptr) {
590         return false;
591     }
592     if (!IsLdr(prevCmp->GetPreviousMachineInsn())) {
593         return false;
594     }
595     if (!IsLdr(prevLdr2->GetPreviousMachineInsn())) {
596         return false;
597     }
598     prevLdr1 = prevLdr2->GetPreviousMachineInsn();
599     return true;
600 }
601 
602 bool LdrCmpPattern::CheckInsns() const
603 {
604     auto &label1 = static_cast<LabelOperand &>(bne1->GetOperand(kInsnSecondOpnd));
605     auto &label2 = static_cast<LabelOperand &>(bne2->GetOperand(kInsnSecondOpnd));
606     if (label1.GetLabelIndex() != label2.GetLabelIndex()) {
607         return false;
608     }
609     auto &reg0 = static_cast<RegOperand &>(currInsn->GetOperand(kInsnSecondOpnd));
610     auto &reg1 = static_cast<RegOperand &>(currInsn->GetOperand(kInsnThirdOpnd));
611     regno_t regno0 = reg0.GetRegisterNumber();
612     regno_t regno1 = reg1.GetRegisterNumber();
613     if (regno0 == regno1) {
614         return false;
615     }
616     auto &mem1 = static_cast<MemOperand &>(ldr1->GetOperand(kInsnSecondOpnd));
617     auto &preMem1 = static_cast<MemOperand &>(prevLdr1->GetOperand(kInsnSecondOpnd));
618     auto &mem2 = static_cast<MemOperand &>(ldr2->GetOperand(kInsnSecondOpnd));
619     auto &preMem2 = static_cast<MemOperand &>(prevLdr2->GetOperand(kInsnSecondOpnd));
620     regno_t regnoBase0 = mem1.GetBaseRegister()->GetRegisterNumber();
621     regno_t regnoBase1 = mem2.GetBaseRegister()->GetRegisterNumber();
622     if (regnoBase0 == regnoBase1) {
623         return false;
624     }
625     if ((regno0 == regnoBase0) || (regno0 == regnoBase1) || (regno1 == regnoBase0) || (regno1 == regnoBase1)) {
626         return false;
627     }
628     if ((reg0 == static_cast<RegOperand &>(ldr2->GetOperand(kInsnFirstOpnd))) &&
629         (reg0 == static_cast<RegOperand &>(prevLdr2->GetOperand(kInsnFirstOpnd))) &&
630         (reg1 == static_cast<RegOperand &>(ldr1->GetOperand(kInsnFirstOpnd))) &&
631         (reg1 == static_cast<RegOperand &>(prevLdr1->GetOperand(kInsnFirstOpnd)))) {
632         if (MemOffet4Bit(preMem2, mem2) && MemOffet4Bit(preMem1, mem1)) {
633             return true;
634         }
635     }
636     if ((reg0 == static_cast<RegOperand &>(ldr1->GetOperand(kInsnFirstOpnd))) &&
637         (reg0 == static_cast<RegOperand &>(prevLdr1->GetOperand(kInsnFirstOpnd))) &&
638         (reg1 == static_cast<RegOperand &>(ldr2->GetOperand(kInsnFirstOpnd))) &&
639         (reg1 == static_cast<RegOperand &>(prevLdr2->GetOperand(kInsnFirstOpnd)))) {
640         if (MemOffet4Bit(preMem2, mem2) && MemOffet4Bit(preMem1, mem1)) {
641             return true;
642         }
643     }
644     return false;
645 }
646 
647 bool LdrCmpPattern::MemOffet4Bit(const MemOperand &m1, const MemOperand &m2) const
648 {
649     if (m1.GetAddrMode() != m2.GetAddrMode()) {
650         return false;
651     }
652     if (m1.GetAddrMode() != MemOperand::kAddrModeBOi) {
653         return false;
654     }
655     if (m1.GetBaseRegister()->GetRegisterNumber() != m2.GetBaseRegister()->GetRegisterNumber()) {
656         return false;
657     }
658     int64 offset = m2.GetOffsetOperand()->GetValue() - m1.GetOffsetOperand()->GetValue();
659     return offset == k4BitSizeInt;
660 }
661 
662 bool CsetCbzToBeqPattern::CheckCondition(Insn &insn)
663 {
664     MOperator curMop = insn.GetMachineOpcode();
665     bool isValidMop = false;
666     if (curMop == MOP_wtbnz || curMop == MOP_xtbnz || curMop == MOP_wtbz || curMop == MOP_xtbz) {
667         auto &immOpnd1 = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
668         if (immOpnd1.GetValue() == 0) {
669             labelOpnd = &static_cast<LabelOperand &>(insn.GetOperand(kInsnThirdOpnd));
670             isValidMop = true;
671         }
672     }
673     if (curMop == MOP_wcbz || curMop == MOP_xcbz || curMop == MOP_wcbnz || curMop == MOP_xcbnz) {
674         labelOpnd = &static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
675         isValidMop = true;
676     }
677     if (!isValidMop) {
678         return false;
679     }
680     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
681     prevInsn = ssaInfo->GetDefInsn(useReg);
682     if (prevInsn == nullptr) {
683         return false;
684     }
685     MOperator prevMop = prevInsn->GetMachineOpcode();
686     if (prevMop != MOP_wcsetrc && prevMop != MOP_xcsetrc) {
687         return false;
688     }
689     auto &ccReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
690     if (IsCCRegCrossVersion(*prevInsn, insn, ccReg)) {
691         return false;
692     }
693     return true;
694 }
695 
696 MOperator CsetCbzToBeqPattern::SelectNewMop(ConditionCode condCode, bool inverse) const
697 {
698     switch (condCode) {
699         case CC_NE:
700             return inverse ? MOP_beq : MOP_bne;
701         case CC_EQ:
702             return inverse ? MOP_bne : MOP_beq;
703         case CC_MI:
704             return inverse ? MOP_bpl : MOP_bmi;
705         case CC_PL:
706             return inverse ? MOP_bmi : MOP_bpl;
707         case CC_VS:
708             return inverse ? MOP_bvc : MOP_bvs;
709         case CC_VC:
710             return inverse ? MOP_bvs : MOP_bvc;
711         case CC_HI:
712             return inverse ? MOP_bls : MOP_bhi;
713         case CC_LS:
714             return inverse ? MOP_bhi : MOP_bls;
715         case CC_GE:
716             return inverse ? MOP_blt : MOP_bge;
717         case CC_LT:
718             return inverse ? MOP_bge : MOP_blt;
719         case CC_HS:
720             return inverse ? MOP_blo : MOP_bhs;
721         case CC_LO:
722             return inverse ? MOP_bhs : MOP_blo;
723         case CC_LE:
724             return inverse ? MOP_bgt : MOP_ble;
725         case CC_GT:
726             return inverse ? MOP_ble : MOP_bgt;
727         case CC_CS:
728             return inverse ? MOP_bcc : MOP_bcs;
729         default:
730             return MOP_undef;
731     }
732 }
733 
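/*
 * Minimal sketch of the rewrite done in Run() below (placeholder operands):
 *   cmp   w1, w2
 *   cset  w0, GE          ===>   cmp  w1, w2
 *   cbz   w0, .Label              blt  .Label
 * A cbz/tbz on the cset result inverts the condition; cbnz/tbnz keeps it.
 */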
734 void CsetCbzToBeqPattern::Run(BB &bb, Insn &insn)
735 {
736     if (!CheckCondition(insn)) {
737         return;
738     }
739     MOperator curMop = insn.GetMachineOpcode();
740     bool reverse = (curMop == MOP_wcbz || curMop == MOP_xcbz || curMop == MOP_wtbz || curMop == MOP_xtbz);
741     auto &condOpnd = static_cast<CondOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
742     MOperator newMop = SelectNewMop(condOpnd.GetCode(), reverse);
743     DEBUG_ASSERT(newMop != MOP_undef, "unknown condition code");
744     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnThirdOpnd), *labelOpnd);
745     bb.ReplaceInsn(insn, newInsn);
746     /* update ssa info */
747     ssaInfo->ReplaceInsn(insn, newInsn);
748     optSuccess = true;
749     SetCurrInsn(&newInsn);
750     /* dump pattern info */
751     if (CG_PEEP_DUMP) {
752         std::vector<Insn *> prevs;
753         prevs.emplace_back(prevInsn);
754         DumpAfterPattern(prevs, &insn, &newInsn);
755     }
756 }
757 
758 bool ExtLslToBitFieldInsertPattern::CheckCondition(Insn &insn)
759 {
760     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
761     prevInsn = ssaInfo->GetDefInsn(useReg);
762     if (prevInsn == nullptr) {
763         return false;
764     }
765     MOperator prevMop = prevInsn->GetMachineOpcode();
766     if (prevMop != MOP_xsxtw64 && prevMop != MOP_xuxtw64) {
767         return false;
768     }
769     auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
770     if (immOpnd.GetValue() > k32BitSize) {
771         return false;
772     }
773     return true;
774 }
775 
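/*
 * Rough example of the combine performed below (placeholder registers):
 *   sxtw x1, w2
 *   lsl  x0, x1, #3       ===>   sbfiz x0, x2, #3, #32
 * The uxtw/lsl form becomes ubfiz instead; the shift amount must not exceed 32.
 */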
776 void ExtLslToBitFieldInsertPattern::Run(BB &bb, Insn &insn)
777 {
778     if (!CheckCondition(insn)) {
779         return;
780     }
781     auto &prevSrcReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
782     cgFunc->InsertExtendSet(prevSrcReg.GetRegisterNumber());
783     MOperator newMop = (prevInsn->GetMachineOpcode() == MOP_xsxtw64) ? MOP_xsbfizrri6i6 : MOP_xubfizrri6i6;
784     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
785     auto &newImmOpnd1 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
786     ImmOperand &newImmOpnd2 = aarFunc->CreateImmOperand(k32BitSize, k6BitSize, false);
787     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), prevSrcReg,
788                                                         newImmOpnd1, newImmOpnd2);
789     bb.ReplaceInsn(insn, newInsn);
790     /* update ssa info */
791     ssaInfo->ReplaceInsn(insn, newInsn);
792     optSuccess = true;
793     /* dump pattern info */
794     if (CG_PEEP_DUMP) {
795         std::vector<Insn *> prevs;
796         prevs.emplace_back(prevInsn);
797         DumpAfterPattern(prevs, &insn, &newInsn);
798     }
799 }
800 
801 bool CselToCsetPattern::IsOpndDefByZero(const Insn &insn) const
802 {
803     MOperator movMop = insn.GetMachineOpcode();
804     switch (movMop) {
805         case MOP_xmovrr:
806         case MOP_wmovrr: {
807             return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
808         }
809         case MOP_wmovri32:
810         case MOP_xmovri64: {
811             auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
812             return immOpnd.IsZero();
813         }
814         default:
815             return false;
816     }
817 }
818 
819 bool CselToCsetPattern::IsOpndDefByOne(const Insn &insn) const
820 {
821     MOperator movMop = insn.GetMachineOpcode();
822     if ((movMop != MOP_wmovri32) && (movMop != MOP_xmovri64)) {
823         return false;
824     }
825     auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
826     return immOpnd.IsOne();
827 }
828 
829 bool CselToCsetPattern::IsOpndDefByAllOnes(const Insn &insn) const
830 {
831     MOperator movMop = insn.GetMachineOpcode();
832     if ((movMop != MOP_wmovri32) && (movMop != MOP_xmovri64)) {
833         return false;
834     }
835     bool is32Bits = (insn.GetOperandSize(kInsnFirstOpnd) == k32BitSize);
836     auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
837     return immOpnd.IsAllOnes() || (is32Bits && immOpnd.IsAllOnes32bit());
838 }
839 
840 bool CselToCsetPattern::CheckZeroCondition(const Insn &insn)
841 {
842     MOperator curMop = insn.GetMachineOpcode();
843     if (curMop != MOP_wcselrrrc && curMop != MOP_xcselrrrc) {
844         return false;
845     }
846     RegOperand &useReg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
847     RegOperand &useReg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
848     if ((useReg1.GetRegisterNumber() == RZR && useReg2.GetRegisterNumber() == RZR) ||
849         (useReg1.GetRegisterNumber() != RZR && useReg2.GetRegisterNumber() != RZR)) {
850         return false;
851     }
852     isZeroBefore = (useReg1.GetRegisterNumber() == RZR);
853     useReg = isZeroBefore ? &useReg2 : &useReg1;
854     if (ssaInfo) {
855         prevMovInsn = ssaInfo->GetDefInsn(*useReg);
856     } else {
857         prevMovInsn = insn.GetPreviousMachineInsn();
858     }
859     if (prevMovInsn == nullptr) {
860         return false;
861     }
862     MOperator prevMop = prevMovInsn->GetMachineOpcode();
863     if (prevMop != MOP_wmovri32 && prevMop != MOP_xmovri64) {
864         return false;
865     }
866     if (prevMovInsn->GetOperandSize(kInsnFirstOpnd) != insn.GetOperandSize(kInsnFirstOpnd)) {
867         return false;
868     }
869     if (!ssaInfo && (useReg->GetRegisterNumber() !=
870                      static_cast<RegOperand &>(prevMovInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber())) {
871         return false;
872     }
873     ImmOperand &immOpnd = static_cast<ImmOperand &>(prevMovInsn->GetOperand(kInsnSecondOpnd));
874     isOne = immOpnd.IsOne();
875     isAllOnes =
876         (prevMop == MOP_xmovri64 && immOpnd.IsAllOnes()) || (prevMop == MOP_wmovri32 && immOpnd.IsAllOnes32bit());
877     if (!isOne && !isAllOnes) {
878         return false;
879     }
880     return true;
881 }
882 
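/*
 * Sketch of the zero-operand cases matched above (placeholder registers):
 *   mov  w1, #1
 *   csel w0, w1, wzr, NE      ===>   cset  w0, NE      // non-zero value selected first
 *   mov  w1, #1
 *   csel w0, wzr, w1, NE      ===>   cset  w0, EQ      // zero first: condition reversed
 * A mov of all ones (-1) produces csetm instead of cset.
 */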
883 bool CselToCsetPattern::CheckCondition(Insn &insn)
884 {
885     if (CheckZeroCondition(insn)) {
886         return true;
887     }
888     if (!ssaInfo) {
889         return false;
890     }
891     MOperator curMop = insn.GetMachineOpcode();
892     if (curMop != MOP_wcselrrrc && curMop != MOP_xcselrrrc) {
893         return false;
894     }
895     auto &useOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
896     prevMovInsn1 = ssaInfo->GetDefInsn(useOpnd1);
897     if (prevMovInsn1 == nullptr) {
898         return false;
899     }
900     MOperator prevMop1 = prevMovInsn1->GetMachineOpcode();
901     if (prevMop1 != MOP_wmovri32 && prevMop1 != MOP_xmovri64 && prevMop1 != MOP_wmovrr && prevMop1 != MOP_xmovrr) {
902         return false;
903     }
904     auto &useOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
905     prevMovInsn2 = ssaInfo->GetDefInsn(useOpnd2);
906     if (prevMovInsn2 == nullptr) {
907         return false;
908     }
909     MOperator prevMop2 = prevMovInsn2->GetMachineOpcode();
910     if (prevMop2 != MOP_wmovri32 && prevMop2 != MOP_xmovri64 && prevMop2 != MOP_wmovrr && prevMop2 != MOP_xmovrr) {
911         return false;
912     }
913     return true;
914 }
915 
916 Insn *CselToCsetPattern::BuildCondSetInsn(const Insn &cselInsn) const
917 {
918     RegOperand &dest = static_cast<RegOperand &>(cselInsn.GetOperand(kInsnFirstOpnd));
919     bool is32Bits = (cselInsn.GetOperandSize(kInsnFirstOpnd) == k32BitSize);
920     ConditionCode ccCode = static_cast<CondOperand &>(cselInsn.GetOperand(kInsnFourthOpnd)).GetCode();
921     DEBUG_ASSERT(ccCode != kCcLast, "unknown cond, ccCode can't be kCcLast");
922     AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
923     Operand &rflag = func->GetOrCreateRflag();
924     if (isZeroBefore) {
925         ConditionCode inverseCondCode = GetReverseCC(ccCode);
926         if (inverseCondCode == kCcLast) {
927             return nullptr;
928         }
929         CondOperand &cond = func->GetCondOperand(inverseCondCode);
930         if (isOne) {
931             return &cgFunc->GetInsnBuilder()->BuildInsn((is32Bits ? MOP_wcsetrc : MOP_xcsetrc), dest, cond, rflag);
932         } else if (isAllOnes) {
933             return &cgFunc->GetInsnBuilder()->BuildInsn((is32Bits ? MOP_wcsetmrc : MOP_xcsetmrc), dest, cond, rflag);
934         }
935     } else {
936         CondOperand &cond = func->GetCondOperand(ccCode);
937         if (isOne) {
938             return &cgFunc->GetInsnBuilder()->BuildInsn((is32Bits ? MOP_wcsetrc : MOP_xcsetrc), dest, cond, rflag);
939         } else if (isAllOnes) {
940             return &cgFunc->GetInsnBuilder()->BuildInsn((is32Bits ? MOP_wcsetmrc : MOP_xcsetmrc), dest, cond, rflag);
941         }
942     }
943     return nullptr;
944 }
945 
946 void CselToCsetPattern::ZeroRun(BB &bb, Insn &insn)
947 {
948     if (!CheckCondition(insn)) {
949         return;
950     }
951     Insn *newInsn = BuildCondSetInsn(insn);
952     if (newInsn == nullptr) {
953         return;
954     }
955     bb.ReplaceInsn(insn, *newInsn);
956     if (ssaInfo) {
957         // update ssa info
958         ssaInfo->ReplaceInsn(insn, *newInsn);
959     } else if (static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber() ==
960                useReg->GetRegisterNumber()) {
961         bb.RemoveInsn(*prevMovInsn);
962     }
963     optSuccess = true;
964     SetCurrInsn(newInsn);
965     // dump pattern info
966     if (CG_PEEP_DUMP) {
967         std::vector<Insn *> prevs;
968         prevs.emplace_back(prevMovInsn);
969         DumpAfterPattern(prevs, &insn, newInsn);
970     }
971 }
972 
973 void CselToCsetPattern::Run(BB &bb, Insn &insn)
974 {
975     if (!CheckCondition(insn)) {
976         return;
977     }
978     if (CheckZeroCondition(insn)) {
979         ZeroRun(bb, insn);
980         return;
981     }
982     Operand &dstOpnd = insn.GetOperand(kInsnFirstOpnd);
983     uint32 dstOpndSize = insn.GetOperandSize(kInsnFirstOpnd);
984     MOperator newMop = MOP_undef;
985     Operand &condOpnd = insn.GetOperand(kInsnFourthOpnd);
986     Operand &rflag = insn.GetOperand(kInsnFifthOpnd);
987     Insn *newInsn = nullptr;
988     if (IsOpndDefByZero(*prevMovInsn2)) {
989         if (IsOpndDefByOne(*prevMovInsn1)) {
990             newMop = (dstOpndSize == k64BitSize ? MOP_xcsetrc : MOP_wcsetrc);
991         } else if (IsOpndDefByAllOnes(*prevMovInsn1)) {
992             newMop = (dstOpndSize == k64BitSize ? MOP_xcsetmrc : MOP_wcsetmrc);
993         }
994         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, dstOpnd, condOpnd, rflag));
995     } else if (IsOpndDefByZero(*prevMovInsn1)) {
996         auto &origCondOpnd = static_cast<CondOperand &>(condOpnd);
997         ConditionCode inverseCondCode = GetReverseCC(origCondOpnd.GetCode());
998         if (inverseCondCode == kCcLast) {
999             return;
1000         }
1001         auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1002         CondOperand &inverseCondOpnd = aarFunc->GetCondOperand(inverseCondCode);
1003         if (IsOpndDefByOne(*prevMovInsn2)) {
1004             newMop = (dstOpndSize == k64BitSize ? MOP_xcsetrc : MOP_wcsetrc);
1005         } else if (IsOpndDefByAllOnes(*prevMovInsn2)) {
1006             newMop = (dstOpndSize == k64BitSize ? MOP_xcsetmrc : MOP_wcsetmrc);
1007         }
1008         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, dstOpnd, inverseCondOpnd, rflag));
1009     }
1010     if (newMop == MOP_undef || newInsn == nullptr) {
1011         return;
1012     }
1013     bb.ReplaceInsn(insn, *newInsn);
1014     /* update ssa info */
1015     ssaInfo->ReplaceInsn(insn, *newInsn);
1016     optSuccess = true;
1017     SetCurrInsn(newInsn);
1018     /* dump pattern info */
1019     if (CG_PEEP_DUMP) {
1020         std::vector<Insn *> prevs;
1021         prevs.emplace_back(prevMovInsn1);
1022         prevs.emplace_back(prevMovInsn2);
1023         DumpAfterPattern(prevs, &insn, newInsn);
1024     }
1025 }
1026 
1027 bool CselToMovPattern::CheckCondition(Insn &insn)
1028 {
1029     MOperator mop = insn.GetMachineOpcode();
1030     if (mop != MOP_wcselrrrc && mop != MOP_xcselrrrc) {
1031         return false;
1032     }
1033 
1034     if (!RegOperand::IsSameReg(insn.GetOperand(kInsnSecondOpnd), insn.GetOperand(kInsnThirdOpnd))) {
1035         return false;
1036     }
1037 
1038     return true;
1039 }
1040 
1041 void CselToMovPattern::Run(BB &bb, Insn &insn)
1042 {
1043     if (!CheckCondition(insn)) {
1044         return;
1045     }
1046 
1047     MOperator newMop = insn.GetMachineOpcode() == MOP_wcselrrrc ? MOP_wmovrr : MOP_xmovrr;
1048     Insn &newInsn =
1049         cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), insn.GetOperand(kInsnSecondOpnd));
1050 
1051     bb.ReplaceInsn(insn, newInsn);
1052 }
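/*
 * Trivial case handled above (placeholder registers): both selected inputs are the same
 * register, so the select degenerates into a move:
 *   csel w0, w1, w1, NE   ===>   mov w0, w1
 */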
1053 
1054 bool CselToCsincRemoveMovPattern::IsOpndMovOneAndNewOpndOpt(const Insn &curInsn)
1055 {
1056     auto &insnThirdOpnd = static_cast<RegOperand &>(curInsn.GetOperand(kInsnThirdOpnd));
1057     auto &insnSecondOpnd = static_cast<RegOperand &>(curInsn.GetOperand(kInsnSecondOpnd));
1058     auto &origCondOpnd = static_cast<CondOperand &>(curInsn.GetOperand(kInsnFourthOpnd));
1059     Insn *insnThirdOpndDefInsn = ssaInfo->GetDefInsn(insnThirdOpnd);
1060     Insn *insnSecondOpndDefInsn = ssaInfo->GetDefInsn(insnSecondOpnd);
1061     if (insnThirdOpndDefInsn == nullptr || insnSecondOpndDefInsn == nullptr) {
1062         return false;
1063     }
1064     MOperator insnThirdOpndDefMop = insnThirdOpndDefInsn->GetMachineOpcode();
1065     MOperator insnSecondOpndDefMop = insnSecondOpndDefInsn->GetMachineOpcode();
1066     if (insnThirdOpndDefMop == MOP_wmovri32 || insnThirdOpndDefMop == MOP_xmovri64) {
1067         prevMovInsn = insnThirdOpndDefInsn;
1068     } else if (insnSecondOpndDefMop == MOP_wmovri32 || insnSecondOpndDefMop == MOP_xmovri64) {
1069         prevMovInsn = insnSecondOpndDefInsn;
1070         needReverseCond = true;
1071     } else {
1072         return false;
1073     }
1074     auto &prevMovImmOpnd = static_cast<ImmOperand &>(prevMovInsn->GetOperand(kInsnSecondOpnd));
1075     auto val = prevMovImmOpnd.GetValue();
1076     if (val != 1) {
1077         return false;
1078     }
1079     if (needReverseCond) {
1080         newSecondOpnd = &insnThirdOpnd;
1081         ConditionCode inverseCondCode = GetReverseCC(origCondOpnd.GetCode());
1082         if (inverseCondCode == kCcLast) {
1083             return false;
1084         }
1085         auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1086         CondOperand &inverseCondOpnd = aarFunc->GetCondOperand(inverseCondCode);
1087         cond = &inverseCondOpnd;
1088     } else {
1089         newSecondOpnd = &insnSecondOpnd;
1090         cond = &origCondOpnd;
1091     }
1092     return true;
1093 }
1094 
1095 bool CselToCsincRemoveMovPattern::CheckCondition(Insn &insn)
1096 {
1097     MOperator curMop = insn.GetMachineOpcode();
1098     if (curMop != MOP_xcselrrrc && curMop != MOP_wcselrrrc) {
1099         return false;
1100     }
1101     if (!IsOpndMovOneAndNewOpndOpt(insn)) {
1102         return false;
1103     }
1104     return true;
1105 }
1106 
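/*
 * Rough shape of the rewrite in Run() below (placeholder registers):
 *   mov  w1, #1
 *   csel w0, w2, w1, LT       ===>   csinc w0, w2, wzr, LT
 * If the #1 feeds the first selected input instead, the condition is reversed first.
 */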
1107 void CselToCsincRemoveMovPattern::Run(BB &bb, Insn &insn)
1108 {
1109     if (!CheckCondition(insn)) {
1110         return;
1111     }
1112     uint32 dstOpndSize = insn.GetOperandSize(kInsnFirstOpnd);
1113     MOperator newMop = (dstOpndSize == k64ByteSize) ? MOP_xcsincrrrc : MOP_wcsincrrrc;
1114     Operand &ccReg = insn.GetOperand(kInsnFifthOpnd);
1115     RegOperand &zeroOpnd = cgFunc->GetZeroOpnd(dstOpndSize);
1116     auto &insnFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1117     Insn &newInsn =
1118         cgFunc->GetInsnBuilder()->BuildInsn(newMop, insnFirstOpnd, *static_cast<RegOperand *>(newSecondOpnd), zeroOpnd,
1119                                             *static_cast<CondOperand *>(cond), ccReg);
1120     bb.ReplaceInsn(insn, newInsn);
1121     // update ssa info
1122     ssaInfo->ReplaceInsn(insn, newInsn);
1123     optSuccess = true;
1124     SetCurrInsn(&newInsn);
1125     // dump pattern info
1126     if (CG_PEEP_DUMP) {
1127         std::vector<Insn *> prevs;
1128         prevs.emplace_back(prevMovInsn);
1129         DumpAfterPattern(prevs, &insn, &newInsn);
1130     }
1131 }
1132 
1133 bool CsetToCincPattern::CheckDefInsn(const RegOperand &opnd, Insn &insn)
1134 {
1135     Insn *tempDefInsn = ssaInfo->GetDefInsn(opnd);
1136     if (tempDefInsn != nullptr && tempDefInsn->GetBB()->GetId() == insn.GetBB()->GetId()) {
1137         InsnSet useInsns = GetAllUseInsn(opnd);
1138         if (useInsns.size() != 1) {
1139             return false;
1140         }
1141         MOperator mop = tempDefInsn->GetMachineOpcode();
1142         if (mop == MOP_wcsetrc || mop == MOP_xcsetrc) {
1143             /* defInsn and tempDefInsn are in the same BB; select the def closer to the use insn (add) */
1144             if (!CheckRegTyCc(*tempDefInsn, insn)) {
1145                 return false;
1146             }
1147             defInsn = tempDefInsn;
1148             return true;
1149         }
1150     }
1151     return false;
1152 }
1153 
1154 /* If a new ConditionCode is generated after csetInsn, this optimization is not performed. */
1155 bool CsetToCincPattern::CheckRegTyCc(const Insn &tempDefInsn, Insn &insn) const
1156 {
1157     bool betweenUseAndDef = false;
1158     FOR_BB_INSNS_REV(bbInsn, insn.GetBB())
1159     {
1160         if (!bbInsn->IsMachineInstruction()) {
1161             continue;
1162         }
1163         if (bbInsn->GetId() == insn.GetId()) {
1164             betweenUseAndDef = true;
1165         }
1166         if (betweenUseAndDef) {
1167             /* select the def closer to the use insn (add) */
1168             if (defInsn != nullptr && bbInsn->GetId() == defInsn->GetId()) {
1169                 return false;
1170             } else if (bbInsn->GetId() == tempDefInsn.GetId()) {
1171                 return true;
1172             } else if (static_cast<RegOperand &>(bbInsn->GetOperand(kInsnFirstOpnd)).IsOfCC()) {
1173                 return false;
1174             } else if (bbInsn->IsCall()) {
1175                 return false;
1176             }
1177         }
1178     }
1179     return false;
1180 }
1181 
1182 bool CsetToCincPattern::CheckCondition(Insn &insn)
1183 {
1184     RegOperand &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1185     RegOperand &opnd3 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
1186     bool opnd2Cset = CheckDefInsn(opnd2, insn);
1187     bool opnd3Cset = CheckDefInsn(opnd3, insn);
1188     if (opnd3Cset) {
1189         csetOpnd1 = kInsnThirdOpnd;
1190         return true;
1191     } else if (opnd2Cset) {
1192         csetOpnd1 = kInsnSecondOpnd;
1193         return true;
1194     }
1195     return false;
1196 }
1197 
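/*
 * Intended rewrite (placeholder registers): an add of a cset result becomes a conditional
 * increment, e.g.
 *   cset w1, GT
 *   add  w0, w2, w1       ===>   cinc w0, w2, GT
 * provided the cset result has a single use and no other flag definition or call sits
 * between the cset and the add.
 */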
1198 void CsetToCincPattern::Run(BB &bb, Insn &insn)
1199 {
1200     RegOperand &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1201     /* Exclude other patterns that have been optimized. */
1202     Insn *newAddInsn = ssaInfo->GetDefInsn(opnd1);
1203     if (newAddInsn == nullptr) {
1204         return;
1205     }
1206     MOperator mop = newAddInsn->GetMachineOpcode();
1207     if (mop != MOP_waddrrr && mop != MOP_xaddrrr) {
1208         return;
1209     }
1210     if (!CheckCondition(insn) || defInsn == nullptr || csetOpnd1 == 0) {
1211         return;
1212     }
1213 
1214     MOperator newMop = MOP_undef;
1215     int32 cincOpnd2 = (csetOpnd1 == kInsnSecondOpnd) ? kInsnThirdOpnd : kInsnSecondOpnd;
1216     RegOperand &opnd2 = static_cast<RegOperand &>(insn.GetOperand(static_cast<uint32>(cincOpnd2)));
1217     Operand &condOpnd = defInsn->GetOperand(kInsnSecondOpnd);
1218     Operand &rflag = defInsn->GetOperand(kInsnThirdOpnd);
1219     Insn *newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd1, opnd2, condOpnd, rflag));
1220     bb.ReplaceInsn(insn, *newInsn);
1221     /* update ssa info */
1222     ssaInfo->ReplaceInsn(insn, *newInsn);
1223     optSuccess = true;
1224     SetCurrInsn(newInsn);
1225     /* dump pattern info */
1226     if (CG_PEEP_DUMP) {
1227         std::vector<Insn *> prevs;
1228         (void)prevs.emplace_back(defInsn);
1229         DumpAfterPattern(prevs, &insn, newInsn);
1230     }
1231 }
1232 
1233 bool AndCmpBranchesToTbzPattern::CheckAndSelectPattern(const Insn &currInsn)
1234 {
1235     MOperator curMop = currInsn.GetMachineOpcode();
1236     MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
1237     auto &andImmOpnd = static_cast<ImmOperand &>(prevAndInsn->GetOperand(kInsnThirdOpnd));
1238     auto &cmpImmOpnd = static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd));
1239     if (cmpImmOpnd.GetValue() == 0) {
1240         tbzImmVal = GetLogValueAtBase2(andImmOpnd.GetValue());
1241         if (tbzImmVal < 0) {
1242             return false;
1243         }
1244         switch (curMop) {
1245             case MOP_beq:
1246                 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbz : MOP_xtbz;
1247                 break;
1248             case MOP_bne:
1249                 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbnz : MOP_xtbnz;
1250                 break;
1251             default:
1252                 return false;
1253         }
1254     } else {
1255         tbzImmVal = GetLogValueAtBase2(andImmOpnd.GetValue());
1256         int64 tmpVal = GetLogValueAtBase2(cmpImmOpnd.GetValue());
1257         if (tbzImmVal < 0 || tmpVal < 0 || tbzImmVal != tmpVal) {
1258             return false;
1259         }
1260         switch (curMop) {
1261             case MOP_beq:
1262                 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbnz : MOP_xtbnz;
1263                 break;
1264             case MOP_bne:
1265                 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbz : MOP_xtbz;
1266                 break;
1267             default:
1268                 return false;
1269         }
1270     }
1271     return true;
1272 }
1273 
1274 bool AndCmpBranchesToTbzPattern::CheckCondition(Insn &insn)
1275 {
1276     MOperator curMop = insn.GetMachineOpcode();
1277     if (curMop != MOP_beq && curMop != MOP_bne) {
1278         return false;
1279     }
1280     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1281     prevCmpInsn = ssaInfo->GetDefInsn(ccReg);
1282     if (prevCmpInsn == nullptr) {
1283         return false;
1284     }
1285     MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
1286     if (prevCmpMop != MOP_wcmpri && prevCmpMop != MOP_xcmpri) {
1287         return false;
1288     }
1289     auto &cmpUseReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
1290     prevAndInsn = ssaInfo->GetDefInsn(cmpUseReg);
1291     if (prevAndInsn == nullptr) {
1292         return false;
1293     }
1294     MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
1295     if (prevAndMop != MOP_wandrri12 && prevAndMop != MOP_xandrri13) {
1296         return false;
1297     }
1298     CHECK_FATAL(prevAndInsn->GetOperand(kInsnFirstOpnd).GetSize() == prevCmpInsn->GetOperand(kInsnSecondOpnd).GetSize(),
1299                 "def-use reg size must be same based-on ssa");
1300     if (!CheckAndSelectPattern(insn)) {
1301         return false;
1302     }
1303     return true;
1304 }
1305 
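/*
 * Sketch of the two forms folded into a test-bit branch (placeholder operands):
 *   and w1, w0, #4 ; cmp w1, #0 ; beq .L   ===>   tbz  w0, #2, .L
 *   and w1, w0, #4 ; cmp w1, #4 ; beq .L   ===>   tbnz w0, #2, .L
 * A bne instead of beq swaps the choice of tbz/tbnz in each case.
 */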
1306 void AndCmpBranchesToTbzPattern::Run(BB &bb, Insn &insn)
1307 {
1308     if (!CheckCondition(insn)) {
1309         return;
1310     }
1311     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1312     auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
1313     ImmOperand &tbzImmOpnd = aarFunc->CreateImmOperand(tbzImmVal, k8BitSize, false);
1314     Insn &newInsn =
1315         cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevAndInsn->GetOperand(kInsnSecondOpnd), tbzImmOpnd, labelOpnd);
1316     if (!VERIFY_INSN(&newInsn)) {
1317         return;
1318     }
1319     bb.ReplaceInsn(insn, newInsn);
1320     /* update ssa info */
1321     ssaInfo->ReplaceInsn(insn, newInsn);
1322     optSuccess = true;
1323     SetCurrInsn(&newInsn);
1324     /* dump pattern info */
1325     if (CG_PEEP_DUMP) {
1326         std::vector<Insn *> prevs;
1327         prevs.emplace_back(prevAndInsn);
1328         prevs.emplace_back(prevCmpInsn);
1329         DumpAfterPattern(prevs, &insn, &newInsn);
1330     }
1331 }
1332 
1333 bool AndAndCmpBranchesToTstPattern::CheckAndSelectPattern()
1334 {
1335     MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
1336     MOperator prevPrevAndMop = prevPrevAndInsn->GetMachineOpcode();
1337     if (prevAndMop != prevPrevAndMop) {
1338         return false;
1339     }
1340     auto &prevAndImmOpnd = static_cast<ImmOperand &>(prevAndInsn->GetOperand(kInsnThirdOpnd));
1341     auto &prevPrevAndImmOpnd = static_cast<ImmOperand &>(prevPrevAndInsn->GetOperand(kInsnThirdOpnd));
1342     if (prevAndImmOpnd.GetValue() == prevPrevAndImmOpnd.GetValue() &&
1343         ((static_cast<uint64>(prevAndImmOpnd.GetValue()) & static_cast<uint64>(prevAndImmOpnd.GetValue() + 1)) == 0) &&
1344         ((static_cast<uint64>(prevPrevAndImmOpnd.GetValue()) &
1345           static_cast<uint64>(prevPrevAndImmOpnd.GetValue() + 1)) == 0)) {
1346         bool isWOrX = (prevAndMop == MOP_wandrri12 && prevPrevAndMop == MOP_wandrri12);
1347         newEorMop = isWOrX ? MOP_weorrrr : MOP_xeorrrr;
1348         newTstMop = isWOrX ? MOP_wtstri32 : MOP_xtstri64;
1349         tstImmVal = prevAndImmOpnd.GetValue();
1350         return true;
1351     }
1352     return false;
1353 }
1354 
1355 bool AndAndCmpBranchesToTstPattern::CheckCondInsn(const Insn &insn)
1356 {
1357     if (insn.GetMachineOpcode() == MOP_bne || insn.GetMachineOpcode() == MOP_beq) {
1358         ccReg = static_cast<RegOperand *>(&insn.GetOperand(kInsnFirstOpnd));
1359         return true;
1360     }
1361     if (!insn.IsCondDef()) {
1362         return false;
1363     }
1364     CondOperand *condOpnd = nullptr;
1365     for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
1366         if (insn.GetDesc()->GetOpndDes(i) == &OpndDesc::Cond) {
1367             condOpnd = static_cast<CondOperand *>(&insn.GetOperand(i));
1368         } else if (insn.GetDesc()->GetOpndDes(i) == &OpndDesc::CCS) {
1369             ccReg = static_cast<RegOperand *>(&insn.GetOperand(i));
1370         }
1371     }
1372     if (condOpnd == nullptr || ccReg == nullptr) {
1373         return false;
1374     }
1375     return (condOpnd->GetCode() == CC_NE || condOpnd->GetCode() == CC_EQ);
1376 }
1377 
1378 Insn *AndAndCmpBranchesToTstPattern::CheckAndGetPrevAndDefInsn(const RegOperand &regOpnd) const
1379 {
1380     if (!regOpnd.IsSSAForm()) {
1381         return nullptr;
1382     }
1383     auto *regVersion = ssaInfo->FindSSAVersion(regOpnd.GetRegisterNumber());
1384     DEBUG_ASSERT(regVersion != nullptr, "UseVRegVersion must not be null based on ssa");
1385     if (regVersion->GetAllUseInsns().size() != 1) {  // only one use point can do opt
1386         return nullptr;
1387     }
1388     auto *defInfo = regVersion->GetDefInsnInfo();
1389     if (defInfo == nullptr) {
1390         return nullptr;
1391     }
1392     auto andMop = defInfo->GetInsn()->GetMachineOpcode();
1393     if (andMop != MOP_wandrri12 && andMop != MOP_xandrri13) {
1394         return nullptr;
1395     }
1396     return defInfo->GetInsn();
1397 }
1398 
1399 bool AndAndCmpBranchesToTstPattern::CheckCondition(Insn &insn)
1400 {
1401     if (!CheckCondInsn(insn)) {
1402         return false;
1403     }
1404     prevCmpInsn = ssaInfo->GetDefInsn(*ccReg);
1405     if (prevCmpInsn == nullptr) {
1406         return false;
1407     }
1408     MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
1409     if (prevCmpMop != MOP_wcmprr && prevCmpMop != MOP_xcmprr) {
1410         return false;
1411     }
1412     prevAndInsn = CheckAndGetPrevAndDefInsn(static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd)));
1413     if (prevAndInsn == nullptr) {
1414         return false;
1415     }
1416     prevPrevAndInsn = CheckAndGetPrevAndDefInsn(static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd)));
1417     if (prevPrevAndInsn == nullptr) {
1418         return false;
1419     }
1420     if (!CheckAndSelectPattern()) {
1421         return false;
1422     }
1423     return true;
1424 }
1425 
1426 void AndAndCmpBranchesToTstPattern::Run(BB &bb, Insn &insn)
1427 {
1428     if (!CheckCondition(insn)) {
1429         return;
1430     }
1431     auto *a64Func = static_cast<AArch64CGFunc *>(cgFunc);
1432     regno_t tmpRegNO = 0;
1433     Operand &andOpnd = prevAndInsn->GetOperand(kInsnSecondOpnd);
1434     auto *tmpDefOpnd = a64Func->CreateVirtualRegisterOperand(tmpRegNO, andOpnd.GetSize(),
1435                                                              static_cast<RegOperand &>(andOpnd).GetRegisterType());
1436     Insn &newEorInsn = cgFunc->GetInsnBuilder()->BuildInsn(
1437         newEorMop, *tmpDefOpnd, prevPrevAndInsn->GetOperand(kInsnSecondOpnd), prevAndInsn->GetOperand(kInsnSecondOpnd));
1438     BB *preCmpBB = prevCmpInsn->GetBB();
1439     (void)preCmpBB->InsertInsnBefore(*prevCmpInsn, newEorInsn);
1440     /* update ssa info */
1441     ssaInfo->CreateNewInsnSSAInfo(newEorInsn);
1442     ImmOperand &tstImmOpnd = a64Func->CreateImmOperand(tstImmVal, k8BitSize, false);
1443     Insn &newTstInsn = cgFunc->GetInsnBuilder()->BuildInsn(newTstMop, prevCmpInsn->GetOperand(kInsnFirstOpnd),
1444                                                            newEorInsn.GetOperand(kInsnFirstOpnd), tstImmOpnd);
1445     bb.ReplaceInsn(*prevCmpInsn, newTstInsn);
1446     /* update ssa info */
1447     ssaInfo->ReplaceInsn(*prevCmpInsn, newTstInsn);
1448     optSuccess = true;
1449     /* dump pattern info */
1450     if (CG_PEEP_DUMP) {
1451         std::vector<Insn *> prevs;
1452         prevs.emplace_back(prevPrevAndInsn);
1453         prevs.emplace_back(prevAndInsn);
1454         prevs.emplace_back(prevCmpInsn);
1455         DumpAfterPattern(prevs, &newEorInsn, &newTstInsn);
1456     }
1457 }
1458 
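/*
 * Illustrative sketch of ZeroCmpBranchesToTbzPattern: a signed compare against zero
 * (or against the zero register) followed by bge/blt/bgt/ble is turned into a test
 * of the sign bit. Register names and the label are hypothetical:
 *  cmp  w0, #0
 *  bge  .label
 *  ===>
 *  tbz  w0, #31, .label   <-----(bit 63 for an x-register; blt maps to tbnz)
 */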
1459 bool ZeroCmpBranchesToTbzPattern::CheckAndSelectPattern(const Insn &currInsn)
1460 {
1461     MOperator currMop = currInsn.GetMachineOpcode();
1462     MOperator prevMop = prevInsn->GetMachineOpcode();
1463     switch (prevMop) {
1464         case MOP_wcmpri:
1465         case MOP_xcmpri: {
1466             regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1467             auto &immOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1468             if (immOpnd.GetValue() != 0) {
1469                 return false;
1470             }
1471             switch (currMop) {
1472                 case MOP_bge:
1473                     newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
1474                     break;
1475                 case MOP_blt:
1476                     newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
1477                     break;
1478                 default:
1479                     return false;
1480             }
1481             break;
1482         }
1483         case MOP_wcmprr:
1484         case MOP_xcmprr: {
1485             auto &regOpnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1486             auto &regOpnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1487             if (!IsZeroRegister(regOpnd0) && !IsZeroRegister(regOpnd1)) {
1488                 return false;
1489             }
1490             switch (currMop) {
1491                 case MOP_bge:
1492                     if (IsZeroRegister(regOpnd1)) {
1493                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1494                         newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
1495                     } else {
1496                         return false;
1497                     }
1498                     break;
1499                 case MOP_ble:
1500                     if (IsZeroRegister(regOpnd0)) {
1501                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1502                         newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
1503                     } else {
1504                         return false;
1505                     }
1506                     break;
1507                 case MOP_blt:
1508                     if (IsZeroRegister(regOpnd1)) {
1509                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1510                         newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
1511                     } else {
1512                         return false;
1513                     }
1514                     break;
1515                 case MOP_bgt:
1516                     if (IsZeroRegister(regOpnd0)) {
1517                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1518                         newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
1519                     } else {
1520                         return false;
1521                     }
1522                     break;
1523                 default:
1524                     return false;
1525             }
1526             break;
1527         }
1530         default:
1531             return false;
1532     }
1533     return true;
1534 }
1535 
1536 bool ZeroCmpBranchesToTbzPattern::CheckCondition(Insn &insn)
1537 {
1538     MOperator curMop = insn.GetMachineOpcode();
1539     if (curMop != MOP_bge && curMop != MOP_ble && curMop != MOP_blt && curMop != MOP_bgt) {
1540         return false;
1541     }
1542     CHECK_FATAL(insn.GetOperand(kInsnSecondOpnd).IsLabel(), "must be labelOpnd");
1543     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1544     prevInsn = ssaInfo->GetDefInsn(ccReg);
1545     if (prevInsn == nullptr) {
1546         return false;
1547     }
1548     MOperator prevMop = prevInsn->GetMachineOpcode();
1549     if (prevMop != MOP_wcmpri && prevMop != MOP_xcmpri && prevMop != MOP_wcmprr && prevMop != MOP_xcmprr) {
1550         return false;
1551     }
1552     if (!CheckAndSelectPattern(insn)) {
1553         return false;
1554     }
1555     return true;
1556 }
1557 
1558 void ZeroCmpBranchesToTbzPattern::Run(BB &bb, Insn &insn)
1559 {
1560     if (!CheckCondition(insn)) {
1561         return;
1562     }
1563     CHECK_FATAL(regOpnd != nullptr, "must have regOpnd");
1564     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1565     ImmOperand &bitOpnd = aarFunc->CreateImmOperand(
1566         (regOpnd->GetSize() <= k32BitSize) ? (k32BitSize - 1) : (k64BitSize - 1), k8BitSize, false);
1567     auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
1568     Insn &newInsn =
1569         cgFunc->GetInsnBuilder()->BuildInsn(newMop, *static_cast<RegOperand *>(regOpnd), bitOpnd, labelOpnd);
1570     if (!VERIFY_INSN(&newInsn)) {
1571         return;
1572     }
1573     bb.ReplaceInsn(insn, newInsn);
1574     /* update ssa info */
1575     ssaInfo->ReplaceInsn(insn, newInsn);
1576     optSuccess = true;
1577     SetCurrInsn(&newInsn);
1578     /* dump pattern info */
1579     if (CG_PEEP_DUMP) {
1580         std::vector<Insn *> prevs;
1581         prevs.emplace_back(prevInsn);
1582         DumpAfterPattern(prevs, &insn, &newInsn);
1583     }
1584 }
1585 
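/*
 * Illustrative sketch of LsrAndToUbfxPattern (operands are hypothetical):
 *  lsr  w1, w2, #m
 *  and  w0, w1, #((1 << n) - 1)
 *  ===>
 *  ubfx w0, w2, #m, #n    <-----(requires m + n within the register width)
 */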
1586 bool LsrAndToUbfxPattern::CheckIntersectedCondition(const Insn &insn)
1587 {
1588     MOperator curMop = insn.GetMachineOpcode();
1589     MOperator prevMop = prevInsn->GetMachineOpcode();
1590     int64 lsb = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
1591     int64 width = __builtin_popcountll(static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue());
1592     if (lsb + width <= k32BitSize) {
1593         return true;
1594     } else if (curMop == MOP_wandrri12 && prevMop == MOP_xlsrrri6 && lsb >= k32BitSize && (lsb + width) <= k64BitSize) {
1595         isWXSumOutOfRange = true;
1596         return isWXSumOutOfRange;
1597     }
1598     return false;
1599 }
1600 
1601 bool LsrAndToUbfxPattern::CheckCondition(Insn &insn)
1602 {
1603     MOperator curMop = insn.GetMachineOpcode();
1604     if (curMop != MOP_wandrri12 && curMop != MOP_xandrri13) {
1605         return false;
1606     }
1607     int64 immValue = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
1608     /* and_imm value must be of the form (1 << n) - 1 */
1609     if (immValue <= 0 || (((static_cast<uint64>(immValue)) & (static_cast<uint64>(immValue) + 1)) != 0)) {
1610         return false;
1611     }
1612     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1613     prevInsn = ssaInfo->GetDefInsn(useReg);
1614     if (prevInsn == nullptr) {
1615         return false;
1616     }
1617     MOperator prevMop = prevInsn->GetMachineOpcode();
1618     if (prevMop != MOP_wlsrrri5 && prevMop != MOP_xlsrrri6) {
1619         return false;
1620     }
1621     if (((curMop == MOP_wandrri12 && prevMop == MOP_xlsrrri6) ||
1622          (curMop == MOP_xandrri13 && prevMop == MOP_wlsrrri5)) &&
1623         !CheckIntersectedCondition(insn)) {
1624         return false;
1625     }
1626     auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
1627     auto &currUseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1628     /* check def-use reg size found by ssa */
1629     CHECK_FATAL(prevDstOpnd.GetSize() == currUseOpnd.GetSize(), "def-use reg size must be same");
1630     auto &andDstReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1631     VRegVersion *andDstVersion = ssaInfo->FindSSAVersion(andDstReg.GetRegisterNumber());
1632     DEBUG_ASSERT(andDstVersion != nullptr, "find destReg Version failed");
1633     for (auto useDUInfoIt : andDstVersion->GetAllUseInsns()) {
1634         if (useDUInfoIt.second == nullptr) {
1635             continue;
1636         }
1637         Insn *useInsn = (useDUInfoIt.second)->GetInsn();
1638         if (useInsn == nullptr) {
1639             continue;
1640         }
1641         MOperator useMop = useInsn->GetMachineOpcode();
1642         /* combine [and & cbz --> tbz] first, to eliminate more insns because of incomplete copy prop */
1643         if (useMop == MOP_wcbz || useMop == MOP_xcbz || useMop == MOP_wcbnz || useMop == MOP_xcbnz) {
1644             return false;
1645         }
1646     }
1647     return true;
1648 }
1649 
1650 void LsrAndToUbfxPattern::Run(BB &bb, Insn &insn)
1651 {
1652     if (!CheckCondition(insn)) {
1653         return;
1654     }
1655     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1656     // If isWXSumOutOfRange is true, the new ubfx insn will be 64-bit
1657     bool is64Bits = isWXSumOutOfRange ? true : (insn.GetOperandSize(kInsnFirstOpnd) == k64BitSize);
1658     Operand &resOpnd = insn.GetOperand(kInsnFirstOpnd);
1659     Operand &srcOpnd = prevInsn->GetOperand(kInsnSecondOpnd);
1660     int64 immVal1 = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
1661     Operand &immOpnd1 = is64Bits ? aarFunc->CreateImmOperand(immVal1, kMaxImmVal6Bits, false)
1662                                  : aarFunc->CreateImmOperand(immVal1, kMaxImmVal5Bits, false);
1663     int64 tmpVal = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
1664     int64 immVal2 = __builtin_ffsll(tmpVal + 1) - 1;
1665     if ((immVal2 < k1BitSize) || (is64Bits && (immVal1 + immVal2) > k64BitSize) ||
1666         (!is64Bits && (immVal1 + immVal2) > k32BitSize)) {
1667         return;
1668     }
1669     Operand &immOpnd2 = is64Bits ? aarFunc->CreateImmOperand(immVal2, kMaxImmVal6Bits, false)
1670                                  : aarFunc->CreateImmOperand(immVal2, kMaxImmVal5Bits, false);
1671     MOperator newMop = (is64Bits ? MOP_xubfxrri6i6 : MOP_wubfxrri5i5);
1672     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, srcOpnd, immOpnd1, immOpnd2);
1673     bb.ReplaceInsn(insn, newInsn);
1674     /* update ssa info */
1675     ssaInfo->ReplaceInsn(insn, newInsn);
1676     optSuccess = true;
1677     SetCurrInsn(&newInsn);
1678     /* dump pattern info */
1679     if (CG_PEEP_DUMP) {
1680         std::vector<Insn *> prevs;
1681         prevs.emplace_back(prevInsn);
1682         DumpAfterPattern(prevs, &insn, &newInsn);
1683     }
1684 }
1685 
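/*
 * Illustrative sketch of LslAndToUbfizPattern (operands are hypothetical); the
 * reversed and-then-lsl order is handled as well:
 *  lsl  w1, w2, #m
 *  and  w0, w1, #(((1 << n) - 1) << m)
 *  ===>
 *  ubfiz w0, w2, #m, #n
 */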
1686 bool LslAndToUbfizPattern::CheckCondition(Insn &insn)
1687 {
1688     MOperator mop = insn.GetMachineOpcode();
1689     RegOperand &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1690     defInsn = ssaInfo->GetDefInsn(opnd2);
1691     InsnSet useInsns = GetAllUseInsn(opnd2);
1692     if (useInsns.size() != 1 || defInsn == nullptr) {
1693         return false;
1694     }
1695     MOperator defMop = defInsn->GetMachineOpcode();
1696     if ((mop == MOP_wandrri12 || mop == MOP_xandrri13) && (defMop == MOP_wlslrri5 || defMop == MOP_xlslrri6)) {
1697         return true;
1698     } else if ((defMop == MOP_wandrri12 || defMop == MOP_xandrri13) && (mop == MOP_wlslrri5 || mop == MOP_xlslrri6)) {
1699         /* for lsl w1, w2, #n: if insn and w1's useInsn can be propagated, skip this pattern */
1700         for (auto *useInsn : GetAllUseInsn(static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)))) {
1701             if (useInsn == nullptr) {
1702                 continue;
1703             }
1704             if (!CheckUseInsnMop(*useInsn)) {
1705                 return false;
1706             }
1707         }
1708         return true;
1709     }
1710     return false;
1711 }
1712 
1713 bool LslAndToUbfizPattern::CheckUseInsnMop(const Insn &useInsn) const
1714 {
1715     if (useInsn.IsLoad() || useInsn.IsStore()) {
1716         return false;
1717     }
1718     MOperator useMop = useInsn.GetMachineOpcode();
1719     switch (useMop) {
1720         case MOP_xeorrrr:
1721         case MOP_xeorrrrs:
1722         case MOP_weorrrr:
1723         case MOP_weorrrrs:
1724         case MOP_xiorrrr:
1725         case MOP_xiorrrrs:
1726         case MOP_wiorrrr:
1727         case MOP_wiorrrrs:
1728         case MOP_xaddrrr:
1729         case MOP_xxwaddrrre:
1730         case MOP_xaddrrrs:
1731         case MOP_waddrrr:
1732         case MOP_wwwaddrrre:
1733         case MOP_waddrrrs:
1734         case MOP_waddrri12:
1735         case MOP_xaddrri12:
1736         case MOP_xsubrrr:
1737         case MOP_xxwsubrrre:
1738         case MOP_xsubrrrs:
1739         case MOP_wsubrrr:
1740         case MOP_wwwsubrrre:
1741         case MOP_wsubrrrs:
1742         case MOP_xinegrr:
1743         case MOP_winegrr:
1744         case MOP_xsxtb32:
1745         case MOP_xsxtb64:
1746         case MOP_xsxth32:
1747         case MOP_xsxth64:
1748         case MOP_xuxtb32:
1749         case MOP_xuxth32:
1750         case MOP_xsxtw64:
1751         case MOP_xubfxrri6i6:
1752         case MOP_xcmprr:
1753         case MOP_xwcmprre:
1754         case MOP_xcmprrs:
1755         case MOP_wcmprr:
1756         case MOP_wwcmprre:
1757         case MOP_wcmprrs:
1758             return false;
1759         default:
1760             break;
1761     }
1762     return true;
1763 }
1764 
1765 void LslAndToUbfizPattern::Run(BB &bb, Insn &insn)
1766 {
1767     if (!CheckCondition(insn)) {
1768         return;
1769     }
1770     MOperator mop = insn.GetMachineOpcode();
1771     Insn *newInsn = nullptr;
1772     if (mop == MOP_wandrri12 || mop == MOP_xandrri13) {
1773         newInsn = BuildNewInsn(insn, *defInsn, insn);
1774     }
1775     if (mop == MOP_wlslrri5 || mop == MOP_xlslrri6) {
1776         newInsn = BuildNewInsn(*defInsn, insn, insn);
1777     }
1778     if (newInsn == nullptr) {
1779         return;
1780     }
1781     bb.ReplaceInsn(insn, *newInsn);
1782     /* update ssa info */
1783     ssaInfo->ReplaceInsn(insn, *newInsn);
1784     optSuccess = true;
1785     SetCurrInsn(newInsn);
1786     /* dump pattern info */
1787     if (CG_PEEP_DUMP) {
1788         std::vector<Insn *> prevs;
1789         (void)prevs.emplace_back(defInsn);
1790         DumpAfterPattern(prevs, &insn, newInsn);
1791     }
1792 }
1793 
1794 // Build the ubfiz insn, or return nullptr if the immediates do not match the pattern
1795 Insn *LslAndToUbfizPattern::BuildNewInsn(const Insn &andInsn, const Insn &lslInsn, const Insn &useInsn) const
1796 {
1797     uint64 andImmValue = static_cast<uint64>(static_cast<ImmOperand &>(andInsn.GetOperand(kInsnThirdOpnd)).GetValue());
1798     uint64 lslImmValue = static_cast<uint64>(static_cast<ImmOperand &>(lslInsn.GetOperand(kInsnThirdOpnd)).GetValue());
1799     MOperator useMop = useInsn.GetMachineOpcode();
1800     // isLslAnd: true -> lsl followed by and, false -> and followed by lsl
1801     bool isLslAnd = (useMop == MOP_wandrri12) || (useMop == MOP_xandrri13);
1802     // judgment must stay non-zero unless the immediate matches the pattern below
1803     uint64 judgment = 1;
1804     // When useInsn is lsl (and + lsl), check whether andImmValue is 2^n - 1.
1805     // When useInsn is and (lsl + and), check whether andImmValue is of the form (2^n - 1) << m.
1806     if (isLslAnd) {
1807         if ((andImmValue >> lslImmValue) != 0) {
1808             judgment = (andImmValue >> lslImmValue) & ((andImmValue >> lslImmValue) + 1);
1809         }
1810     } else {
1811         judgment = andImmValue & (andImmValue + 1);
1812     }
1813     if (judgment != 0) {
1814         return nullptr;
1815     }
1816     RegOperand &ubfizOpnd1 = static_cast<RegOperand &>(useInsn.GetOperand(kInsnFirstOpnd));
1817     uint32 opnd1Size = ubfizOpnd1.GetSize();
1818     RegOperand &ubfizOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1819     uint32 opnd2Size = ubfizOpnd2.GetSize();
1820     ImmOperand &ubfizOpnd3 = static_cast<ImmOperand &>(lslInsn.GetOperand(kInsnThirdOpnd));
1821     uint32 mValue = static_cast<uint32>(ubfizOpnd3.GetValue());
1822     uint32 nValue = 0;
1823     if (isLslAnd) {
1824         nValue = static_cast<uint32>(__builtin_popcountll(andImmValue >> lslImmValue));
1825     } else {
1826         nValue = static_cast<uint32>(__builtin_popcountll(andImmValue));
1827     }
1828     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1829     if (opnd1Size != opnd2Size || (mValue + nValue) > opnd1Size) {
1830         return nullptr;
1831     }
1832     MOperator andMop = andInsn.GetMachineOpcode();
1833     MOperator newMop = (andMop == MOP_wandrri12) ? MOP_wubfizrri5i5 : MOP_xubfizrri6i6;
1834     uint32 size = (andMop == MOP_wandrri12) ? kMaxImmVal5Bits : kMaxImmVal6Bits;
1835     ImmOperand &ubfizOpnd4 = aarFunc->CreateImmOperand(nValue, size, false);
1836     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, ubfizOpnd1, ubfizOpnd2, ubfizOpnd3, ubfizOpnd4);
1837     return &newInsn;
1838 }
1839 
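/*
 * Illustrative sketch of MvnAndToBicPattern (operands are hypothetical); the mvn
 * may define either and-operand:
 *  mvn  w1, w2
 *  and  w0, w3, w1
 *  ===>
 *  bic  w0, w3, w2
 */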
1840 bool MvnAndToBicPattern::CheckCondition(Insn &insn)
1841 {
1842     MOperator curMop = insn.GetMachineOpcode();
1843     if (curMop != MOP_wandrrr && curMop != MOP_xandrrr) {
1844         return false;
1845     }
1846     auto &useReg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1847     auto &useReg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
1848     prevInsn1 = ssaInfo->GetDefInsn(useReg1);
1849     prevInsn2 = ssaInfo->GetDefInsn(useReg2);
1850     MOperator mop = insn.GetMachineOpcode();
1851     MOperator desMop = mop == MOP_xandrrr ? MOP_xnotrr : MOP_wnotrr;
1852     op1IsMvnDef = prevInsn1 != nullptr && prevInsn1->GetMachineOpcode() == desMop;
1853     op2IsMvnDef = prevInsn2 != nullptr && prevInsn2->GetMachineOpcode() == desMop;
1854     if (op1IsMvnDef || op2IsMvnDef) {
1855         return true;
1856     }
1857     return false;
1858 }
1859 
1860 void MvnAndToBicPattern::Run(BB &bb, Insn &insn)
1861 {
1862     if (!CheckCondition(insn)) {
1863         return;
1864     }
1865     MOperator newMop = insn.GetMachineOpcode() == MOP_xandrrr ? MOP_xbicrrr : MOP_wbicrrr;
1866     Insn *prevInsn = op1IsMvnDef ? prevInsn1 : prevInsn2;
1867     auto &prevOpnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1868     auto &opnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1869     auto &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1870     auto &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
1871     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd0, op1IsMvnDef ? opnd2 : opnd1, prevOpnd1);
1872     /* update ssa info */
1873     ssaInfo->ReplaceInsn(insn, newInsn);
1874     bb.ReplaceInsn(insn, newInsn);
1875     optSuccess = true;
1876     SetCurrInsn(&newInsn);
1877     /* dump pattern info */
1878     if (CG_PEEP_DUMP) {
1879         std::vector<Insn *> prevs;
1880         prevs.emplace_back(prevInsn);
1881         DumpAfterPattern(prevs, &insn, &newInsn);
1882     }
1883 }
1884 
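/*
 * Illustrative sketch of AndCbzToTbzPattern (operands are hypothetical):
 *  and  w0, w1, #(1 << n)
 *  cbz  w0, .label
 *  ===>
 *  tbz  w1, #n, .label    <-----(cbnz maps to tbnz in the same way)
 */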
1885 bool AndCbzToTbzPattern::CheckCondition(Insn &insn)
1886 {
1887     MOperator curMop = insn.GetMachineOpcode();
1888     if (curMop != MOP_wcbz && curMop != MOP_xcbz && curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
1889         return false;
1890     }
1891     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1892     prevInsn = ssaInfo ? ssaInfo->GetDefInsn(useReg) : insn.GetPreviousMachineInsn();
1893     if (prevInsn == nullptr) {
1894         return false;
1895     }
1896     MOperator prevMop = prevInsn->GetMachineOpcode();
1897     if (prevMop != MOP_wandrri12 && prevMop != MOP_xandrri13) {
1898         return false;
1899     }
1900     if (!ssaInfo && (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd)))) {
1901         return false;
1902     }
1903     return true;
1904 }
1905 
1906 void AndCbzToTbzPattern::Run(BB &bb, Insn &insn)
1907 {
1908     auto *aarchFunc = static_cast<AArch64CGFunc *>(cgFunc);
1909     if (!CheckCondition(insn)) {
1910         return;
1911     }
1912     auto &andImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1913     int64 tbzVal = GetLogValueAtBase2(andImm.GetValue());
1914     if (tbzVal == -1) {
1915         return;
1916     }
1917     MOperator mOp = insn.GetMachineOpcode();
1918     MOperator newMop = MOP_undef;
1919     switch (mOp) {
1920         case MOP_wcbz:
1921             newMop = MOP_wtbz;
1922             break;
1923         case MOP_wcbnz:
1924             newMop = MOP_wtbnz;
1925             break;
1926         case MOP_xcbz:
1927             newMop = MOP_xtbz;
1928             break;
1929         case MOP_xcbnz:
1930             newMop = MOP_xtbnz;
1931             break;
1932         default:
1933             CHECK_FATAL(false, "must be cbz/cbnz");
1934             break;
1935     }
1936     auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
1937     ImmOperand &tbzImm = aarchFunc->CreateImmOperand(tbzVal, k8BitSize, false);
1938     Insn *newInsn = nullptr;
1939     // if the bit offset is invalid (the tested bit is implicitly zero), tbz becomes an unconditional branch and tbnz becomes a fall-through
1940     if (!aarchFunc->IsOperandImmValid(newMop, &tbzImm, kInsnSecondOpnd)) {
1941         // cfg adjustment in ssa is complicated, so we just bypass this pattern if the imm is invalid.
1942         if (ssaInfo) {
1943             return;
1944         }
1945         bool delEdgeWithTarget = false;
1946         if (newMop == MOP_wtbz) {
1947             newInsn = &aarchFunc->GetInsnBuilder()->BuildInsn(MOP_xuncond, labelOpnd);
1948             bb.SetKind(BB::kBBGoto);
1949         } else if (newMop == MOP_wtbnz) {
1950             bb.SetKind(BB::kBBFallthru);
1951             bb.RemoveInsn(insn);
1952             delEdgeWithTarget = true;
1953         } else {
1954             CHECK_FATAL(false, "only wtbz/wtbnz can have invalid imm");
1955         }
1956         auto *bbFt = bb.GetNext();
1957         auto *targetBB = cgFunc->GetBBFromLab2BBMap(labelOpnd.GetLabelIndex());
1958         if (targetBB != bbFt) {  // when targetBB is ftBB, we cannot remove preds/succs
1959             auto *delEdgeBB = delEdgeWithTarget ? targetBB : bbFt;
1960             delEdgeBB->RemovePreds(bb);
1961             bb.RemoveSuccs(*delEdgeBB);
1962         }
1963     } else {
1964         newInsn =
1965             &cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnSecondOpnd), tbzImm, labelOpnd);
1966     }
1967     // try opt failed
1968     if (newInsn == nullptr) {
1969         return;
1970     }
1971     bb.ReplaceInsn(insn, *newInsn);
1972     SetCurrInsn(newInsn);
1973     if (ssaInfo) {
1974         /* update ssa info */
1975         ssaInfo->ReplaceInsn(insn, *newInsn);
1976     }
1977     optSuccess = true;
1978     /* dump pattern info */
1979     if (CG_PEEP_DUMP) {
1980         std::vector<Insn *> prevs;
1981         prevs.emplace_back(prevInsn);
1982         DumpAfterPattern(prevs, &insn, newInsn);
1983     }
1984 }
1985 
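/*
 * Illustrative sketch of AndTbzPattern (operands are hypothetical): if the and mask
 * keeps the tested bit, tbz/tbnz can test the and's source directly:
 *  and  w0, w1, #imm      <-----(bit n of imm is set)
 *  tbz  w0, #n, .label
 *  ===>
 *  tbz  w1, #n, .label
 */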
1986 bool AndTbzPattern::CheckCondition(Insn &insn)
1987 {
1988     MOperator curMop = insn.GetMachineOpcode();
1989     if (curMop != MOP_wtbz && curMop != MOP_xtbz && curMop != MOP_wtbnz && curMop != MOP_xtbnz) {
1990         return false;
1991     }
1992     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1993     prevInsn = ssaInfo ? ssaInfo->GetDefInsn(useReg) : insn.GetPreviousMachineInsn();
1994     if (prevInsn == nullptr) {
1995         return false;
1996     }
1997     MOperator prevMop = prevInsn->GetMachineOpcode();
1998     if (prevMop != MOP_wandrri12 && prevMop != MOP_xandrri13) {
1999         return false;
2000     }
2001     if (!ssaInfo && (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd)))) {
2002         return false;
2003     }
2004     return true;
2005 }
2006 
2007 void AndTbzPattern::Run(BB &bb, Insn &insn)
2008 {
2009     if (!CheckCondition(insn)) {
2010         return;
2011     }
2012     auto tbzVal = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValue();
2013     auto andVal = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
2014     CHECK_FATAL(tbzVal >= 0 && tbzVal < k64BitSize, "NIY, tbz imm val out of range.");
2015     if ((static_cast<uint64>(andVal) & (1ULL << static_cast<uint64>(tbzVal))) == 0) {
2016         return;
2017     }
2018     auto &newOpnd = prevInsn->GetOperand(kInsnSecondOpnd);
2019     auto &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(
2020         insn.GetMachineOpcode(), newOpnd, insn.GetOperand(kInsnSecondOpnd), insn.GetOperand(kInsnThirdOpnd));
2021     bb.ReplaceInsn(insn, newInsn);
2022     SetCurrInsn(&newInsn);
2023     if (ssaInfo) {
2024         // update ssa info
2025         ssaInfo->ReplaceInsn(insn, newInsn);
2026     }
2027     optSuccess = true;
2028     // dump pattern info
2029     if (CG_PEEP_DUMP) {
2030         std::vector<Insn *> prevs;
2031         prevs.emplace_back(prevInsn);
2032         DumpAfterPattern(prevs, &insn, &newInsn);
2033     }
2034 }
2035 
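/*
 * Illustrative sketch of CombineSameArithmeticPattern: two consecutive identical
 * shift/add/sub-immediate ops are folded when the combined immediate stays valid
 * (operands are hypothetical):
 *  lsr  w1, w0, #m
 *  lsr  w2, w1, #n
 *  ===>
 *  lsr  w2, w0, #(m + n)
 */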
2036 bool CombineSameArithmeticPattern::CheckCondition(Insn &insn)
2037 {
2038     MOperator curMop = insn.GetMachineOpcode();
2039     if (std::find(validMops.begin(), validMops.end(), curMop) == validMops.end()) {
2040         return false;
2041     }
2042     Operand &useOpnd = insn.GetOperand(kInsnSecondOpnd);
2043     CHECK_FATAL(useOpnd.IsRegister(), "expect regOpnd");
2044     prevInsn = ssaInfo->GetDefInsn(static_cast<RegOperand &>(useOpnd));
2045     if (prevInsn == nullptr) {
2046         return false;
2047     }
2048     if (prevInsn->GetMachineOpcode() != curMop) {
2049         return false;
2050     }
2051     auto &prevDefOpnd = prevInsn->GetOperand(kInsnFirstOpnd);
2052     CHECK_FATAL(prevDefOpnd.IsRegister(), "expect regOpnd");
2053     InsnSet useInsns = GetAllUseInsn(static_cast<RegOperand &>(prevDefOpnd));
2054     if (useInsns.size() > 1) {
2055         return false;
2056     }
2057     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
2058     CHECK_FATAL(prevInsn->GetOperand(kInsnThirdOpnd).IsIntImmediate(), "expect immOpnd");
2059     CHECK_FATAL(insn.GetOperand(kInsnThirdOpnd).IsIntImmediate(), "expect immOpnd");
2060     auto &prevImmOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
2061     auto &curImmOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
2062     int64 prevImm = prevImmOpnd.GetValue();
2063     int64 curImm = curImmOpnd.GetValue();
2064     newImmOpnd = &aarFunc->CreateImmOperand(prevImmOpnd.GetValue() + curImmOpnd.GetValue(), curImmOpnd.GetSize(),
2065                                             curImmOpnd.IsSignedValue());
2066     // propagate the vary attribute to the combined immediate
2067     if (prevImmOpnd.GetVary() == kUnAdjustVary && curImmOpnd.GetVary() == kUnAdjustVary) {
2068         return false;
2069     }
2070     if (prevImmOpnd.GetVary() == kUnAdjustVary || curImmOpnd.GetVary() == kUnAdjustVary) {
2071         newImmOpnd->SetVary(kUnAdjustVary);
2072     }
2073     if (prevImmOpnd.GetVary() == kAdjustVary || curImmOpnd.GetVary() == kAdjustVary) {
2074         newImmOpnd->SetVary(kAdjustVary);
2075     }
2076     switch (curMop) {
2077         case MOP_wlsrrri5:
2078         case MOP_wasrrri5:
2079         case MOP_wlslrri5: {
2080             if ((prevImm + curImm) < k0BitSizeInt || (prevImm + curImm) >= k32BitSizeInt) {
2081                 return false;
2082             }
2083             break;
2084         }
2085         case MOP_xlsrrri6:
2086         case MOP_xasrrri6:
2087         case MOP_xlslrri6: {
2088             if ((prevImm + curImm) < k0BitSizeInt || (prevImm + curImm) >= k64BitSizeInt) {
2089                 return false;
2090             }
2091             break;
2092         }
2093         case MOP_waddrri12:
2094         case MOP_xaddrri12:
2095         case MOP_wsubrri12:
2096         case MOP_xsubrri12: {
2097             if (!newImmOpnd->IsSingleInstructionMovable()) {
2098                 return false;
2099             }
2100             break;
2101         }
2102         default:
2103             return false;
2104     }
2105     return true;
2106 }
2107 
2108 void CombineSameArithmeticPattern::Run(BB &bb, Insn &insn)
2109 {
2110     if (!CheckCondition(insn)) {
2111         return;
2112     }
2113     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), insn.GetOperand(kInsnFirstOpnd),
2114                                                         prevInsn->GetOperand(kInsnSecondOpnd), *newImmOpnd);
2115     bb.ReplaceInsn(insn, newInsn);
2116     /* update ssa info */
2117     ssaInfo->ReplaceInsn(insn, newInsn);
2118     optSuccess = true;
2119     SetCurrInsn(&newInsn);
2120     /* dump pattern info */
2121     if (CG_PEEP_DUMP) {
2122         std::vector<Insn *> prevs;
2123         (void)prevs.emplace_back(prevInsn);
2124         DumpAfterPattern(prevs, &insn, &newInsn);
2125     }
2126 }
2127 
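/*
 * Illustrative sketch of LogicShiftAndOrrToExtrPattern for 32-bit operands; the
 * shifted-operand orr form is handled as well (operands are hypothetical):
 *  lsr  w1, w0, #m
 *  lsl  w2, w3, #(32 - m)
 *  orr  w4, w1, w2
 *  ===>
 *  extr w4, w3, w0, #m
 */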
2128 bool LogicShiftAndOrrToExtrPattern::CheckCondition(Insn &insn)
2129 {
2130     MOperator curMop = insn.GetMachineOpcode();
2131     if (curMop != MOP_wiorrrr && curMop != MOP_xiorrrr && curMop != MOP_wiorrrrs && curMop != MOP_xiorrrrs) {
2132         return false;
2133     }
2134     Operand &curDstOpnd = insn.GetOperand(kInsnFirstOpnd);
2135     is64Bits = (curDstOpnd.GetSize() == k64BitSize);
2136     if (curMop == MOP_wiorrrr || curMop == MOP_xiorrrr) {
2137         auto &useReg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2138         Insn *prevInsn1 = ssaInfo->GetDefInsn(useReg1);
2139         auto &useReg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
2140         Insn *prevInsn2 = ssaInfo->GetDefInsn(useReg2);
2141         if (prevInsn1 == nullptr || prevInsn2 == nullptr) {
2142             return false;
2143         }
2144         MOperator prevMop1 = prevInsn1->GetMachineOpcode();
2145         MOperator prevMop2 = prevInsn2->GetMachineOpcode();
2146         if ((prevMop1 == MOP_wlsrrri5 || prevMop1 == MOP_xlsrrri6) &&
2147             (prevMop2 == MOP_wlslrri5 || prevMop2 == MOP_xlslrri6)) {
2148             prevLsrInsn = prevInsn1;
2149             prevLslInsn = prevInsn2;
2150         } else if ((prevMop2 == MOP_wlsrrri5 || prevMop2 == MOP_xlsrrri6) &&
2151                    (prevMop1 == MOP_wlslrri5 || prevMop1 == MOP_xlslrri6)) {
2152             prevLsrInsn = prevInsn2;
2153             prevLslInsn = prevInsn1;
2154         } else {
2155             return false;
2156         }
2157         int64 prevLsrImmValue = static_cast<ImmOperand &>(prevLsrInsn->GetOperand(kInsnThirdOpnd)).GetValue();
2158         int64 prevLslImmValue = static_cast<ImmOperand &>(prevLslInsn->GetOperand(kInsnThirdOpnd)).GetValue();
2159         if ((prevLsrImmValue + prevLslImmValue) < 0) {
2160             return false;
2161         }
2162         if ((is64Bits && (prevLsrImmValue + prevLslImmValue) != k64BitSize) ||
2163             (!is64Bits && (prevLsrImmValue + prevLslImmValue) != k32BitSize)) {
2164             return false;
2165         }
2166         shiftValue = prevLsrImmValue;
2167     } else if (curMop == MOP_wiorrrrs || curMop == MOP_xiorrrrs) {
2168         auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2169         Insn *prevInsn = ssaInfo->GetDefInsn(useReg);
2170         if (prevInsn == nullptr) {
2171             return false;
2172         }
2173         MOperator prevMop = prevInsn->GetMachineOpcode();
2174         if (prevMop != MOP_wlsrrri5 && prevMop != MOP_xlsrrri6 && prevMop != MOP_wlslrri5 && prevMop != MOP_xlslrri6) {
2175             return false;
2176         }
2177         int64 prevImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
2178         auto &shiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
2179         uint32 shiftAmount = shiftOpnd.GetShiftAmount();
2180         if (shiftOpnd.GetShiftOp() == BitShiftOperand::kLSL && (prevMop == MOP_wlsrrri5 || prevMop == MOP_xlsrrri6)) {
2181             prevLsrInsn = prevInsn;
2182             shiftValue = prevImm;
2183         } else if (shiftOpnd.GetShiftOp() == BitShiftOperand::kLSR &&
2184                    (prevMop == MOP_wlslrri5 || prevMop == MOP_xlslrri6)) {
2185             prevLslInsn = prevInsn;
2186             shiftValue = shiftAmount;
2187         } else {
2188             return false;
2189         }
2190         if (prevImm + static_cast<int64>(shiftAmount) < 0) {
2191             return false;
2192         }
2193         if ((is64Bits && (prevImm + static_cast<int64>(shiftAmount)) != k64BitSize) ||
2194             (!is64Bits && (prevImm + static_cast<int64>(shiftAmount)) != k32BitSize)) {
2195             return false;
2196         }
2197     } else {
2198         CHECK_FATAL(false, "must be above mop");
2199         return false;
2200     }
2201     return true;
2202 }
2203 
2204 void LogicShiftAndOrrToExtrPattern::Run(BB &bb, Insn &insn)
2205 {
2206     if (!CheckCondition(insn)) {
2207         return;
2208     }
2209     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
2210     Operand &opnd1 =
2211         (prevLslInsn == nullptr ? insn.GetOperand(kInsnThirdOpnd) : prevLslInsn->GetOperand(kInsnSecondOpnd));
2212     Operand &opnd2 =
2213         (prevLsrInsn == nullptr ? insn.GetOperand(kInsnThirdOpnd) : prevLsrInsn->GetOperand(kInsnSecondOpnd));
2214     ImmOperand &immOpnd = is64Bits ? aarFunc->CreateImmOperand(shiftValue, kMaxImmVal6Bits, false)
2215                                    : aarFunc->CreateImmOperand(shiftValue, kMaxImmVal5Bits, false);
2216     MOperator newMop = is64Bits ? MOP_xextrrrri6 : MOP_wextrrrri5;
2217     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), opnd1, opnd2, immOpnd);
2218     bb.ReplaceInsn(insn, newInsn);
2219     /* update ssa info */
2220     ssaInfo->ReplaceInsn(insn, newInsn);
2221     optSuccess = true;
2222     SetCurrInsn(&newInsn);
2223     /* dump pattern info */
2224     if (CG_PEEP_DUMP) {
2225         std::vector<Insn *> prevs;
2226         prevs.emplace_back(prevLsrInsn);
2227         prevs.emplace_back(prevLslInsn);
2228         DumpAfterPattern(prevs, &insn, &newInsn);
2229     }
2230 }
2231 
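/*
 * Illustrative sketch of SimplifyMulArithmeticPattern, assuming the usual
 * madd/msub/mneg entries (and their floating-point counterparts) in
 * curMop2NewMopTable; operands are hypothetical:
 *  mul  w1, w2, w3
 *  add  w0, w1, w4    ===>   madd w0, w2, w3, w4
 *  sub  w0, w4, w1    ===>   msub w0, w2, w3, w4
 *  neg  w0, w1        ===>   mneg w0, w2, w3
 */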
2232 void SimplifyMulArithmeticPattern::SetArithType(const Insn &currInsn)
2233 {
2234     MOperator mOp = currInsn.GetMachineOpcode();
2235     switch (mOp) {
2236         case MOP_waddrrr:
2237         case MOP_xaddrrr: {
2238             arithType = kAdd;
2239             isFloat = false;
2240             break;
2241         }
2242         case MOP_dadd:
2243         case MOP_sadd: {
2244             arithType = kFAdd;
2245             isFloat = true;
2246             break;
2247         }
2248         case MOP_wsubrrr:
2249         case MOP_xsubrrr: {
2250             arithType = kSub;
2251             isFloat = false;
2252             validOpndIdx = kInsnThirdOpnd;
2253             break;
2254         }
2255         case MOP_dsub:
2256         case MOP_ssub: {
2257             arithType = kFSub;
2258             isFloat = true;
2259             validOpndIdx = kInsnThirdOpnd;
2260             break;
2261         }
2262         case MOP_xinegrr:
2263         case MOP_winegrr: {
2264             arithType = kNeg;
2265             isFloat = false;
2266             validOpndIdx = kInsnSecondOpnd;
2267             break;
2268         }
2269         case MOP_wfnegrr:
2270         case MOP_xfnegrr: {
2271             arithType = kFNeg;
2272             isFloat = true;
2273             validOpndIdx = kInsnSecondOpnd;
2274             break;
2275         }
2276         default: {
2277             CHECK_FATAL(false, "must be above mop");
2278             break;
2279         }
2280     }
2281 }
2282 
2283 bool SimplifyMulArithmeticPattern::CheckCondition(Insn &insn)
2284 {
2285     if (arithType == kUndef || validOpndIdx < 0) {
2286         return false;
2287     }
2288     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(static_cast<uint32>(validOpndIdx)));
2289     prevInsn = ssaInfo->GetDefInsn(useReg);
2290     if (prevInsn == nullptr) {
2291         return false;
2292     }
2293     regno_t useRegNO = useReg.GetRegisterNumber();
2294     VRegVersion *useVersion = ssaInfo->FindSSAVersion(useRegNO);
2295     DEBUG_ASSERT(useVersion != nullptr, "useVersion should not be nullptr");
2296     if (useVersion->GetAllUseInsns().size() > 1) {
2297         return false;
2298     }
2299     MOperator currMop = insn.GetMachineOpcode();
2300     if (currMop == MOP_dadd || currMop == MOP_sadd || currMop == MOP_dsub || currMop == MOP_ssub ||
2301         currMop == MOP_wfnegrr || currMop == MOP_xfnegrr) {
2302         isFloat = true;
2303     }
2304     MOperator prevMop = prevInsn->GetMachineOpcode();
2305     if (prevMop != MOP_wmulrrr && prevMop != MOP_xmulrrr && prevMop != MOP_xvmuld && prevMop != MOP_xvmuls) {
2306         return false;
2307     }
2308     if (isFloat && (prevMop == MOP_wmulrrr || prevMop == MOP_xmulrrr)) {
2309         return false;
2310     }
2311     if (!isFloat && (prevMop == MOP_xvmuld || prevMop == MOP_xvmuls)) {
2312         return false;
2313     }
2314     if ((currMop == MOP_xaddrrr) || (currMop == MOP_waddrrr)) {
2315         return true;
2316     }
2317     return CGOptions::IsFastMath();
2318 }
2319 
2320 void SimplifyMulArithmeticPattern::DoOptimize(BB &currBB, Insn &currInsn)
2321 {
2322     Operand &resOpnd = currInsn.GetOperand(kInsnFirstOpnd);
2323     Operand &opndMulOpnd1 = prevInsn->GetOperand(kInsnSecondOpnd);
2324     Operand &opndMulOpnd2 = prevInsn->GetOperand(kInsnThirdOpnd);
2325     bool is64Bits = (static_cast<RegOperand &>(resOpnd).GetSize() == k64BitSize);
2326     /* a 32-bit mul result feeding a 64-bit op may have overflowed, so do not combine */
2327     if ((prevInsn->GetOperand(kInsnFirstOpnd).GetSize() == k32BitSize) && is64Bits) {
2328         return;
2329     }
2330     MOperator newMop = is64Bits ? curMop2NewMopTable[arithType][1] : curMop2NewMopTable[arithType][0];
2331     Insn *newInsn = nullptr;
2332     if (arithType == kNeg || arithType == kFNeg) {
2333         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, opndMulOpnd1, opndMulOpnd2));
2334     } else {
2335         Operand &opnd3 = (validOpndIdx == kInsnSecondOpnd) ? currInsn.GetOperand(kInsnThirdOpnd)
2336                                                            : currInsn.GetOperand(kInsnSecondOpnd);
2337         newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, opndMulOpnd1, opndMulOpnd2, opnd3));
2338     }
2339     CHECK_FATAL(newInsn != nullptr, "must create newInsn");
2340     currBB.ReplaceInsn(currInsn, *newInsn);
2341     /* update ssa info */
2342     ssaInfo->ReplaceInsn(currInsn, *newInsn);
2343     optSuccess = true;
2344     /* dump pattern info */
2345     if (CG_PEEP_DUMP) {
2346         std::vector<Insn *> prevs;
2347         prevs.emplace_back(prevInsn);
2348         DumpAfterPattern(prevs, &currInsn, newInsn);
2349     }
2350 }
2351 
2352 void SimplifyMulArithmeticPattern::Run(BB &bb, Insn &insn)
2353 {
2354     SetArithType(insn);
2355     if (arithType == kAdd || arithType == kFAdd) {
2356         validOpndIdx = kInsnSecondOpnd;
2357         if (CheckCondition(insn)) {
2358             DoOptimize(bb, insn);
2359             return;
2360         } else {
2361             validOpndIdx = kInsnThirdOpnd;
2362         }
2363     }
2364     if (!CheckCondition(insn)) {
2365         return;
2366     }
2367     DoOptimize(bb, insn);
2368 }
2369 
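/*
 * Illustrative sketches of ElimSpecificExtensionPattern's three scenes (selected in
 * SetOptSceneType below); operands are hypothetical and the load rewrite assumes the
 * corresponding loadMappingTable entry:
 *  load + ext:      ldrb w1, [x2]          ldrsb w1, [x2]
 *                   sxtb w0, w1      ===>  mov   w0, w1
 *  mov  + ext:      mov  w1, #1            mov  w1, #1
 *                   uxtb w0, w1      ===>  mov  w0, w1
 *  same ext twice:  sxtb w1, w2            sxtb w1, w2
 *                   sxtb w0, w1      ===>  mov  w0, w1
 */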
2370 void ElimSpecificExtensionPattern::SetSpecificExtType(const Insn &currInsn)
2371 {
2372     MOperator mOp = currInsn.GetMachineOpcode();
2373     switch (mOp) {
2374         case MOP_xsxtb32: {
2375             is64Bits = false;
2376             extTypeIdx = SXTB;
2377             break;
2378         }
2379         case MOP_xsxtb64: {
2380             is64Bits = true;
2381             extTypeIdx = SXTB;
2382             break;
2383         }
2384         case MOP_xsxth32: {
2385             is64Bits = false;
2386             extTypeIdx = SXTH;
2387             break;
2388         }
2389         case MOP_xsxth64: {
2390             is64Bits = true;
2391             extTypeIdx = SXTH;
2392             break;
2393         }
2394         case MOP_xsxtw64: {
2395             is64Bits = true;
2396             extTypeIdx = SXTW;
2397             break;
2398         }
2399         case MOP_xuxtb32: {
2400             is64Bits = false;
2401             extTypeIdx = UXTB;
2402             break;
2403         }
2404         case MOP_xuxth32: {
2405             is64Bits = false;
2406             extTypeIdx = UXTH;
2407             break;
2408         }
2409         case MOP_xuxtw64: {
2410             is64Bits = true;
2411             extTypeIdx = UXTW;
2412             break;
2413         }
2414         case MOP_wandrri12: {
2415             is64Bits = false;
2416             extTypeIdx = AND;
2417             break;
2418         }
2419         case MOP_xandrri13: {
2420             is64Bits = true;
2421             extTypeIdx = AND;
2422             break;
2423         }
2424         default: {
2425             extTypeIdx = EXTUNDEF;
2426         }
2427     }
2428 }
2429 
2430 void ElimSpecificExtensionPattern::SetOptSceneType()
2431 {
2432     if (prevInsn->IsCall()) {
2433         sceneType = kSceneMov;
2434         return;
2435     }
2436     MOperator preMop = prevInsn->GetMachineOpcode();
2437     switch (preMop) {
2438         case MOP_wldr:
2439         case MOP_wldrb:
2440         case MOP_wldrsb:
2441         case MOP_wldrh:
2442         case MOP_wldrsh:
2443         case MOP_xldrsw: {
2444             sceneType = kSceneLoad;
2445             break;
2446         }
2447         case MOP_wmovri32:
2448         case MOP_xmovri64: {
2449             sceneType = kSceneMov;
2450             break;
2451         }
2452         case MOP_xsxtb32:
2453         case MOP_xsxtb64:
2454         case MOP_xsxth32:
2455         case MOP_xsxth64:
2456         case MOP_xsxtw64:
2457         case MOP_xuxtb32:
2458         case MOP_xuxth32:
2459         case MOP_xuxtw64: {
2460             sceneType = kSceneSameExt;
2461             break;
2462         }
2463         default: {
2464             sceneType = kSceneUndef;
2465         }
2466     }
2467 }
2468 
2469 void ElimSpecificExtensionPattern::ReplaceExtWithMov(Insn &currInsn)
2470 {
2471     auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
2472     auto &currDstOpnd = static_cast<RegOperand &>(currInsn.GetOperand(kInsnFirstOpnd));
2473     bool isInsnDeleted = false;
2474     if (sceneType == kSceneSameExt && currDstOpnd.IsSSAForm() && prevDstOpnd.IsSSAForm()) {
2475         auto *destVersion = ssaInfo->FindSSAVersion(currDstOpnd.GetRegisterNumber());
2476         DEBUG_ASSERT(destVersion != nullptr, "find Version failed");
2477         auto *srcVersion = ssaInfo->FindSSAVersion(prevDstOpnd.GetRegisterNumber());
2478         DEBUG_ASSERT(srcVersion != nullptr, "find Version failed");
2479         ssaInfo->ReplaceAllUse(destVersion, srcVersion);
2480         if (destVersion->GetAllUseInsns().empty()) {
2481             currInsn.GetBB()->RemoveInsn(currInsn);
2482             destVersion->MarkDeleted();
2483             MapleUnorderedMap<uint32, DUInsnInfo *> &useInfos = srcVersion->GetAllUseInsns();
2484             auto it = useInfos.find(currInsn.GetId());
2485             if (it != useInfos.end()) {
2486                 useInfos.erase(it);
2487             }
2488             isInsnDeleted = true;
2489         }
2490     }
2491     Insn *newInsn = nullptr;
2492     if (!isInsnDeleted) {
2493         MOperator newMop = is64Bits ? MOP_xmovrr : MOP_wmovrr;
2494         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, currDstOpnd, prevDstOpnd);
2495         currBB->ReplaceInsn(currInsn, *newInsn);
2496         /* update ssa info */
2497         ssaInfo->ReplaceInsn(currInsn, *newInsn);
2498     }
2499     optSuccess = true;
2500     /* dump pattern info */
2501     if (CG_PEEP_DUMP) {
2502         std::vector<Insn *> prevs;
2503         prevs.emplace_back(prevInsn);
2504         DumpAfterPattern(prevs, &currInsn, newInsn);
2505     }
2506 }
2507 
2508 void ElimSpecificExtensionPattern::ElimExtensionAfterMov(Insn &insn)
2509 {
2510     if (&insn == currBB->GetFirstMachineInsn() || extTypeIdx == AND) {
2511         return;
2512     }
2513     auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
2514     auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2515     auto &currSrcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2516     if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
2517         return;
2518     }
2519     MOperator currMop = insn.GetMachineOpcode();
2520     /* example 2) [mov w0, R0] is the return value of a call and the return size does not exceed the extension width */
2521     if (prevInsn->IsCall() && (currSrcOpnd.GetRegisterNumber() == R0 || currSrcOpnd.GetRegisterNumber() == V0) &&
2522         currDstOpnd.GetRegisterNumber() == currSrcOpnd.GetRegisterNumber()) {
2523         uint32 retSize = prevInsn->GetRetSize();
2524         if (retSize > 0 &&
2525             ((currMop == MOP_xuxtb32 && retSize <= k1ByteSize) || (currMop == MOP_xuxth32 && retSize <= k2ByteSize) ||
2526              (currMop == MOP_xuxtw64 && retSize <= k4ByteSize))) {
2527             ReplaceExtWithMov(insn);
2528         }
2529         return;
2530     }
2531     if (prevInsn->IsCall() && prevInsn->GetIsCallReturnSigned()) {
2532         return;
2533     }
2534     auto &immMovOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
2535     int64 value = immMovOpnd.GetValue();
2536     uint64 minRange = extValueRangeTable[extTypeIdx][0];
2537     uint64 maxRange = extValueRangeTable[extTypeIdx][1];
2538     if (currMop == MOP_xsxtb32 || currMop == MOP_xsxth32) {
2539         /* value should be in valid range */
2540         if (static_cast<uint64>(value) >= minRange && static_cast<uint64>(value) <= maxRange &&
2541             immMovOpnd.IsSingleInstructionMovable(currDstOpnd.GetSize())) {
2542             ReplaceExtWithMov(insn);
2543         }
2544     } else if (currMop == MOP_xuxtb32 || currMop == MOP_xuxth32) {
2545         if ((static_cast<uint64>(value) & minRange) == 0) {
2546             ReplaceExtWithMov(insn);
2547         }
2548     } else if (currMop == MOP_xuxtw64) {
2549         ReplaceExtWithMov(insn);
2550     } else {
2551         /* MOP_xsxtb64 & MOP_xsxth64 & MOP_xsxtw64 */
2552         if ((static_cast<uint64>(value) & minRange) == 0 &&
2553             immMovOpnd.IsSingleInstructionMovable(currDstOpnd.GetSize())) {
2554             ReplaceExtWithMov(insn);
2555         }
2556     }
2557 }
2558 
2559 bool ElimSpecificExtensionPattern::IsValidLoadExtPattern(MOperator oldMop, MOperator newMop) const
2560 {
2561     if (oldMop == newMop) {
2562         return true;
2563     }
2564     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
2565     auto *memOpnd = static_cast<MemOperand *>(prevInsn->GetMemOpnd());
2566     CHECK_FATAL(memOpnd != nullptr, "invalid memOpnd");
2567     DEBUG_ASSERT(!prevInsn->IsStorePair(), "do not do ElimSpecificExtensionPattern for str pair");
2568     DEBUG_ASSERT(!prevInsn->IsLoadPair(), "do not do ElimSpecificExtensionPattern for ldr pair");
2569     if (memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi &&
2570         !aarFunc->IsOperandImmValid(newMop, memOpnd, kInsnSecondOpnd)) {
2571         return false;
2572     }
2573     uint32 shiftAmount = memOpnd->ShiftAmount();
2574     if (shiftAmount == 0) {
2575         return true;
2576     }
2577     const InsnDesc *md = &AArch64CG::kMd[newMop];
2578     uint32 memSize = md->GetOperandSize() / k8BitSize;
2579     uint32 validShiftAmount =
2580         ((memSize == k8BitSize)
2581              ? k3BitSize
2582              : ((memSize == k4BitSize) ? k2BitSize : ((memSize == k2BitSize) ? k1BitSize : k0BitSize)));
2583     if (shiftAmount != validShiftAmount) {
2584         return false;
2585     }
2586     return true;
2587 }
2588 
2589 MOperator ElimSpecificExtensionPattern::SelectNewLoadMopByBitSize(MOperator lowBitMop) const
2590 {
2591     auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
2592     switch (lowBitMop) {
2593         case MOP_wldrsb: {
2594             prevDstOpnd.SetSize(k64BitSize);
2595             return MOP_xldrsb;
2596         }
2597         case MOP_wldrsh: {
2598             prevDstOpnd.SetSize(k64BitSize);
2599             return MOP_xldrsh;
2600         }
2601         default:
2602             break;
2603     }
2604     return lowBitMop;
2605 }
2606 
2607 void ElimSpecificExtensionPattern::ElimExtensionAfterLoad(Insn &insn)
2608 {
2609     if (extTypeIdx == EXTUNDEF) {
2610         return;
2611     }
2612     if (extTypeIdx == AND) {
2613         auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
2614         if (immOpnd.GetValue() != 0xff) {
2615             return;
2616         }
2617     }
2618     MOperator prevOrigMop = prevInsn->GetMachineOpcode();
2619     for (uint8 i = 0; i < kPrevLoadPatternNum; i++) {
2620         DEBUG_ASSERT(extTypeIdx < EXTTYPESIZE, "extTypeIdx must be lower than EXTTYPESIZE");
2621         if (prevOrigMop != loadMappingTable[extTypeIdx][i][0]) {
2622             continue;
2623         }
2624         MOperator prevNewMop = loadMappingTable[extTypeIdx][i][1];
2625         if (!IsValidLoadExtPattern(prevOrigMop, prevNewMop)) {
2626             return;
2627         }
2628         if (is64Bits && extTypeIdx >= SXTB && extTypeIdx <= SXTW) {
2629             prevNewMop = SelectNewLoadMopByBitSize(prevNewMop);
2630         }
2631         auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
2632         auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2633         /* to avoid {mov [64], [32]} in the case of big endian */
2634         if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
2635             return;
2636         }
2637 
2638         auto *newMemOp = GetOrCreateMemOperandForNewMOP(*cgFunc, *prevInsn, prevNewMop);
2639 
2640         if (newMemOp == nullptr) {
2641             return;
2642         }
2643 
2644         auto *aarCGSSAInfo = static_cast<AArch64CGSSAInfo *>(ssaInfo);
2645         if (CG_PEEP_DUMP) {
2646             LogInfo::MapleLogger() << ">>>>>>> In " << GetPatternName() << " : <<<<<<<\n";
2647             if (prevOrigMop != prevNewMop) {
2648                 LogInfo::MapleLogger() << "======= OrigPrevInsn : \n";
2649                 prevInsn->Dump();
2650                 aarCGSSAInfo->DumpInsnInSSAForm(*prevInsn);
2651             }
2652         }
2653 
2654         prevInsn->SetMemOpnd(newMemOp);
2655         prevInsn->SetMOP(AArch64CG::kMd[prevNewMop]);
2656 
2657         if ((prevOrigMop != prevNewMop) && CG_PEEP_DUMP) {
2658             LogInfo::MapleLogger() << "======= NewPrevInsn : \n";
2659             prevInsn->Dump();
2660             aarCGSSAInfo->DumpInsnInSSAForm(*prevInsn);
2661         }
2662 
2663         MOperator movMop = is64Bits ? MOP_xmovrr : MOP_wmovrr;
2664         Insn &newMovInsn = cgFunc->GetInsnBuilder()->BuildInsn(movMop, insn.GetOperand(kInsnFirstOpnd),
2665                                                                prevInsn->GetOperand(kInsnFirstOpnd));
2666         currBB->ReplaceInsn(insn, newMovInsn);
2667         /* update ssa info */
2668         ssaInfo->ReplaceInsn(insn, newMovInsn);
2669         optSuccess = true;
2670         /* dump pattern info */
2671         if (CG_PEEP_DUMP) {
2672             LogInfo::MapleLogger() << "======= ReplacedInsn :\n";
2673             insn.Dump();
2674             aarCGSSAInfo->DumpInsnInSSAForm(insn);
2675             LogInfo::MapleLogger() << "======= NewInsn :\n";
2676             newMovInsn.Dump();
2677             aarCGSSAInfo->DumpInsnInSSAForm(newMovInsn);
2678         }
2679     }
2680 }
2681 
2682 void ElimSpecificExtensionPattern::ElimExtensionAfterSameExt(Insn &insn)
2683 {
2684     if (extTypeIdx == EXTUNDEF || extTypeIdx == AND) {
2685         return;
2686     }
2687     auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
2688     auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2689     if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
2690         return;
2691     }
2692     MOperator prevMop = prevInsn->GetMachineOpcode();
2693     MOperator currMop = insn.GetMachineOpcode();
2694     for (uint8 i = 0; i < kSameExtPatternNum; i++) {
2695         DEBUG_ASSERT(extTypeIdx < EXTTYPESIZE, "extTypeIdx must be lower than EXTTYPESIZE");
2696         if (sameExtMappingTable[extTypeIdx][i][0] == MOP_undef || sameExtMappingTable[extTypeIdx][i][1] == MOP_undef) {
2697             continue;
2698         }
2699         if (prevMop == sameExtMappingTable[extTypeIdx][i][0] && currMop == sameExtMappingTable[extTypeIdx][i][1]) {
2700             ReplaceExtWithMov(insn);
2701         }
2702     }
2703 }
2704 
2705 bool ElimSpecificExtensionPattern::CheckCondition(Insn &insn)
2706 {
2707     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2708     prevInsn = ssaInfo->GetDefInsn(useReg);
2709     InsnSet useInsns = GetAllUseInsn(useReg);
2710     if ((prevInsn == nullptr) || (useInsns.size() != 1)) {
2711         return false;
2712     }
2713     SetOptSceneType();
2714     SetSpecificExtType(insn);
2715     if (sceneType == kSceneUndef) {
2716         return false;
2717     }
2718     return true;
2719 }
2720 
2721 void ElimSpecificExtensionPattern::Run(BB & /* bb */, Insn &insn)
2722 {
2723     if (!CheckCondition(insn)) {
2724         return;
2725     }
2726     if (sceneType == kSceneMov) {
2727         ElimExtensionAfterMov(insn);
2728     } else if (sceneType == kSceneLoad) {
2729         ElimExtensionAfterLoad(insn);
2730     } else if (sceneType == kSceneSameExt) {
2731         ElimExtensionAfterSameExt(insn);
2732     }
2733 }
2734 
2735 void OneHoleBranchPattern::FindNewMop(const BB &bb, const Insn &insn)
2736 {
2737     if (&insn != bb.GetLastMachineInsn()) {
2738         return;
2739     }
2740     MOperator thisMop = insn.GetMachineOpcode();
2741     switch (thisMop) {
2742         case MOP_wcbz:
2743             newOp = MOP_wtbnz;
2744             break;
2745         case MOP_wcbnz:
2746             newOp = MOP_wtbz;
2747             break;
2748         case MOP_xcbz:
2749             newOp = MOP_xtbnz;
2750             break;
2751         case MOP_xcbnz:
2752             newOp = MOP_xtbz;
2753             break;
2754         default:
2755             break;
2756     }
2757 }
2758 
2759 /*
2760  * pattern1:
2761  *  uxtb w0, w1     <-----(ValidBitsNum <= 8)
2762  *  cbz w0, .label
2763  *  ===>
2764  *  cbz w1, .label
2765  *
2766  * pattern2:
2767  *  uxtb w2, w1     <-----(ValidBitsNum == 1)
2768  *  eor w3, w2, #1
2769  *  cbz w3, .label
2770  *  ===>
2771  *   tbnz w1, #0, .label
2772  */
2773 void OneHoleBranchPattern::Run(BB &bb, Insn &insn)
2774 {
2775     if (!CheckCondition(insn)) {
2776         return;
2777     }
2778     LabelOperand &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
2779     bool pattern1 = (prevInsn->GetMachineOpcode() == MOP_xuxtb32) &&
2780                     (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() <= k8BitSize ||
2781                      static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetValidBitsNum() <= k8BitSize);
2782     if (pattern1) {
2783         Insn &newCbzInsn =
2784             cgFunc->GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), prevInsn->GetOperand(kInsnSecondOpnd), label);
2785         bb.ReplaceInsn(insn, newCbzInsn);
2786         ssaInfo->ReplaceInsn(insn, newCbzInsn);
2787         optSuccess = true;
2788         SetCurrInsn(&newCbzInsn);
2789         if (CG_PEEP_DUMP) {
2790             std::vector<Insn *> prevs;
2791             prevs.emplace_back(prevInsn);
2792             DumpAfterPattern(prevs, &newCbzInsn, nullptr);
2793         }
2794         return;
2795     }
2796     bool pattern2 = (prevInsn->GetMachineOpcode() == MOP_xeorrri13 || prevInsn->GetMachineOpcode() == MOP_weorrri12) &&
2797                     (static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue() == 1);
2798     if (pattern2) {
2799         if (!CheckPrePrevInsn()) {
2800             return;
2801         }
2802         AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
2803         ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(0, k8BitSize, false);
2804         auto &regOperand = static_cast<RegOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd));
2805         Insn &newTbzInsn = cgFunc->GetInsnBuilder()->BuildInsn(newOp, regOperand, oneHoleOpnd, label);
2806         if (!VERIFY_INSN(&newTbzInsn)) {
2807             return;
2808         }
2809         bb.ReplaceInsn(insn, newTbzInsn);
2810         ssaInfo->ReplaceInsn(insn, newTbzInsn);
2811         optSuccess = true;
2812         if (CG_PEEP_DUMP) {
2813             std::vector<Insn *> prevs;
2814             prevs.emplace_back(prevInsn);
2815             prevs.emplace_back(prePrevInsn);
2816             DumpAfterPattern(prevs, &newTbzInsn, nullptr);
2817         }
2818     }
2819 }
2820 
2821 bool OneHoleBranchPattern::CheckCondition(Insn &insn)
2822 {
2823     MOperator curMop = insn.GetMachineOpcode();
2824     if (curMop != MOP_wcbz && curMop != MOP_xcbz && curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
2825         return false;
2826     }
2827     FindNewMop(*insn.GetBB(), insn);
2828     if (newOp == MOP_undef) {
2829         return false;
2830     }
2831     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2832     prevInsn = ssaInfo->GetDefInsn(useReg);
2833     if (prevInsn == nullptr) {
2834         return false;
2835     }
2836     if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
2837         return false;
2838     }
2839     return true;
2840 }
2841 
2842 bool OneHoleBranchPattern::CheckPrePrevInsn()
2843 {
2844     auto &useReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
2845     prePrevInsn = ssaInfo->GetDefInsn(useReg);
2846     if (prePrevInsn == nullptr) {
2847         return false;
2848     }
2849     if (prePrevInsn->GetMachineOpcode() != MOP_xuxtb32 ||
2850         static_cast<RegOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() != 1) {
2851         return false;
2852     }
2853     if (&(prePrevInsn->GetOperand(kInsnFirstOpnd)) != &(prevInsn->GetOperand(kInsnSecondOpnd))) {
2854         return false;
2855     }
2856     return true;
2857 }
2858 
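/*
 * Sketch of the orr-to-mov rewrite performed below (register numbers are hypothetical;
 * the immediate must be 0):
 *  orr w0, w1, #0    ===>    mov w0, w1
 *  orr x0, x1, #0    ===>    mov x0, x1
 */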
2859 void OrrToMovPattern::Run(BB &bb, Insn &insn)
2860 {
2861     if (!CheckCondition(insn)) {
2862         return;
2863     }
2864     RegOperand *reg1 = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2865     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *reg1, *reg2);
2866     bb.ReplaceInsn(insn, newInsn);
2867     ssaInfo->ReplaceInsn(insn, newInsn);
2868     optSuccess = true;
2869     SetCurrInsn(&newInsn);
2870     if (CG_PEEP_DUMP) {
2871         std::vector<Insn *> prevs;
2872         prevs.emplace_back(&insn);
2873         DumpAfterPattern(prevs, &newInsn, nullptr);
2874     }
2875 }
2876 
2877 bool OrrToMovPattern::CheckCondition(Insn &insn)
2878 {
2879     MOperator curMop = insn.GetMachineOpcode();
2880     if (curMop != MOP_wiorrri12 && curMop != MOP_xiorrri13) {
2881         return false;
2882     }
2883     MOperator thisMop = insn.GetMachineOpcode();
2884     Operand *opndOfOrr = nullptr;
2885     switch (thisMop) {
2886         case MOP_wiorrri12: { /* opnd1 is reg32 and opnd3 is immediate. */
2887             opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
2888             reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2889             newMop = MOP_wmovrr;
2890             break;
2891         }
2892         case MOP_xiorrri13: { /* opnd1 is reg64 and opnd3 is immediate. */
2893             opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
2894             reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2895             newMop = MOP_xmovrr;
2896             break;
2897         }
2898         default:
2899             return false;
2900     }
2901     CHECK_FATAL(opndOfOrr->IsIntImmediate(), "expects immediate operand");
2902     ImmOperand *immOpnd = static_cast<ImmOperand *>(opndOfOrr);
2903     if (immOpnd->GetValue() != 0) {
2904         return false;
2905     }
2906     return true;
2907 }
2908 
2909 void AArch64CGPeepHole::DoNormalOptimize(BB &bb, Insn &insn)
2910 {
2911     MOperator thisMop = insn.GetMachineOpcode();
2912     manager = peepMemPool->New<PeepOptimizeManager>(*cgFunc, bb, insn);
2913     switch (thisMop) {
2914         /*
2915          * e.g.
2916          * execute before & after RA: manager->NormalPatternOpt<>(true)
2917          * execute before RA: manager->NormalPatternOpt<>(!cgFunc->IsAfterRegAlloc())
2918          * execute after RA: manager->NormalPatternOpt<>(cgFunc->IsAfterRegAlloc())
2919          */
2920         case MOP_xubfxrri6i6: {
2921             manager->NormalPatternOpt<UbfxToUxtwPattern>(!cgFunc->IsAfterRegAlloc());
2922             break;
2923         }
2924         case MOP_xmovzri16: {
2925             manager->NormalPatternOpt<LoadFloatPointPattern>(!cgFunc->IsAfterRegAlloc());
2926             break;
2927         }
2928         case MOP_wcmpri: {
2929             manager->NormalPatternOpt<LongIntCompareWithZPattern>(!cgFunc->IsAfterRegAlloc());
2930             break;
2931         }
2932         case MOP_wmovrr:
2933         case MOP_xmovrr:
2934         case MOP_xvmovs:
2935         case MOP_xvmovd:
2936         case MOP_vmovuu:
2937         case MOP_vmovvv: {
2938             manager->NormalPatternOpt<RemoveMovingtoSameRegPattern>(cgFunc->IsAfterRegAlloc());
2939             break;
2940         }
2941         case MOP_wstrb:
2942         case MOP_wldrb: {
2943             // Only strb and ldrb can do this pattern; other patterns still need to be done, so there is no break here.
2944             break;
2945         }
2946         case MOP_wstrh:
2947         case MOP_wldrh:
2948         case MOP_xldr:
2949         case MOP_xstr:
2950         case MOP_wldr:
2951         case MOP_wstr:
2952         case MOP_dldr:
2953         case MOP_dstr:
2954         case MOP_sldr:
2955         case MOP_sstr:
2956         case MOP_qldr:
2957         case MOP_qstr: {
2958             manager->NormalPatternOpt<CombineContiLoadAndStorePattern>(cgFunc->IsAfterRegAlloc());
2959             if (!manager->OptSuccess()) {
2960                 manager->NormalPatternOpt<ContiLDRorSTRToSameMEMPattern>(cgFunc->IsAfterRegAlloc());
2961             }
2962             if (!manager->OptSuccess()) {
2963                 manager->NormalPatternOpt<RemoveIdenticalLoadAndStorePattern>(cgFunc->IsAfterRegAlloc());
2964             }
2965             break;
2966         }
2967         case MOP_xvmovrv:
2968         case MOP_xvmovrd: {
2969             manager->NormalPatternOpt<FmovRegPattern>(cgFunc->IsAfterRegAlloc());
2970             break;
2971         }
2972         case MOP_xsbfxrri6i6: {
2973             manager->NormalPatternOpt<SbfxOptPattern>(cgFunc->IsAfterRegAlloc());
2974             break;
2975         }
2976         case MOP_wcbz:
2977         case MOP_xcbz:
2978         case MOP_wcbnz:
2979         case MOP_xcbnz: {
2980             manager->NormalPatternOpt<AndCbzToTbzPattern>(!cgFunc->IsAfterRegAlloc());
2981             if (!manager->OptSuccess()) {
2982                 manager->NormalPatternOpt<CbnzToCbzPattern>(cgFunc->IsAfterRegAlloc());
2983             }
2984             break;
2985         }
2986         case MOP_wsdivrrr: {
2987             manager->NormalPatternOpt<ReplaceDivToMultiPattern>(cgFunc->IsAfterRegAlloc());
2988             break;
2989         }
2990         case MOP_xbl: {
2991             if (CGOptions::IsGCOnly() && CGOptions::DoWriteRefFieldOpt()) {
2992                 manager->NormalPatternOpt<WriteFieldCallPattern>(!cgFunc->IsAfterRegAlloc());
2993             }
2994             break;
2995         }
2996         default:
2997             break;
2998     }
2999 }
3000 /* ======== CGPeepPattern End ======== */
3001 
3002 void AArch64PeepHole::InitOpts()
3003 {
3004     optimizations.resize(kPeepholeOptsNum);
3005     optimizations[kEliminateSpecifcSXTOpt] = optOwnMemPool->New<EliminateSpecifcSXTAArch64>(cgFunc);
3006     optimizations[kEliminateSpecifcUXTOpt] = optOwnMemPool->New<EliminateSpecifcUXTAArch64>(cgFunc);
3007     optimizations[kCsetCbzToBeqOpt] = optOwnMemPool->New<CsetCbzToBeqOptAArch64>(cgFunc);
3008     optimizations[kAndCmpBranchesToCsetOpt] = optOwnMemPool->New<AndCmpBranchesToCsetAArch64>(cgFunc);
3009     optimizations[kAndCmpBranchesToTstOpt] = optOwnMemPool->New<AndCmpBranchesToTstAArch64>(cgFunc);
3010     optimizations[kAndCbzBranchesToTstOpt] = optOwnMemPool->New<AndCbzBranchesToTstAArch64>(cgFunc);
3011     optimizations[kZeroCmpBranchesOpt] = optOwnMemPool->New<ZeroCmpBranchesAArch64>(cgFunc);
3012     optimizations[kCselZeroOneToCsetOpt] = optOwnMemPool->New<CselZeroOneToCsetOpt>(cgFunc);
3013     optimizations[kAndCmpCsetEorCbzOpt] = optOwnMemPool->New<AndCmpCsetEorCbzOpt>(cgFunc);
3014     optimizations[kAddLdrOpt] = optOwnMemPool->New<AddLdrOpt>(cgFunc);
3015     optimizations[kCsetEorOpt] = optOwnMemPool->New<CsetEorOpt>(cgFunc);
3016     optimizations[kMoveCmpOpt] = optOwnMemPool->New<MoveCmpOpt>(cgFunc);
3017 }
3018 
3019 void AArch64PeepHole::Run(BB &bb, Insn &insn)
3020 {
3021     MOperator thisMop = insn.GetMachineOpcode();
3022     switch (thisMop) {
3023         case MOP_xsxtb32:
3024         case MOP_xsxth32:
3025         case MOP_xsxtb64:
3026         case MOP_xsxth64:
3027         case MOP_xsxtw64: {
3028             (static_cast<EliminateSpecifcSXTAArch64 *>(optimizations[kEliminateSpecifcSXTOpt]))->Run(bb, insn);
3029             break;
3030         }
3031         case MOP_xuxtb32:
3032         case MOP_xuxth32:
3033         case MOP_xuxtw64: {
3034             (static_cast<EliminateSpecifcUXTAArch64 *>(optimizations[kEliminateSpecifcUXTOpt]))->Run(bb, insn);
3035             break;
3036         }
3037         case MOP_wcbnz:
3038         case MOP_xcbnz: {
3039             (static_cast<CsetCbzToBeqOptAArch64 *>(optimizations[kCsetCbzToBeqOpt]))->Run(bb, insn);
3040             break;
3041         }
3042         case MOP_wcbz:
3043         case MOP_xcbz: {
3044             (static_cast<CsetCbzToBeqOptAArch64 *>(optimizations[kCsetCbzToBeqOpt]))->Run(bb, insn);
3045             break;
3046         }
3047         case MOP_xandrrr:
3048         case MOP_wandrrr:
3049         case MOP_wandrri12:
3050         case MOP_xandrri13: {
3051             (static_cast<AndCmpCsetEorCbzOpt *>(optimizations[kAndCmpCsetEorCbzOpt]))->Run(bb, insn);
3052             (static_cast<AndCmpBranchesToTstAArch64 *>(optimizations[kAndCmpBranchesToTstOpt]))->Run(bb, insn);
3053             (static_cast<AndCbzBranchesToTstAArch64 *>(optimizations[kAndCbzBranchesToTstOpt]))->Run(bb, insn);
3054             break;
3055         }
3056         case MOP_wcsetrc:
3057         case MOP_xcsetrc: {
3058             (static_cast<CsetEorOpt *>(optimizations[kCsetEorOpt]))->Run(bb, insn);
3059             (static_cast<AndCmpBranchesToCsetAArch64 *>(optimizations[kAndCmpBranchesToCsetOpt]))->Run(bb, insn);
3060             break;
3061         }
3062         case MOP_xmovri64:
3063         case MOP_wmovri32: {
3064             static_cast<MoveCmpOpt *>(optimizations[kMoveCmpOpt])->Run(bb, insn);
3065             break;
3066         }
3067         case MOP_xaddrrr: {
3068             (static_cast<AddLdrOpt *>(optimizations[kAddLdrOpt]))->Run(bb, insn);
3069             break;
3070         }
3071         case MOP_wcselrrrc:
3072         case MOP_xcselrrrc: {
3073             (static_cast<CselZeroOneToCsetOpt *>(optimizations[kCselZeroOneToCsetOpt]))->Run(bb, insn);
3074             break;
3075         }
3076         default:
3077             break;
3078     }
3079     if (&insn == bb.GetLastInsn()) {
3080         (static_cast<ZeroCmpBranchesAArch64 *>(optimizations[kZeroCmpBranchesOpt]))->Run(bb, insn);
3081     }
3082 }
3083 
3084 void AArch64PeepHole0::InitOpts()
3085 {
3086     optimizations.resize(kPeepholeOptsNum);
3087     optimizations[kRemoveIdenticalLoadAndStoreOpt] = optOwnMemPool->New<RemoveIdenticalLoadAndStoreAArch64>(cgFunc);
3088     optimizations[kCmpCsetOpt] = optOwnMemPool->New<CmpCsetAArch64>(cgFunc);
3089     optimizations[kComplexMemOperandOptAdd] = optOwnMemPool->New<ComplexMemOperandAddAArch64>(cgFunc);
3090     optimizations[kDeleteMovAfterCbzOrCbnzOpt] = optOwnMemPool->New<DeleteMovAfterCbzOrCbnzAArch64>(cgFunc);
3091     optimizations[kRemoveSxtBeforeStrOpt] = optOwnMemPool->New<RemoveSxtBeforeStrAArch64>(cgFunc);
3092     optimizations[kRemoveMovingtoSameRegOpt] = optOwnMemPool->New<RemoveMovingtoSameRegAArch64>(cgFunc);
3093     optimizations[kEnhanceStrLdrAArch64Opt] = optOwnMemPool->New<EnhanceStrLdrAArch64>(cgFunc);
3094     optimizations[kAddImmZeroToMov] = optOwnMemPool->New<AddImmZeroToMov>(cgFunc);
3095 }
3096 
3097 void AArch64PeepHole0::Run(BB &bb, Insn &insn)
3098 {
3099     MOperator thisMop = insn.GetMachineOpcode();
3100     switch (thisMop) {
3101         case MOP_wcmpri:
3102         case MOP_xcmpri: {
3103             (static_cast<CmpCsetAArch64 *>(optimizations[kCmpCsetOpt]))->Run(bb, insn);
3104             break;
3105         }
3106         case MOP_xaddrrr: {
3107             (static_cast<ComplexMemOperandAddAArch64 *>(optimizations[kComplexMemOperandOptAdd]))->Run(bb, insn);
3108             break;
3109         }
3110         case MOP_xaddrri12: {
3111             (static_cast<AddImmZeroToMov *>(optimizations[kAddImmZeroToMov]))->Run(bb, insn);
3112             break;
3113         }
3114         case MOP_wcbz:
3115         case MOP_xcbz:
3116         case MOP_wcbnz:
3117         case MOP_xcbnz: {
3118             (static_cast<DeleteMovAfterCbzOrCbnzAArch64 *>(optimizations[kDeleteMovAfterCbzOrCbnzOpt]))->Run(bb, insn);
3119             break;
3120         }
3121         case MOP_wstrh:
3122         case MOP_wstrb: {
3123             (static_cast<RemoveSxtBeforeStrAArch64 *>(optimizations[kRemoveSxtBeforeStrOpt]))->Run(bb, insn);
3124             break;
3125         }
3126         case MOP_wmovrr:
3127         case MOP_xmovrr:
3128         case MOP_xvmovs:
3129         case MOP_xvmovd:
3130         case MOP_vmovuu:
3131         case MOP_vmovvv: {
3132             (static_cast<RemoveMovingtoSameRegAArch64 *>(optimizations[kRemoveMovingtoSameRegOpt]))->Run(bb, insn);
3133             break;
3134         }
3135         case MOP_xldr:
3136         case MOP_xstr:
3137         case MOP_wldr:
3138         case MOP_wstr:
3139         case MOP_dldr:
3140         case MOP_dstr:
3141         case MOP_sldr:
3142         case MOP_sstr: {
3143             if (thisMop == MOP_wstr || thisMop == MOP_xstr) {
3144                 (static_cast<RemoveIdenticalLoadAndStoreAArch64 *>(optimizations[kRemoveIdenticalLoadAndStoreOpt]))
3145                     ->Run(bb, insn);
3146             }
3147             (static_cast<EnhanceStrLdrAArch64 *>(optimizations[kEnhanceStrLdrAArch64Opt]))->Run(bb, insn);
3148             break;
3149         }
3150         default:
3151             break;
3152     }
3153 }
3154 
3155 void AArch64PrePeepHole::InitOpts()
3156 {
3157     optimizations.resize(kPeepholeOptsNum);
3158     optimizations[kOneHoleBranchesPreOpt] = optOwnMemPool->New<OneHoleBranchesPreAArch64>(cgFunc);
3159     optimizations[kReplaceOrrToMovOpt] = optOwnMemPool->New<ReplaceOrrToMovAArch64>(cgFunc);
3160     optimizations[kReplaceCmpToCmnOpt] = optOwnMemPool->New<ReplaceCmpToCmnAArch64>(cgFunc);
3161     optimizations[kComplexMemOperandOpt] = optOwnMemPool->New<ComplexMemOperandAArch64>(cgFunc);
3162     optimizations[kComplexMemOperandPreOptAdd] = optOwnMemPool->New<ComplexMemOperandPreAddAArch64>(cgFunc);
3163     optimizations[kComplexMemOperandOptLSL] = optOwnMemPool->New<ComplexMemOperandLSLAArch64>(cgFunc);
3164     optimizations[kComplexMemOperandOptLabel] = optOwnMemPool->New<ComplexMemOperandLabelAArch64>(cgFunc);
3165     optimizations[kDuplicateExtensionOpt] = optOwnMemPool->New<ElimDuplicateExtensionAArch64>(cgFunc);
3166     optimizations[kEnhanceStrLdrAArch64Opt] = optOwnMemPool->New<EnhanceStrLdrAArch64>(cgFunc);
3167 }
3168 
3169 void AArch64PrePeepHole::Run(BB &bb, Insn &insn)
3170 {
3171     MOperator thisMop = insn.GetMachineOpcode();
3172     switch (thisMop) {
3173         case MOP_wiorrri12:
3174         case MOP_xiorrri13: {
3175             (static_cast<ReplaceOrrToMovAArch64 *>(optimizations[kReplaceOrrToMovOpt]))->Run(bb, insn);
3176             break;
3177         }
3178         case MOP_wmovri32:
3179         case MOP_xmovri64: {
3180             (static_cast<ReplaceCmpToCmnAArch64 *>(optimizations[kReplaceCmpToCmnOpt]))->Run(bb, insn);
3181             break;
3182         }
3183         case MOP_xadrpl12: {
3184             (static_cast<ComplexMemOperandAArch64 *>(optimizations[kComplexMemOperandOpt]))->Run(bb, insn);
3185             break;
3186         }
3187         case MOP_xaddrrr: {
3188             (static_cast<ComplexMemOperandPreAddAArch64 *>(optimizations[kComplexMemOperandPreOptAdd]))->Run(bb, insn);
3189             break;
3190         }
3191         case MOP_xaddrrrs: {
3192             (static_cast<ComplexMemOperandLSLAArch64 *>(optimizations[kComplexMemOperandOptLSL]))->Run(bb, insn);
3193             break;
3194         }
3195         case MOP_xsxtb32:
3196         case MOP_xsxth32:
3197         case MOP_xsxtb64:
3198         case MOP_xsxth64:
3199         case MOP_xsxtw64:
3200         case MOP_xuxtb32:
3201         case MOP_xuxth32:
3202         case MOP_xuxtw64: {
3203             (static_cast<ElimDuplicateExtensionAArch64 *>(optimizations[kDuplicateExtensionOpt]))->Run(bb, insn);
3204             break;
3205         }
3206         case MOP_xldli: {
3207             (static_cast<ComplexMemOperandLabelAArch64 *>(optimizations[kComplexMemOperandOptLabel]))->Run(bb, insn);
3208             break;
3209         }
3210         case MOP_xldr:
3211         case MOP_xstr:
3212         case MOP_wldr:
3213         case MOP_wstr:
3214         case MOP_dldr:
3215         case MOP_dstr:
3216         case MOP_sldr:
3217         case MOP_sstr: {
3218             (static_cast<EnhanceStrLdrAArch64 *>(optimizations[kEnhanceStrLdrAArch64Opt]))->Run(bb, insn);
3219             break;
3220         }
3221         default:
3222             break;
3223     }
3224     if (&insn == bb.GetLastInsn()) {
3225         (static_cast<OneHoleBranchesPreAArch64 *>(optimizations[kOneHoleBranchesPreOpt]))->Run(bb, insn);
3226     }
3227 }
3228 
3229 void AArch64PrePeepHole1::InitOpts()
3230 {
3231     optimizations.resize(kPeepholeOptsNum);
3232     optimizations[kOneHoleBranchesOpt] = optOwnMemPool->New<OneHoleBranchesAArch64>(cgFunc);
3233     optimizations[kAndCmpBranchesToTbzOpt] = optOwnMemPool->New<AndCmpBranchesToTbzAArch64>(cgFunc);
3234     optimizations[kComplexExtendWordLslOpt] = optOwnMemPool->New<ComplexExtendWordLslAArch64>(cgFunc);
3235 }
3236 
3237 void AArch64PrePeepHole1::Run(BB &bb, Insn &insn)
3238 {
3239     MOperator thisMop = insn.GetMachineOpcode();
3240     switch (thisMop) {
3241         case MOP_xsxtw64:
3242         case MOP_xuxtw64: {
3243             (static_cast<ComplexExtendWordLslAArch64 *>(optimizations[kComplexExtendWordLslOpt]))->Run(bb, insn);
3244             break;
3245         }
3246         default:
3247             break;
3248     }
3249     if (&insn == bb.GetLastInsn()) {
3250         switch (thisMop) {
3251             case MOP_wcbz:
3252             case MOP_wcbnz:
3253             case MOP_xcbz:
3254             case MOP_xcbnz: {
3255                 (static_cast<OneHoleBranchesAArch64 *>(optimizations[kOneHoleBranchesOpt]))->Run(bb, insn);
3256                 break;
3257             }
3258             case MOP_beq:
3259             case MOP_bne: {
3260                 (static_cast<AndCmpBranchesToTbzAArch64 *>(optimizations[kAndCmpBranchesToTbzOpt]))->Run(bb, insn);
3261                 break;
3262             }
3263             default:
3264                 break;
3265         }
3266     }
3267 }
3268 
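/*
 * Sketch of the identical load/store elimination (registers and offsets are hypothetical;
 * no call, aliasing memory access, or redefinition of the involved registers may occur
 * in between):
 *  str x0, [sp, #8]
 *  ...
 *  ldr x0, [sp, #8]    ===>    the ldr is removed
 *
 *  str x0, [sp, #8]
 *  ...
 *  str x1, [sp, #8]    ===>    the first (dead) str is removed
 */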
3269 bool RemoveIdenticalLoadAndStorePattern::IsIdenticalMemOpcode(const Insn &curInsn, const Insn &checkedInsn) const
3270 {
3271     if (checkedInsn.GetMachineOpcode() == curInsn.GetMachineOpcode()) {
3272         return true;
3273     }
3274     if (curInsn.IsLoad() && checkedInsn.IsStore()) {
3275         MOperator curMop = curInsn.GetMachineOpcode();
3276         MOperator checkedMop = checkedInsn.GetMachineOpcode();
3277         // We do not eliminate wldr after wstr, to ensure that the high bits are cleared
3278         if (curMop == MOP_xldr && checkedMop == MOP_xstr) {
3279             return true;
3280         }
3281         if (curMop == MOP_dldr && checkedMop == MOP_dstr) {
3282             return true;
3283         }
3284         if (curMop == MOP_sldr && checkedMop == MOP_sstr) {
3285             return true;
3286         }
3287         if (curMop == MOP_qldr && checkedMop == MOP_qstr) {
3288             return true;
3289         }
3290     }
3291     // For str[BOI] + str[BOI]: they do not need to have the same [srcOpnd], as long as the sizes of the srcOpnds are the same
3292     if (checkedInsn.IsStore() && curInsn.IsStore() && checkedInsn.GetMemoryByteSize() == curInsn.GetMemoryByteSize()) {
3293         return true;
3294     }
3295     return false;
3296 }
3297 
3298 Insn *RemoveIdenticalLoadAndStorePattern::FindPrevIdenticalMemInsn(const Insn &curInsn) const
3299 {
3300     auto *curMemOpnd = static_cast<MemOperand *>(curInsn.GetMemOpnd());
3301     ASSERT_NOT_NULL(curMemOpnd);
3302     RegOperand *curBaseOpnd = curMemOpnd->GetBaseRegister();
3303     OfstOperand *curOfstOpnd = curMemOpnd->GetOffsetImmediate();
3304     auto &curFirstOpnd = static_cast<RegOperand &>(curInsn.GetOperand(kInsnFirstOpnd));
3305     for (Insn *checkedInsn = curInsn.GetPreviousMachineInsn(); checkedInsn != nullptr;
3306          checkedInsn = checkedInsn->GetPreviousMachineInsn()) {
3307         if (checkedInsn->IsCall() || checkedInsn->IsSpecialCall()) {
3308             return nullptr;
3309         }
3310         if (checkedInsn->IsRegDefined(curBaseOpnd->GetRegisterNumber())) {
3311             return nullptr;
3312         }
3313         if (curInsn.IsLoad() && checkedInsn->IsRegDefined(curFirstOpnd.GetRegisterNumber())) {
3314             return nullptr;
3315         }
3316         if (!checkedInsn->IsStore() && !checkedInsn->IsLoad()) {
3317             continue;
3318         }
3319         if (!IsIdenticalMemOpcode(curInsn, *checkedInsn)) {
3320             continue;
3321         }
3322         auto *checkedMemOpnd = static_cast<MemOperand *>(checkedInsn->GetMemOpnd());
3323         CHECK_FATAL(checkedMemOpnd != nullptr, "invalid mem instruction");
3324         if (checkedMemOpnd->GetAddrMode() != MemOperand::kAddrModeBOi || checkedMemOpnd->GetBaseRegister() == nullptr ||
3325             checkedMemOpnd->GetOffsetImmediate() == nullptr) {
3326             continue;
3327         }
3328         RegOperand *checkedBaseOpnd = checkedMemOpnd->GetBaseRegister();
3329         OfstOperand *checkedOfstOpnd = checkedMemOpnd->GetOffsetImmediate();
3330         auto &checkedFirstOpnd = static_cast<RegOperand &>(checkedInsn->GetOperand(kInsnFirstOpnd));
3331         if (checkedBaseOpnd->GetRegisterNumber() == curBaseOpnd->GetRegisterNumber() &&
3332             checkedOfstOpnd->GetValue() == curOfstOpnd->GetValue() &&
3333             (checkedFirstOpnd.GetRegisterNumber() == curFirstOpnd.GetRegisterNumber() || curInsn.IsStore())) {
3334             return checkedInsn;
3335         }
3336     }
3337     return nullptr;
3338 }
3339 
3340 bool RemoveIdenticalLoadAndStorePattern::HasMemReferenceBetweenTwoInsns(const Insn &curInsn) const
3341 {
3342     auto *curMemOpnd = static_cast<MemOperand *>(curInsn.GetMemOpnd());
3343     DEBUG_ASSERT(curMemOpnd != nullptr, "curMemOpnd should not be nullptr");
3344     RegOperand *curBaseOpnd = curMemOpnd->GetBaseRegister();
3345     OfstOperand *curOfstOpnd = curMemOpnd->GetOffsetImmediate();
3346     for (Insn *checkedInsn = curInsn.GetPreviousMachineInsn();
3347          checkedInsn != prevIdenticalInsn && checkedInsn != nullptr;
3348          checkedInsn = checkedInsn->GetPreviousMachineInsn()) {
3349         // Check mem alias
3350         if ((checkedInsn->IsMemAccess() || checkedInsn->IsMemAccessBar()) &&
3351             AArch64MemReference::HasAliasMemoryDep(*checkedInsn, curInsn, kDependenceTypeNone)) {
3352             return true;
3353         }
3354         auto *checkedMemOpnd = static_cast<MemOperand *>(checkedInsn->GetMemOpnd());
3355         if (checkedMemOpnd == nullptr) {
3356             continue;
3357         }
3358         RegOperand *checkedBaseOpnd = checkedMemOpnd->GetBaseRegister();
3359         OfstOperand *checkedOfstOpnd = checkedMemOpnd->GetOffsetImmediate();
3360         if (checkedBaseOpnd == nullptr || checkedOfstOpnd == nullptr) {
3361             continue;
3362         }
3363         if (checkedBaseOpnd->GetRegisterNumber() == curBaseOpnd->GetRegisterNumber()) {
3364             // Check mem overlap
3365             int64 curBaseOfst = curOfstOpnd->GetValue();
3366             int64 checkedOfst = checkedOfstOpnd->GetValue();
3367             auto curMemRange = static_cast<int64>(curInsn.GetMemoryByteSize());
3368             auto checkedMemRange = static_cast<int64>(checkedInsn->GetMemoryByteSize());
3369             if ((curBaseOfst >= checkedOfst && curBaseOfst < (checkedOfst + checkedMemRange)) ||
3370                 (checkedOfst >= curBaseOfst && checkedOfst < (curBaseOfst + curMemRange))) {
3371                 return true;
3372             }
3373         }
3374     }
3375     return false;
3376 }
3377 
3378 bool RemoveIdenticalLoadAndStorePattern::HasImplictSizeUse(const Insn &curInsn) const
3379 {
3380     if (prevIdenticalInsn->GetOperandSize(kInsnFirstOpnd) != curInsn.GetOperandSize(kInsnFirstOpnd)) {
3381         return true;
3382     }
3383     // To avoid optimizations such as the following:
3384     // str w10, [sp, #8]
3385     // ldr w10, [sp, #8]     ---\-->  cannot be removed
3386     // ...
3387     // str x10, [x1, #16]
3388     if (curInsn.IsLoad() && prevIdenticalInsn->IsStore()) {
3389         auto &defOpnd = static_cast<RegOperand &>(curInsn.GetOperand(kInsnFirstOpnd));
3390         for (Insn *cursor = curInsn.GetNext(); cursor != nullptr; cursor = cursor->GetNext()) {
3391             if (!cursor->IsMachineInstruction()) {
3392                 continue;
3393             }
3394             uint32 opndNum = cursor->GetOperandSize();
3395             for (uint32 i = 0; i < opndNum; ++i) {
3396                 if (cursor->OpndIsDef(i)) {
3397                     continue;
3398                 }
3399                 if (!cursor->GetOperand(i).IsRegister()) {
3400                     continue;
3401                 }
3402                 auto &useOpnd = static_cast<RegOperand &>(cursor->GetOperand(i));
3403                 if (useOpnd.GetRegisterNumber() == defOpnd.GetRegisterNumber() &&
3404                     curInsn.GetOperandSize(kInsnFirstOpnd) != cursor->GetOperandSize(i)) {
3405                     return true;
3406                 }
3407             }
3408         }
3409     }
3410     return false;
3411 }
3412 
3413 bool RemoveIdenticalLoadAndStorePattern::CheckCondition(Insn &insn)
3414 {
3415     if (!cgFunc->GetMirModule().IsCModule()) {
3416         return false;
3417     }
3418     if (!insn.IsStore() && !insn.IsLoad()) {
3419         return false;
3420     }
3421     auto *memOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
3422     CHECK_FATAL(memOpnd != nullptr, "invalid mem instruction");
3423     if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi || memOpnd->GetBaseRegister() == nullptr ||
3424         memOpnd->GetOffsetImmediate() == nullptr) {
3425         return false;
3426     }
3427     prevIdenticalInsn = FindPrevIdenticalMemInsn(insn);
3428     if (prevIdenticalInsn == nullptr) {
3429         return false;
3430     }
3431     if (HasImplictSizeUse(insn)) {
3432         return false;
3433     }
3434     if (HasMemReferenceBetweenTwoInsns(insn)) {
3435         return false;
3436     }
3437     return true;
3438 }
3439 
3440 void RemoveIdenticalLoadAndStorePattern::Run(BB &bb, Insn &insn)
3441 {
3442     if (!CheckCondition(insn)) {
3443         return;
3444     }
3445     if (insn.IsStore()) {
3446         bb.RemoveInsn(*prevIdenticalInsn);
3447     } else {
3448         currInsn = insn.GetNextMachineInsn();
3449         bb.RemoveInsn(insn);
3450     }
3451     optSuccess = true;
3452 }
3453 
3454 bool RemoveIdenticalLoadAndStoreAArch64::IsMemOperandsIdentical(const Insn &insn1, const Insn &insn2) const
3455 {
3456     regno_t regNO1 = static_cast<RegOperand &>(insn1.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
3457     regno_t regNO2 = static_cast<RegOperand &>(insn2.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
3458     if (regNO1 != regNO2) {
3459         return false;
3460     }
3461     /* Match only [base + offset] */
3462     auto &memOpnd1 = static_cast<MemOperand &>(insn1.GetOperand(kInsnSecondOpnd));
3463     if (memOpnd1.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
3464         return false;
3465     }
3466     auto &memOpnd2 = static_cast<MemOperand &>(insn2.GetOperand(kInsnSecondOpnd));
3467     if (memOpnd2.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
3468         return false;
3469     }
3470     Operand *base1 = memOpnd1.GetBaseRegister();
3471     Operand *base2 = memOpnd2.GetBaseRegister();
3472     if (!((base1 != nullptr) && base1->IsRegister()) || !((base2 != nullptr) && base2->IsRegister())) {
3473         return false;
3474     }
3475 
3476     regno_t baseRegNO1 = static_cast<RegOperand *>(base1)->GetRegisterNumber();
3477     /* The first insn re-writes its own base address: reg1 <- [reg1 + offset] */
3478     if (baseRegNO1 == regNO1) {
3479         return false;
3480     }
3481 
3482     regno_t baseRegNO2 = static_cast<RegOperand *>(base2)->GetRegisterNumber();
3483     if (baseRegNO1 != baseRegNO2) {
3484         return false;
3485     }
3486 
3487     return memOpnd1.GetOffsetImmediate()->GetOffsetValue() == memOpnd2.GetOffsetImmediate()->GetOffsetValue();
3488 }
3489 
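/*
 * Sketch of the adjacent-insn variant below (registers and offsets are hypothetical;
 * the two memory operands must be identical base + offset):
 *  str w0, [x1, #4]
 *  str w0, [x1, #4]    ===>    the first str is removed
 *
 *  str w0, [x1, #4]
 *  ldr w0, [x1, #4]    ===>    the ldr is removed
 */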
3490 void RemoveIdenticalLoadAndStoreAArch64::Run(BB &bb, Insn &insn)
3491 {
3492     Insn *nextInsn = insn.GetNextMachineInsn();
3493     if (nextInsn == nullptr) {
3494         return;
3495     }
3496     MOperator mop1 = insn.GetMachineOpcode();
3497     MOperator mop2 = nextInsn->GetMachineOpcode();
3498     if ((mop1 == MOP_wstr && mop2 == MOP_wstr) || (mop1 == MOP_xstr && mop2 == MOP_xstr)) {
3499         if (IsMemOperandsIdentical(insn, *nextInsn)) {
3500             bb.RemoveInsn(insn);
3501         }
3502     } else if ((mop1 == MOP_wstr && mop2 == MOP_wldr) || (mop1 == MOP_xstr && mop2 == MOP_xldr)) {
3503         if (IsMemOperandsIdentical(insn, *nextInsn)) {
3504             bb.RemoveInsn(*nextInsn);
3505         }
3506     }
3507 }
3508 
3509 bool RemoveMovingtoSameRegPattern::CheckCondition(Insn &insn)
3510 {
3511     DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "expects registers");
3512     DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "expects registers");
3513     auto &reg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3514     auto &reg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
3515     /* remove mov x0, x0 generated when casting i32 to i64 */
3516     if ((reg1.GetRegisterNumber() == reg2.GetRegisterNumber()) && (reg1.GetSize() >= reg2.GetSize())) {
3517         return true;
3518     }
3519     return false;
3520 }
3521 
3522 void RemoveMovingtoSameRegPattern::Run(BB &bb, Insn &insn)
3523 {
3524     /* remove mov x0, x0 generated when casting i32 to i64 */
3525     if (CheckCondition(insn)) {
3526         bb.RemoveInsn(insn);
3527     }
3528 }
3529 
3530 void RemoveMovingtoSameRegAArch64::Run(BB &bb, Insn &insn)
3531 {
3532     DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "expects registers");
3533     DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "expects registers");
3534     auto &reg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3535     auto &reg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
3536     /* remove mov x0, x0 generated when casting i32 to i64 */
3537     if ((reg1.GetRegisterNumber() == reg2.GetRegisterNumber()) && (reg1.GetSize() >= reg2.GetSize())) {
3538         bb.RemoveInsn(insn);
3539     }
3540 }
3541 
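/*
 * Sketch of the mul-by-immediate strength reduction below (registers and values are
 * hypothetical; the pattern runs on SSA form and the multiplier must be a non-negative
 * power of 2 or zero):
 *  mov w1, #8
 *  mul w0, w2, w1    ===>    lsl w0, w2, #3
 * and for a zero multiplier:
 *  mov w1, #0
 *  mul w0, w2, w1    ===>    mov w0, #0
 */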
3542 bool MulImmToShiftPattern::CheckCondition(Insn &insn)
3543 {
3544     auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
3545     movInsn = ssaInfo->GetDefInsn(useReg);
3546     if (movInsn == nullptr) {
3547         return false;
3548     }
3549     MOperator prevMop = movInsn->GetMachineOpcode();
3550     if (prevMop != MOP_wmovri32 && prevMop != MOP_xmovri64) {
3551         return false;
3552     }
3553     ImmOperand &immOpnd = static_cast<ImmOperand &>(movInsn->GetOperand(kInsnSecondOpnd));
3554     if (immOpnd.IsNegative()) {
3555         return false;
3556     }
3557     uint64 immVal = static_cast<uint64>(immOpnd.GetValue());
3558     if (immVal == 0) {
3559         shiftVal = 0;
3560         newMop = insn.GetMachineOpcode() == MOP_xmulrrr ? MOP_xmovri64 : MOP_wmovri32;
3561         return true;
3562     }
3563     /* immVal must be a power of 2 */
3564     if ((immVal & (immVal - 1)) != 0) {
3565         return false;
3566     }
3567     shiftVal = static_cast<uint32>(log2(immVal));
3568     newMop = (prevMop == MOP_xmovri64) ? MOP_xlslrri6 : MOP_wlslrri5;
3569     return true;
3570 }
3571 
3572 void MulImmToShiftPattern::Run(BB &bb, Insn &insn)
3573 {
3574     /* mov x0,imm and mul to shift */
3575     if (!CheckCondition(insn)) {
3576         return;
3577     }
3578     auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
3579     ImmOperand &immOpnd = aarch64CGFunc->CreateImmOperand(shiftVal, k32BitSize, false);
3580     Insn *newInsn;
3581     if (newMop == MOP_xmovri64 || newMop == MOP_wmovri32) {
3582         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), immOpnd);
3583     } else {
3584         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd),
3585                                                        insn.GetOperand(kInsnSecondOpnd), immOpnd);
3586     }
3587     bb.ReplaceInsn(insn, *newInsn);
3588     /* update ssa info */
3589     ssaInfo->ReplaceInsn(insn, *newInsn);
3590     optSuccess = true;
3591     SetCurrInsn(newInsn);
3592     if (CG_PEEP_DUMP) {
3593         std::vector<Insn *> prevs;
3594         prevs.emplace_back(movInsn);
3595         DumpAfterPattern(prevs, &insn, newInsn);
3596     }
3597 }
3598 
3599 bool EnhanceStrLdrAArch64::CheckOperandIsDeadFromInsn(const RegOperand &regOpnd, Insn &insn)
3600 {
3601     for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
3602         auto &opnd = insn.GetOperand(i);
3603         if (!insn.GetDesc()->GetOpndDes(i)->IsRegDef()) {
3604             continue;
3605         }
3606         // regOpnd is redefined at curInsn
3607         if (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
3608             return true;
3609         }
3610     }
3611     return !IfOperandIsLiveAfterInsn(regOpnd, insn);
3612 }
3613 
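/*
 * Sketch of the offset computation below (values are hypothetical): for the rri24 forms
 * the 12-bit immediate is shifted left by 12 first, and for the sub forms the value is
 * negated before being added to the existing offset, e.g.
 *  add x1, x2, #0x2, LSL #12    // contributes 8192
 *  ldr x0, [x1, #8]             // existing offset 8
 *  => new offset 8200
 */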
3614 ImmOperand *EnhanceStrLdrAArch64::GetInsnAddOrSubNewOffset(Insn &insn, ImmOperand &offset)
3615 {
3616     int64 val = 0;
3617     VaryType vary = offset.GetVary();
3618     auto mOp = insn.GetMachineOpcode();
3619     if (mOp == MOP_xaddrri12 || mOp == MOP_xsubrri12) {
3620         auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
3621         val = immOpnd.GetValue();
3622         CHECK_FATAL(!(vary == kUnAdjustVary && immOpnd.GetVary() == kUnAdjustVary), "NIY, can not deal this case!");
3623         vary = immOpnd.GetVary();
3624     } else {
3625         auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
3626         auto &shiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
3627         CHECK_FATAL(shiftOpnd.GetShiftAmount() == 12, "invalid shiftAmount"); // ShiftAmount must be 12
3628         val = (immOpnd.GetValue() << shiftOpnd.GetShiftAmount());
3629     }
3630 
3631     if (mOp == MOP_xsubrri12 || mOp == MOP_xsubrri24) {
3632         val = -val;
3633     }
3634     val += offset.GetValue();
3635     auto &newImm = static_cast<AArch64CGFunc &>(cgFunc).GetOrCreateOfstOpnd(val, k64BitSize);
3636     newImm.SetVary(vary);
3637     return &newImm;
3638 }
3639 
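/*
 * Sketch of the base+offset folding below (registers and offsets are hypothetical; the
 * rewrite is kept only if the new immediate offset is valid and non-negative and the old
 * base register is dead after the memory access):
 *  add x1, x2, #16
 *  ldr x0, [x1, #8]    ===>    ldr x0, [x2, #24]
 */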
3640 void EnhanceStrLdrAArch64::OptimizeAddrBOI(Insn &insn, MemOperand &memOpnd, Insn &prevInsn)
3641 {
3642     auto *oriBase = memOpnd.GetBaseRegister();
3643     auto *oriOffset = memOpnd.GetOffsetOperand();
3644     auto &defOpnd = static_cast<RegOperand &>(prevInsn.GetOperand(kInsnFirstOpnd));
3645     if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
3646         return;
3647     }
3648     auto *newBase = static_cast<RegOperand *>(&prevInsn.GetOperand(kInsnSecondOpnd));
3649     auto *newOffset = GetInsnAddOrSubNewOffset(prevInsn, *memOpnd.GetOffsetOperand());
3650     if (newOffset->GetValue() < 0) {
3651             return;  // objdump cannot handle: str x19, [x29, #-16]
3652     }
3653 
3654     memOpnd.SetBaseRegister(*newBase);
3655     memOpnd.SetOffsetOperand(*newOffset);
3656     if (!static_cast<AArch64CGFunc &>(cgFunc).IsOperandImmValid(insn.GetMachineOpcode(), &memOpnd, kInsnSecondOpnd)) {
3657         // If new offset is invalid, undo it
3658         memOpnd.SetBaseRegister(*oriBase);
3659         memOpnd.SetOffsetOperand(*oriOffset);
3660         return;
3661     }
3662     memOpnd.SetAddrMode(MemOperand::kAddrModeBOi);
3663     prevInsn.GetBB()->RemoveInsn(prevInsn);
3664 }
3665 
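/*
 * Sketch of the index extend/shift folding below, applied once OptimizeAddrBOrX has
 * already rewritten the address into [base, index] form (registers are hypothetical;
 * the lsl amount must match the access size and the extend/shift def must be dead
 * after the memory access):
 *  sxtw x3, w3
 *  ldr  x0, [x2, x3]    ===>    ldr x0, [x2, w3, SXTW]
 *
 *  lsl  x3, x3, #3
 *  ldr  x0, [x2, x3]    ===>    ldr x0, [x2, x3, LSL #3]
 */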
3666 void EnhanceStrLdrAArch64::OptimizeAddrBOrXShiftExtend(Insn &insn, MemOperand &memOpnd, Insn &shiftExtendInsn)
3667 {
3668     auto mOp = shiftExtendInsn.GetMachineOpcode();
3669     if (mOp != MOP_xuxtw64 && mOp != MOP_xsxtw64 && mOp != MOP_xlslrri6) {
3670         return;
3671     }
3672     auto *oriIndex = memOpnd.GetIndexRegister();
3673     auto &defOpnd = static_cast<RegOperand &>(shiftExtendInsn.GetOperand(kInsnFirstOpnd));
3674     if (defOpnd.GetRegisterNumber() != oriIndex->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
3675         return;
3676     }
3677     auto &newIndex = static_cast<RegOperand &>(shiftExtendInsn.GetOperand(kInsnSecondOpnd));
3678     bool isSigned = (mOp == MOP_xsxtw64);
3679     uint32 shift = 0;
3680     if (mOp == MOP_xlslrri6) {
3681         shift = static_cast<uint32>(static_cast<ImmOperand &>(shiftExtendInsn.GetOperand(kInsnThirdOpnd)).GetValue());
3682     }
3683     const uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize();
3684     // In an ldr/str insn the lsl-extend shift amount can only be 0, 1 (16-bit def opnd), 2 (32-bit def opnd),
3685     // 3 (64-bit def opnd) or 4 (128-bit def opnd); in this pattern we only handle the 32-bit and 64-bit
3686     // cases for now
3687     if ((shift == k0BitSize) || (regSize == k32BitSize && shift == k2BitSize) ||
3688         (regSize == k64BitSize && shift == k3BitSize)) {
3689         auto *newMemOpnd = static_cast<AArch64CGFunc &>(cgFunc).CreateMemOperand(
3690             MemOperand::kAddrModeBOrX, memOpnd.GetSize(), *memOpnd.GetBaseRegister(), newIndex, shift, isSigned);
3691         insn.SetOperand(kInsnSecondOpnd, *newMemOpnd);
3692         shiftExtendInsn.GetBB()->RemoveInsn(shiftExtendInsn);
3693     }
3694 }
3695 
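/*
 * Sketch of the register-offset folding below (registers are hypothetical; the original
 * immediate offset must be 0 and the old base register dead after the memory access):
 *  add x1, x2, x3
 *  ldr x0, [x1]    ===>    ldr x0, [x2, x3]
 */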
3696 void EnhanceStrLdrAArch64::OptimizeAddrBOrX(Insn &insn, MemOperand &memOpnd, Insn &prevInsn)
3697 {
3698     if (memOpnd.GetOffsetOperand()->GetValue() != 0 || memOpnd.GetOffsetOperand()->GetVary() == kUnAdjustVary) {
3699         return;
3700     }
3701     auto *oriBase = memOpnd.GetBaseRegister();
3702     auto &defOpnd = static_cast<RegOperand &>(prevInsn.GetOperand(kInsnFirstOpnd));
3703     if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
3704         return;
3705     }
3706     auto *newBase = static_cast<RegOperand *>(&prevInsn.GetOperand(kInsnSecondOpnd));
3707     auto *newIndex = static_cast<RegOperand *>(&prevInsn.GetOperand(kInsnThirdOpnd));
3708 
3709     memOpnd.SetBaseRegister(*newBase);
3710     memOpnd.SetIndexRegister(*newIndex);
3711     memOpnd.SetAddrMode(MemOperand::kAddrModeBOrX);
3712     auto *prevShiftExtendInsn = prevInsn.GetPreviousMachineInsn();
3713     if (prevShiftExtendInsn != nullptr) {
3714         OptimizeAddrBOrXShiftExtend(insn, memOpnd, *prevShiftExtendInsn);
3715     }
3716     prevInsn.GetBB()->RemoveInsn(prevInsn);
3717 }
3718 
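/*
 * Sketch of the shifted-register-offset folding below (registers and the shift amount are
 * hypothetical; the original offset must be 0, the shift op must be LSL, and the add's
 * destination must be dead after the memory access):
 *  add x1, x2, x3, LSL #3
 *  ldr x0, [x1]    ===>    ldr x0, [x2, x3, LSL #3]
 */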
3719 void EnhanceStrLdrAArch64::OptimizeWithAddrrrs(Insn &insn, MemOperand &memOpnd, Insn &addInsn)
3720 {
3721     if (memOpnd.GetOffsetOperand()->GetValue() != 0 || memOpnd.GetOffsetOperand()->GetVary() != kNotVary) {
3722         return;
3723     }
3724     auto *oriBase = memOpnd.GetBaseRegister();
3725     auto &defOpnd = static_cast<RegOperand &>(addInsn.GetOperand(kInsnFirstOpnd));
3726     if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
3727         return;
3728     }
3729     auto &newBase = static_cast<RegOperand &>(addInsn.GetOperand(kInsnSecondOpnd));
3730     auto &newIndex = static_cast<RegOperand &>(addInsn.GetOperand(kInsnThirdOpnd));
3731     auto &shift = static_cast<BitShiftOperand &>(addInsn.GetOperand(kInsnFourthOpnd));
3732     if (shift.GetShiftOp() != BitShiftOperand::kLSL) {
3733         return;
3734     }
3735     auto *newMemOpnd = static_cast<AArch64CGFunc &>(cgFunc).CreateMemOperand(
3736         MemOperand::kAddrModeBOrX, memOpnd.GetSize(), newBase, newIndex, shift.GetShiftAmount());
3737     insn.SetOperand(kInsnSecondOpnd, *newMemOpnd);
3738     addInsn.GetBB()->RemoveInsn(addInsn);
3739 }
3740 
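/*
 * Sketch of the mov-base folding in the loop below (registers are hypothetical; each mov
 * is removed only when its destination is dead after the memory access), after which the
 * add/sub folding above is attempted on the remaining defining insn:
 *  mov x1, x2
 *  ldr x0, [x1, #8]    ===>    ldr x0, [x2, #8]
 */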
3741 void EnhanceStrLdrAArch64::Run(BB &bb, Insn &insn)
3742 {
3743     Operand &opnd = insn.GetOperand(kInsnSecondOpnd);
3744     CHECK_FATAL(opnd.IsMemoryAccessOperand(), "Unexpected operand in EnhanceStrLdrAArch64");
3745     auto &memOpnd = static_cast<MemOperand &>(opnd);
3746     if (memOpnd.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd.GetOffsetImmediate()->IsImmOffset()) {
3747         return;
3748     }
3749 
3750     auto *prev = insn.GetPreviousMachineInsn();
3751     while (prev != nullptr) {
3752         if (prev->GetMachineOpcode() == MOP_xmovrr) {
3753             auto &defOpnd = static_cast<RegOperand &>(prev->GetOperand(kInsnFirstOpnd));
3754             if (defOpnd.GetRegisterNumber() != memOpnd.GetBaseRegister()->GetRegisterNumber() ||
3755                 !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
3756                 return;
3757             }
3758             memOpnd.SetBaseRegister(static_cast<RegOperand &>(prev->GetOperand(kInsnSecondOpnd)));
3759             auto *tmpInsn = prev;
3760             prev = prev->GetPreviousMachineInsn();
3761             tmpInsn->GetBB()->RemoveInsn(*tmpInsn);
3762             continue;
3763         }
3764         break;
3765     }
3766     if (prev == nullptr) {
3767         return;
3768     }
3769     auto prevMop = prev->GetMachineOpcode();
3770     if (prevMop == MOP_xaddrri12 || prevMop == MOP_xsubrri12 || prevMop == MOP_xaddrri24 || prevMop == MOP_xsubrri24) {
3771         OptimizeAddrBOI(insn, memOpnd, *prev);
3772     } else if (prevMop == MOP_xaddrrr) {
3773         OptimizeAddrBOrX(insn, memOpnd, *prev);
3774     } else if (prevMop == MOP_xaddrrrs) {
3775         OptimizeWithAddrrrs(insn, memOpnd, *prev);
3776     }
3777 }
3778 
3779 bool IsSameRegisterOperation(const RegOperand &desMovOpnd, const RegOperand &uxtDestOpnd, const RegOperand &uxtFromOpnd)
3780 {
3781     return ((desMovOpnd.GetRegisterNumber() == uxtDestOpnd.GetRegisterNumber()) &&
3782             (uxtDestOpnd.GetRegisterNumber() == uxtFromOpnd.GetRegisterNumber()));
3783 }
3784 
3785 bool CombineContiLoadAndStorePattern::IsRegNotSameMemUseInInsn(const Insn &checkInsn, const Insn &curInsn,
3786                                                                regno_t curBaseRegNO, bool isCurStore, int64 curBaseOfst,
3787                                                                int64 curMemRange) const
3788 {
3789     uint32 opndNum = checkInsn.GetOperandSize();
3790     for (uint32 i = 0; i < opndNum; ++i) {
3791         Operand &opnd = checkInsn.GetOperand(i);
3792         if (opnd.IsList()) {
3793             auto &listOpnd = static_cast<const ListOperand &>(opnd);
3794             for (auto &listElem : listOpnd.GetOperands()) {
3795                 auto *regOpnd = static_cast<RegOperand *>(listElem);
3796                 DEBUG_ASSERT(regOpnd != nullptr, "parameter operand must be RegOperand");
3797                 if (curBaseRegNO == regOpnd->GetRegisterNumber()) {
3798                     return true;
3799                 }
3800             }
3801         } else if (opnd.IsMemoryAccessOperand()) {
3802             auto &memOperand = static_cast<MemOperand &>(opnd);
3803             RegOperand *checkBaseReg = memOperand.GetBaseRegister();
3804             // If the base registers of the two MEM insns are different, we use cg-mem-reference to check whether the two MEMs alias:
3805             // if there is no alias, we can combine the MEM pair across the intervening MEM insn.
3806             // e.g.
3807             // str x1, [x9]
3808             // str x6, [x2]
3809             // str x3, [x9, #8]
3810             regno_t stackBaseRegNO = cgFunc->UseFP() ? R29 : RSP;
3811             if ((isCurStore || checkInsn.IsStore()) && checkBaseReg != nullptr &&
3812                 !(curBaseRegNO == stackBaseRegNO && checkBaseReg->GetRegisterNumber() == stackBaseRegNO) &&
3813                 checkBaseReg->GetRegisterNumber() != curBaseRegNO &&
3814                 AArch64MemReference::HasAliasMemoryDep(checkInsn, curInsn, kDependenceTypeNone)) {
3815                 return true;
3816             }
3817             // Check memory overlap
3818             if ((isCurStore || checkInsn.IsStore()) && checkBaseReg != nullptr &&
3819                 memOperand.GetAddrMode() == MemOperand::kAddrModeBOi && memOperand.GetOffsetImmediate() != nullptr) {
3820                 // If memInsn is split with x16, we need to find the actual base register
3821                 int64 checkOffset = memOperand.GetOffsetImmediate()->GetOffsetValue();
3822                 regno_t checkRegNO = checkBaseReg->GetRegisterNumber();
3823                 if (checkRegNO == R16) {
3824                     const Insn *prevInsn = checkInsn.GetPrev();
3825                     // Before cgaggressiveopt, the def and use of R16 must be adjacent, and the def of R16 must be
3826                     // an add-rri; otherwise, we are conservative and do not search forward for a mem insn that
3827                     // can be combined.
3828                     if (prevInsn == nullptr || prevInsn->GetMachineOpcode() != MOP_xaddrri12 ||
3829                         static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R16) {
3830                         return true;
3831                     }
3832                     checkOffset += static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
3833                 }
3834                 auto checkMemRange = static_cast<int64>(checkInsn.GetMemoryByteSize());
3835                 //      curOfst          curOfst+curMemRange
3836                 // |______|_/_/_/_/_/_/_/_/_/_/_|____________|
3837                 if ((curBaseOfst >= checkOffset && curBaseOfst < (checkOffset + checkMemRange)) ||
3838                     (checkOffset >= curBaseOfst && checkOffset < (curBaseOfst + curMemRange))) {
3839                     return true;
3840                 }
3841             }
3842         } else if (opnd.IsConditionCode()) {
3843             auto &rflagOpnd = static_cast<RegOperand &>(cgFunc->GetOrCreateRflag());
3844             if (rflagOpnd.GetRegisterNumber() == curBaseRegNO) {
3845                 return true;
3846             }
3847         } else if (opnd.IsRegister()) {
3848             if (!isCurStore && static_cast<RegOperand &>(opnd).GetRegisterNumber() == curBaseRegNO) {
3849                 return true;
3850             }
3851         }
3852     }
3853     return false;
3854 }
3855 
3856 std::vector<Insn *> CombineContiLoadAndStorePattern::FindPrevStrLdr(Insn &insn, regno_t destRegNO, regno_t memBaseRegNO,
3857                                                                     int64 baseOfst) const
3858 {
3859     std::vector<Insn *> prevContiInsns;
3860     for (Insn *curInsn = insn.GetPrev(); curInsn != nullptr; curInsn = curInsn->GetPrev()) {
3861         if (!curInsn->IsMachineInstruction()) {
3862             continue;
3863         }
3864         if (curInsn->IsRegDefined(memBaseRegNO)) {
3865             return prevContiInsns;
3866         }
3867         DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsMemoryAccessOperand(), "invalid mem insn");
3868         auto baseMemRange = static_cast<int64>(insn.GetMemoryByteSize());
3869         if (IsRegNotSameMemUseInInsn(*curInsn, insn, memBaseRegNO, insn.IsStore(), static_cast<int32>(baseOfst),
3870                                      baseMemRange)) {
3871             return prevContiInsns;
3872         }
3873         // record consecutive STR/LDR insns
3874         if (!curInsn->IsLoadStorePair() &&
3875             ((insn.IsStore() && curInsn->IsStore()) || (insn.IsLoad() && curInsn->IsLoad()))) {
3876             auto *memOperand = static_cast<MemOperand *>(curInsn->GetMemOpnd());
3877             /* do not combine ldr r0, label */
3878             if (memOperand != nullptr) {
3879                 auto *baseRegOpnd = static_cast<RegOperand *>(memOperand->GetBaseRegister());
3880                 DEBUG_ASSERT(baseRegOpnd == nullptr || !baseRegOpnd->IsVirtualRegister(),
3881                              "physical register has not been allocated?");
3882                 if (memOperand->GetAddrMode() == MemOperand::kAddrModeBOi &&
3883                     baseRegOpnd->GetRegisterNumber() == memBaseRegNO) {
3884                     prevContiInsns.emplace_back(curInsn);
3885                 }
3886             }
3887         }
3888         /* check insn that changes the data flow */
3889         /* ldr x8, [x21, #8]
3890          * call foo()
3891          * ldr x9, [x21, #16]
3892          * although x21 is a callee-saved register, there is no guarantee that the data in memory at [x21] is unchanged
3893          */
3894         if (curInsn->IsCall() || curInsn->GetMachineOpcode() == MOP_asm) {
3895             return prevContiInsns;
3896         }
3897         /* Check regOpnd for mem access:
3898          * 1. if destRegNO is RZR, we do not need to check defines and uses of destRegNO between PREVINSN and INSN;
3899          * 2. for a load insn, we forbid both uses and defines of destRegNO between PREVINSN and INSN;
3900          * 3. for a store insn, we only forbid defines of destRegNO between PREVINSN and INSN;
3901          * e.g.1
3902          * ldr x2, [sp, #16]
3903          * add x3, x1, #5  &  add x1, x3, #5  ---\-->  no use or define of [x1] may cross
3904          * ldr x1, [sp, #8]
3905          * e.g.2
3906          * str x2, [sp, #16]
3907          * add x1, x3, #5   ---\--->  only a define of [x1] may not cross
3908          * str x1, [sp, #8]
3909          */
3910         /* store opt should not cross call due to stack args */
3911         if (destRegNO != RZR &&
3912             ((insn.IsLoad() && curInsn->ScanReg(destRegNO)) || (insn.IsStore() && curInsn->IsRegDefined(destRegNO)))) {
3913             return prevContiInsns;
3914         }
3915         if (curInsn->ScanReg(destRegNO)) {
3916             return prevContiInsns;
3917         }
3918     }
3919     return prevContiInsns;
3920 }
3921 
3922 bool CombineContiLoadAndStorePattern::CheckCondition(Insn &insn)
3923 {
3924     MOperator mop = insn.GetMachineOpcode();
3925     if (mop == MOP_wldrb || mop == MOP_wldrh) {
3926         return false;
3927     }
3928     auto *curMemOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
3929     DEBUG_ASSERT(curMemOpnd != nullptr, "get mem operand failed");
3930     if (!doAggressiveCombine || curMemOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
3931         return false;
3932     }
3933     return true;
3934 }
3935 
3936 /* Combining 2 STRs into 1 stp or 2 LDRs into 1 ldp */
3937 void CombineContiLoadAndStorePattern::Run(BB &bb, Insn &insn)
3938 {
3939     if (!CheckCondition(insn)) {
3940         return;
3941     }
3942 
3943     auto *curMemOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
3944     DEBUG_ASSERT(curMemOpnd->GetAddrMode() == MemOperand::kAddrModeBOi, "invalid continuous mem insn");
3945     OfstOperand *curOfstOpnd = curMemOpnd->GetOffsetImmediate();
3946     int64 curOfstVal = curOfstOpnd ? curOfstOpnd->GetOffsetValue() : 0;
3947 
3948     auto *baseRegOpnd = static_cast<RegOperand *>(curMemOpnd->GetBaseRegister());
3949     DEBUG_ASSERT(baseRegOpnd == nullptr || !baseRegOpnd->IsVirtualRegister(),
3950                  "physical register has not been allocated?");
3951     auto &curDestOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3952     std::vector<Insn *> prevContiInsnVec =
3953         FindPrevStrLdr(insn, curDestOpnd.GetRegisterNumber(), baseRegOpnd->GetRegisterNumber(), curOfstVal);
3954     for (auto prevContiInsn : prevContiInsnVec) {
3955         DEBUG_ASSERT(prevContiInsn != nullptr, "get previous consecutive instructions failed");
3956         auto *prevMemOpnd = static_cast<MemOperand *>(prevContiInsn->GetMemOpnd());
3957         DEBUG_ASSERT(prevMemOpnd->GetAddrMode() == MemOperand::kAddrModeBOi, "invalid continuous mem insn");
3958         OfstOperand *prevOfstOpnd = prevMemOpnd->GetOffsetImmediate();
3959         int64 prevOfstVal = prevOfstOpnd ? prevOfstOpnd->GetOffsetValue() : 0;
3960         auto &prevDestOpnd = static_cast<RegOperand &>(prevContiInsn->GetOperand(kInsnFirstOpnd));
3961         if (prevDestOpnd.GetRegisterType() != curDestOpnd.GetRegisterType()) {
3962             continue;
3963         }
3964 
3965         MemOperand *combineMemOpnd = (curOfstVal < prevOfstVal) ? curMemOpnd : prevMemOpnd;
3966         if (IsValidNormalLoadOrStorePattern(insn, *prevContiInsn, *curMemOpnd, curOfstVal, prevOfstVal)) {
3967             // Process normal mem pair
3968             MOperator newMop = GetMopPair(insn.GetMachineOpcode(), true);
3969             Insn *combineInsn =
3970                 GenerateMemPairInsn(newMop, curDestOpnd, prevDestOpnd, *combineMemOpnd, curOfstVal < prevOfstVal);
3971             DEBUG_ASSERT(combineInsn != nullptr, "create combineInsn failed");
3972             bb.InsertInsnAfter(*prevContiInsn, *combineInsn);
3973             if (!(static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(
3974                 newMop, combineMemOpnd, isPairAfterCombine ? kInsnThirdOpnd : kInsnSecondOpnd))) {
3975                 if (FindUseX16AfterInsn(*prevContiInsn)) {
3976                     // Do not combine Insns when x16 was used after curInsn
3977                     bb.RemoveInsn(*combineInsn);
3978                     return;
3979                 }
3980                 SPLIT_INSN(combineInsn, cgFunc);
3981             }
3982             RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
3983             SetCurrInsn(combineInsn);
3984             optSuccess = true;
3985             return;
3986         } else if (IsValidStackArgLoadOrStorePattern(insn, *prevContiInsn, *curMemOpnd, *prevMemOpnd, curOfstVal,
3987                                                      prevOfstVal)) {
3988             // Process stack-arg mem pair
3989             regno_t curDestRegNo = curDestOpnd.GetRegisterNumber();
3990             regno_t prevDestRegNo = prevDestOpnd.GetRegisterNumber();
3991             RegOperand &newDest = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
3992                 static_cast<AArch64reg>(curDestRegNo), k64BitSize, curDestOpnd.GetRegisterType());
3993             RegOperand &newPrevDest = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
3994                 static_cast<AArch64reg>(prevDestRegNo), k64BitSize, prevDestOpnd.GetRegisterType());
3995             MOperator newMop = (curDestOpnd.GetRegisterType() == kRegTyInt) ? MOP_xstp : MOP_dstp;
3996             if (!(static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(newMop, combineMemOpnd, kInsnThirdOpnd))) {
3997                 return;
3998             }
3999             Insn *combineInsn =
4000                 GenerateMemPairInsn(newMop, newDest, newPrevDest, *combineMemOpnd, curOfstVal < prevOfstVal);
4001             bb.InsertInsnAfter(*prevContiInsn, *combineInsn);
4002             RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
4003             SetCurrInsn(combineInsn);
4004             optSuccess = true;
4005             return;
4006         }
4007     }
4008 }
4009 
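/* When the merged ldp/stp offset turns out to be out of range, the pair is split again and the
 * oversized offset is materialized through the scratch register x16 (see SPLIT_INSN above).
 * That is only safe if no later load/store still addresses memory through x16; this check
 * returns true when a following memory access uses x16 as its base before x16 is redefined. */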
4010 bool CombineContiLoadAndStorePattern::FindUseX16AfterInsn(const Insn &curInsn) const
4011 {
4012     for (Insn *cursor = curInsn.GetNext(); cursor != nullptr; cursor = cursor->GetNext()) {
4013         if (!cursor->IsMachineInstruction()) {
4014             continue;
4015         }
4016         for (uint32 defRegNo : cursor->GetDefRegs()) {
4017             if (defRegNo == R16) {
4018                 return false;
4019             }
4020         }
4021         if ((!cursor->IsLoad() && !cursor->IsStore() && !cursor->IsLoadStorePair()) || cursor->IsAtomic()) {
4022             continue;
4023         }
4024         const InsnDesc *md = &AArch64CG::kMd[cursor->GetMachineOpcode()];
4025         if (cursor->IsLoadLabel() || md->IsLoadAddress()) {
4026             continue;
4027         }
4028         uint32 memIdx = (cursor->IsLoadStorePair() ? kInsnThirdOpnd : kInsnSecondOpnd);
4029         auto &curMemOpnd = static_cast<MemOperand &>(cursor->GetOperand(memIdx));
4030         RegOperand *baseOpnd = curMemOpnd.GetBaseRegister();
4031         if (baseOpnd != nullptr && baseOpnd->GetRegisterNumber() == R16) {
4032             return true;
4033         }
4034     }
4035     return false;
4036 }
4037 
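/* Build the merged access: with isPairAfterCombine the two single accesses become one ldp/stp
 * (operand order chosen so the lower address comes first); otherwise two zero-register
 * strb/strh stores are widened into a single strh/str and the memory operand's size is
 * adjusted accordingly. */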
4038 Insn *CombineContiLoadAndStorePattern::GenerateMemPairInsn(MOperator newMop, RegOperand &curDestOpnd,
4039                                                            RegOperand &prevDestOpnd, MemOperand &combineMemOpnd,
4040                                                            bool isCurDestFirst)
4041 {
4042     DEBUG_ASSERT(newMop != MOP_undef, "invalid MOperator");
4043     Insn *combineInsn = nullptr;
4044     if (isPairAfterCombine) {  // for ldr/str --> ldp/stp
4045         combineInsn = (isCurDestFirst)
4046                           ? &cgFunc->GetInsnBuilder()->BuildInsn(newMop, curDestOpnd, prevDestOpnd, combineMemOpnd)
4047                           : &cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevDestOpnd, curDestOpnd, combineMemOpnd);
4048     } else {  // for strb/strh --> strh/str, curDestOpnd == prevDestOpnd
4049         combineInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, curDestOpnd, combineMemOpnd);
4050         combineMemOpnd.SetSize(newMop == MOP_wstrh ? maplebe::k16BitSize : maplebe::k32BitSize);
4051     }
4052     return combineInsn;
4053 }
4054 
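/* A normal (non stack-arg) pair is legal only if both accesses have the same register type,
 * register size and memory width, and their offsets differ by exactly that width, i.e. they
 * touch adjacent memory. In addition, two loads must not target the same register, and
 * strb/strh merging is restricted to stores of the zero register. */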
4055 bool CombineContiLoadAndStorePattern::IsValidNormalLoadOrStorePattern(const Insn &insn, const Insn &prevInsn,
4056                                                                       const MemOperand &memOpnd, int64 curOfstVal,
4057                                                                       int64 prevOfstVal)
4058 {
4059     if (memOpnd.IsStackArgMem()) {
4060         return false;
4061     }
4062     DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "unexpected operand");
4063     DEBUG_ASSERT(prevInsn.GetOperand(kInsnFirstOpnd).IsRegister(), "unexpected operand");
4064     auto &curDestOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4065     auto &prevDestOpnd = static_cast<RegOperand &>(prevInsn.GetOperand(kInsnFirstOpnd));
4066     if (prevDestOpnd.GetRegisterType() != curDestOpnd.GetRegisterType() ||
4067         curDestOpnd.GetSize() != prevDestOpnd.GetSize()) {
4068         return false;
4069     }
4070     uint32 memSize = insn.GetMemoryByteSize();
4071     uint32 prevMemSize = prevInsn.GetMemoryByteSize();
4072     if (memSize != prevMemSize) {
4073         return false;
4074     }
4075 
4076     int64 diffVal = std::abs(curOfstVal - prevOfstVal);
4077     if ((memSize == k1ByteSize && diffVal == k1BitSize) || (memSize == k2ByteSize && diffVal == k2BitSize) ||
4078         (memSize == k4ByteSize && diffVal == k4BitSize) || (memSize == k8ByteSize && diffVal == k8BitSize)) {
4079         MOperator curMop = insn.GetMachineOpcode();
4080         DEBUG_ASSERT(curMop != MOP_wldrb && curMop != MOP_wldrh, "invalid mem insn that cannot be combined");
4081         if (curMop == MOP_wstrb || curMop == MOP_wstrh) {
4082             isPairAfterCombine = false;
4083         }
4084 
4085         regno_t destRegNO = curDestOpnd.GetRegisterNumber();
4086         regno_t prevDestRegNO = prevDestOpnd.GetRegisterNumber();
4087         if (destRegNO == RZR && prevDestRegNO == RZR) {
4088             return true;
4089         }
4090 
4091         if (insn.IsLoad() && destRegNO == prevDestRegNO) {
4092             return false;
4093         }
4094 
4095         if ((curMop == MOP_wstrb || curMop == MOP_wstrh) && (destRegNO != RZR || prevDestRegNO != RZR)) {
4096             return false;
4097         }
4098 
4099         return true;
4100     }
4101 
4102     return false;
4103 }
4104 
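/* Outgoing stack arguments are written with 32- or 64-bit stores into slots 8 bytes apart,
 * so two such stores whose offsets differ by 8 can be merged into one 64-bit stp, provided
 * every valid bit of each source register is covered by its original store width. */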
4105 bool CombineContiLoadAndStorePattern::IsValidStackArgLoadOrStorePattern(const Insn &curInsn, const Insn &prevInsn,
4106                                                                         const MemOperand &curMemOpnd,
4107                                                                         const MemOperand &prevMemOpnd, int64 curOfstVal,
4108                                                                         int64 prevOfstVal) const
4109 {
4110     if (!curInsn.IsStore()) {
4111         return false;
4112     }
4113     if (!curMemOpnd.IsStackArgMem() || !prevMemOpnd.IsStackArgMem()) {
4114         return false;
4115     }
4116     auto &curDestOpnd = static_cast<RegOperand &>(curInsn.GetOperand(kInsnFirstOpnd));
4117     auto &prevDestOpnd = static_cast<RegOperand &>(prevInsn.GetOperand(kInsnFirstOpnd));
4118     uint32 memSize = curInsn.GetMemoryByteSize();
4119     uint32 prevMemSize = prevInsn.GetMemoryByteSize();
4120     auto diffVal = std::abs(curOfstVal - prevOfstVal);
4121     if ((memSize == k4ByteSize || memSize == k8ByteSize) && (prevMemSize == k4ByteSize || prevMemSize == k8ByteSize) &&
4122         (diffVal == k8BitSize) && (curDestOpnd.GetValidBitsNum() == memSize * k8BitSize) &&
4123         (prevDestOpnd.GetValidBitsNum() == prevMemSize * k8BitSize)) {
4124         return true;
4125     }
4126     return false;
4127 }
4128 
4129 void CombineContiLoadAndStorePattern::RemoveInsnAndKeepComment(BB &bb, Insn &insn, Insn &prevInsn) const
4130 {
4131     /* keep the comment */
4132     Insn *nn = prevInsn.GetNextMachineInsn();
4133     std::string newComment = "";
4134     MapleString comment = insn.GetComment();
4135     if (comment.c_str() != nullptr && strlen(comment.c_str()) > 0) {
4136         newComment += comment.c_str();
4137     }
4138     comment = prevInsn.GetComment();
4139     if (comment.c_str() != nullptr && strlen(comment.c_str()) > 0) {
4140         newComment = newComment + "  " + comment.c_str();
4141     }
4142     if (newComment.c_str() != nullptr && strlen(newComment.c_str()) > 0) {
4143         DEBUG_ASSERT(nn != nullptr, "nn should not be nullptr");
4144         nn->SetComment(newComment);
4145     }
4146     bb.RemoveInsn(insn);
4147     bb.RemoveInsn(prevInsn);
4148 }
4149 
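/* Drop a sign-extension of a register that was just defined with an already sign-extended
 * value, e.g.
 *   mov   w0, #3              ldrsb w0, [x1]
 *   sxtb  w0, w0              sxtb  w0, w0
 * each become a single mov / ldrsb. For the 64-bit extensions the preceding mov is also
 * rewritten into a 64-bit mov so that the full register is defined. */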
4150 void EliminateSpecifcSXTAArch64::Run(BB &bb, Insn &insn)
4151 {
4152     MOperator thisMop = insn.GetMachineOpcode();
4153     Insn *prevInsn = insn.GetPrev();
4154     while (prevInsn != nullptr && !prevInsn->GetMachineOpcode()) {
4155         prevInsn = prevInsn->GetPrev();
4156     }
4157     if (prevInsn == nullptr) {
4158         return;
4159     }
4160     auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4161     auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4162     if (&insn != bb.GetFirstInsn() && regOpnd0.GetRegisterNumber() == regOpnd1.GetRegisterNumber() &&
4163         prevInsn->IsMachineInstruction()) {
4164         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4165             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4166             if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4167                 return;
4168             }
4169             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4170             if (opnd.IsIntImmediate()) {
4171                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4172                 int64 value = immOpnd.GetValue();
4173                 if (thisMop == MOP_xsxtb32) {
4174                     /* value should be in the range between -128 and 127 */
4175                     if (value >= static_cast<int64>(0xFFFFFFFFFFFFFF80) && value <= 0x7F &&
4176                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4177                         bb.RemoveInsn(insn);
4178                     }
4179                 } else if (thisMop == MOP_xsxth32) {
4180                     /* value should be in the range between -32768 and 32767 */
4181                     if (value >= static_cast<int64>(0xFFFFFFFFFFFF8000) && value <= 0x7FFF &&
4182                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4183                         bb.RemoveInsn(insn);
4184                     }
4185                 } else {
4186                     uint64 flag = 0xFFFFFFFFFFFFFF80; /* initialize the flag with fifty-seven 1s at top */
4187                     if (thisMop == MOP_xsxth64) {
4188                         flag = 0xFFFFFFFFFFFF8000; /* specify the flag with forty-nine 1s at top in this case */
4189                     } else if (thisMop == MOP_xsxtw64) {
4190                         flag = 0xFFFFFFFF80000000; /* specify the flag with thirty-three 1s at top in this case */
4191                     }
4192                     if (!(static_cast<uint64>(value) & flag) &&
4193                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4194                         auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
4195                         RegOperand &dstOpnd = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
4196                             static_cast<AArch64reg>(dstMovOpnd.GetRegisterNumber()), k64BitSize,
4197                             dstMovOpnd.GetRegisterType());
4198                         prevInsn->SetOperand(kInsnFirstOpnd, dstOpnd);
4199                         prevInsn->SetMOP(AArch64CG::kMd[MOP_xmovri64]);
4200                         bb.RemoveInsn(insn);
4201                     }
4202                 }
4203             }
4204         } else if (prevInsn->GetMachineOpcode() == MOP_wldrsb) {
4205             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4206             if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4207                 return;
4208             }
4209             if (thisMop == MOP_xsxtb32) {
4210                 bb.RemoveInsn(insn);
4211             }
4212         } else if (prevInsn->GetMachineOpcode() == MOP_wldrsh) {
4213             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4214             if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4215                 return;
4216             }
4217             if (thisMop == MOP_xsxth32) {
4218                 bb.RemoveInsn(insn);
4219             }
4220         }
4221     }
4222 }
4223 
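/* Drop a zero-extension whose input is already zero-extended: after a call whose unsigned
 * return value is no wider than the extension, after a mov of a small immediate, or after
 * ldrb/ldrh/32-bit ldr (which zero-extend by definition), e.g.
 *   ldrh  w0, [x1]
 *   uxth  w0, w0      -->  ldrh w0, [x1]
 */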
4224 void EliminateSpecifcUXTAArch64::Run(BB &bb, Insn &insn)
4225 {
4226     MOperator thisMop = insn.GetMachineOpcode();
4227     Insn *prevInsn = insn.GetPreviousMachineInsn();
4228     if (prevInsn == nullptr) {
4229         return;
4230     }
4231     auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4232     auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4233     if (prevInsn->IsCall() && prevInsn->GetIsCallReturnUnsigned() &&
4234         regOpnd0.GetRegisterNumber() == regOpnd1.GetRegisterNumber() &&
4235         (regOpnd1.GetRegisterNumber() == R0 || regOpnd1.GetRegisterNumber() == V0)) {
4236         uint32 retSize = prevInsn->GetRetSize();
4237         if (retSize > 0 &&
4238             ((thisMop == MOP_xuxtb32 && retSize <= k1ByteSize) || (thisMop == MOP_xuxth32 && retSize <= k2ByteSize) ||
4239              (thisMop == MOP_xuxtw64 && retSize <= k4ByteSize))) {
4240             bb.RemoveInsn(insn);
4241         }
4242         return;
4243     }
4244     if (&insn == bb.GetFirstInsn() || regOpnd0.GetRegisterNumber() != regOpnd1.GetRegisterNumber() ||
4245         !prevInsn->IsMachineInstruction()) {
4246         return;
4247     }
4248     if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC && prevInsn->IsCall() && prevInsn->GetIsCallReturnSigned()) {
4249         return;
4250     }
4251     if (thisMop == MOP_xuxtb32) {
4252         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4253             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4254             if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
4255                 return;
4256             }
4257             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4258             if (opnd.IsIntImmediate()) {
4259                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4260                 int64 value = immOpnd.GetValue();
4261                 /* check the top 56 bits of value */
4262                 if (!(static_cast<uint64>(value) & 0xFFFFFFFFFFFFFF00)) {
4263                     bb.RemoveInsn(insn);
4264                 }
4265             }
4266         } else if (prevInsn->GetMachineOpcode() == MOP_wldrb) {
4267             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4268             if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4269                 return;
4270             }
4271             bb.RemoveInsn(insn);
4272         }
4273     } else if (thisMop == MOP_xuxth32) {
4274         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4275             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4276             if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
4277                 return;
4278             }
4279             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4280             if (opnd.IsIntImmediate()) {
4281                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4282                 int64 value = immOpnd.GetValue();
4283                 if (!(static_cast<uint64>(value) & 0xFFFFFFFFFFFF0000)) {
4284                     bb.RemoveInsn(insn);
4285                 }
4286             }
4287         } else if (prevInsn->GetMachineOpcode() == MOP_wldrh) {
4288             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4289             if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4290                 return;
4291             }
4292             bb.RemoveInsn(insn);
4293         }
4294     } else {
4295         /* this_mop == MOP_xuxtw64 */
4296         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_wldrsb ||
4297             prevInsn->GetMachineOpcode() == MOP_wldrb || prevInsn->GetMachineOpcode() == MOP_wldrsh ||
4298             prevInsn->GetMachineOpcode() == MOP_wldrh || prevInsn->GetMachineOpcode() == MOP_wldr) {
4299             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4300             if (!IsSameRegisterOperation(dstOpnd, regOpnd1, regOpnd0)) {
4301                 return;
4302             }
4303             /* 32-bit ldr does zero-extension by default, so this conversion can be skipped */
4304             bb.RemoveInsn(insn);
4305         }
4306     }
4307 }
4308 
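/* Variant of EliminateSpecifcSXTAArch64 above. Besides the previous machine instruction, it
 * also looks through a fall-through edge: when the extension is the first machine instruction
 * of a block whose only predecessor is the preceding block, the defining instruction is taken
 * from the end of that predecessor. */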
4309 bool EliminateSpecifcSXTPattern::CheckCondition(Insn &insn)
4310 {
4311     BB *bb = insn.GetBB();
4312     if (bb->GetFirstMachineInsn() == &insn) {
4313         BB *prevBB = bb->GetPrev();
4314         if (prevBB != nullptr && (bb->GetPreds().size() == 1) && (*(bb->GetPreds().cbegin()) == prevBB)) {
4315             prevInsn = prevBB->GetLastMachineInsn();
4316         }
4317     } else {
4318         prevInsn = insn.GetPreviousMachineInsn();
4319     }
4320     if (prevInsn == nullptr) {
4321         return false;
4322     }
4323     return true;
4324 }
4325 
4326 void EliminateSpecifcSXTPattern::Run(BB &bb, Insn &insn)
4327 {
4328     MOperator thisMop = insn.GetMachineOpcode();
4329     auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4330     auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4331     if (regOpnd0.GetRegisterNumber() == regOpnd1.GetRegisterNumber() && prevInsn->IsMachineInstruction()) {
4332         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4333             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4334             if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4335                 return;
4336             }
4337             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4338             if (opnd.IsIntImmediate()) {
4339                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4340                 int64 value = immOpnd.GetValue();
4341                 if (thisMop == MOP_xsxtb32) {
4342                     /* value should be in the range between -128 and 127 */
4343                     if (value >= static_cast<int64>(0xFFFFFFFFFFFFFF80) && value <= 0x7F &&
4344                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4345                         bb.RemoveInsn(insn);
4346                         optSuccess = true;
4347                         return;
4348                     }
4349                 } else if (thisMop == MOP_xsxth32) {
4350                     /* value should be in the range between -32768 and 32767 */
4351                     if (value >= static_cast<int64>(0xFFFFFFFFFFFF8000) && value <= 0x7FFF &&
4352                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4353                         bb.RemoveInsn(insn);
4354                         optSuccess = true;
4355                         return;
4356                     }
4357                 } else {
4358                     uint64 flag = 0xFFFFFFFFFFFFFF80; /* initialize the flag with fifty-seven 1s at top */
4359                     if (thisMop == MOP_xsxth64) {
4360                         flag = 0xFFFFFFFFFFFF8000; /* specify the flag with forty-nine 1s at top in this case */
4361                     } else if (thisMop == MOP_xsxtw64) {
4362                         flag = 0xFFFFFFFF80000000; /* specify the flag with thirty-three 1s at top in this case */
4363                     }
4364                     if ((static_cast<uint64>(value) & flag) == 0 &&
4365                         immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
4366                         auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
4367                         RegOperand &dstOpnd = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
4368                             static_cast<AArch64reg>(dstMovOpnd.GetRegisterNumber()), k64BitSize,
4369                             dstMovOpnd.GetRegisterType());
4370                         prevInsn->SetOperand(kInsnFirstOpnd, dstOpnd);
4371                         prevInsn->SetMOP(AArch64CG::kMd[MOP_xmovri64]);
4372                         bb.RemoveInsn(insn);
4373                         optSuccess = true;
4374                         return;
4375                     }
4376                 }
4377             }
4378         }
4379     }
4380 }
4381 
4382 bool EliminateSpecifcUXTPattern::CheckCondition(Insn &insn)
4383 {
4384     BB *bb = insn.GetBB();
4385     if (bb->GetFirstMachineInsn() == &insn) {
4386         BB *prevBB = bb->GetPrev();
4387         if (prevBB != nullptr && (bb->GetPreds().size() == 1) && (*(bb->GetPreds().cbegin()) == prevBB)) {
4388             prevInsn = prevBB->GetLastMachineInsn();
4389         }
4390     } else {
4391         prevInsn = insn.GetPreviousMachineInsn();
4392     }
4393     if (prevInsn == nullptr) {
4394         return false;
4395     }
4396     return true;
4397 }
4398 
4399 void EliminateSpecifcUXTPattern::Run(BB &bb, Insn &insn)
4400 {
4401     if (!CheckCondition(insn)) {
4402         return;
4403     }
4404     MOperator thisMop = insn.GetMachineOpcode();
4405     auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4406     auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4407     if (&insn == bb.GetFirstMachineInsn() || regOpnd0.GetRegisterNumber() != regOpnd1.GetRegisterNumber() ||
4408         !prevInsn->IsMachineInstruction()) {
4409         return;
4410     }
4411     if (cgFunc->GetMirModule().GetSrcLang() == kSrcLangC && prevInsn->IsCall() && prevInsn->GetIsCallReturnSigned()) {
4412         return;
4413     }
4414     if (thisMop == MOP_xuxtb32) {
4415         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4416             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4417             if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
4418                 return;
4419             }
4420             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4421             if (opnd.IsIntImmediate()) {
4422                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4423                 int64 value = immOpnd.GetValue();
4424                 /* check the top 56 bits of value */
4425                 if ((static_cast<uint64>(value) & 0xFFFFFFFFFFFFFF00) == 0) {
4426                     bb.RemoveInsn(insn);
4427                     optSuccess = true;
4428                     return;
4429                 }
4430             }
4431         } else if (prevInsn->GetMachineOpcode() == MOP_wldrb) {
4432             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4433             if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4434                 return;
4435             }
4436             bb.RemoveInsn(insn);
4437             optSuccess = true;
4438             return;
4439         }
4440     } else if (thisMop == MOP_xuxth32) {
4441         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
4442             auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4443             if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
4444                 return;
4445             }
4446             Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
4447             if (opnd.IsIntImmediate()) {
4448                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
4449                 int64 value = immOpnd.GetValue();
4450                 if ((static_cast<uint64>(value) & 0xFFFFFFFFFFFF0000) == 0) {
4451                     bb.RemoveInsn(insn);
4452                     optSuccess = true;
4453                     return;
4454                 }
4455             }
4456         } else if (prevInsn->GetMachineOpcode() == MOP_wldrh) {
4457             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4458             if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
4459                 return;
4460             }
4461             bb.RemoveInsn(insn);
4462             optSuccess = true;
4463             return;
4464         }
4465     } else {
4466         if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_wldrsb ||
4467             prevInsn->GetMachineOpcode() == MOP_wldrb || prevInsn->GetMachineOpcode() == MOP_wldrsh ||
4468             prevInsn->GetMachineOpcode() == MOP_wldrh || prevInsn->GetMachineOpcode() == MOP_wldr) {
4469             auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4470             if (!IsSameRegisterOperation(dstOpnd, regOpnd1, regOpnd0)) {
4471                 return;
4472             }
4473             /* 32-bit ldr does zero-extension by default, so this conversion can be skipped */
4474             bb.RemoveInsn(insn);
4475             optSuccess = true;
4476             return;
4477         }
4478     }
4479 }
4480 
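/* Two consecutive fmovs that read the same FP source register move the same bits, so the
 * second transfer can be turned into a cheap GPR-to-GPR mov, e.g.
 *   fmov w10, s2              fmov w10, s2
 *   fmov w11, s2        -->   mov  w11, w10
 * Uses of the second destination in the following instruction are rewritten to the first
 * destination as well. */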
4481 bool FmovRegPattern::CheckCondition(Insn &insn)
4482 {
4483     nextInsn = insn.GetNextMachineInsn();
4484     if (nextInsn == nullptr) {
4485         return false;
4486     }
4487     prevInsn = insn.GetPreviousMachineInsn();
4488     if (prevInsn == nullptr) {
4489         return false;
4490     }
4491     auto &curSrcOpnd = insn.GetOperand(kInsnSecondOpnd);
4492     auto &prevSrcOpnd = prevInsn->GetOperand(kInsnSecondOpnd);
4493     if (!curSrcOpnd.IsRegister() || !prevSrcOpnd.IsRegister()) {
4494         return false;
4495     }
4496     auto &curSrcRegOpnd = static_cast<RegOperand&>(curSrcOpnd);
4497     auto &prevSrcRegOpnd = static_cast<RegOperand&>(prevSrcOpnd);
4498     /* same src freg */
4499     if (curSrcRegOpnd.GetRegisterNumber() != prevSrcRegOpnd.GetRegisterNumber()) {
4500         return false;
4501     }
4502     return true;
4503 }
4504 
4505 void FmovRegPattern::Run(BB &bb, Insn &insn)
4506 {
4507     if (!CheckCondition(insn)) {
4508         return;
4509     }
4510     MOperator thisMop = insn.GetMachineOpcode();
4511     MOperator prevMop = prevInsn->GetMachineOpcode();
4512     MOperator newMop = MOP_undef;
4513     uint32 doOpt = 0;
4514     if (prevMop == MOP_xvmovrv && thisMop == MOP_xvmovrv) {
4515         doOpt = k32BitSize;
4516         newMop = MOP_wmovrr;
4517     } else if (prevMop == MOP_xvmovrd && thisMop == MOP_xvmovrd) {
4518         doOpt = k64BitSize;
4519         newMop = MOP_xmovrr;
4520     }
4521     if (doOpt == 0) {
4522         return;
4523     }
4524     auto &curDstRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4525     regno_t curDstReg = curDstRegOpnd.GetRegisterNumber();
4526     /* optimize case 1 */
4527     auto &prevDstRegOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4528     regno_t prevDstReg = prevDstRegOpnd.GetRegisterNumber();
4529     auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
4530     RegOperand &dst =
4531         aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(curDstReg), doOpt, kRegTyInt);
4532     RegOperand &src =
4533         aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(prevDstReg), doOpt, kRegTyInt);
4534     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, dst, src);
4535     bb.InsertInsnBefore(insn, newInsn);
4536     bb.RemoveInsn(insn);
4537     RegOperand &newOpnd =
4538         aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(prevDstReg), doOpt, kRegTyInt);
4539     uint32 opndNum = nextInsn->GetOperandSize();
4540     for (uint32 opndIdx = 0; opndIdx < opndNum; ++opndIdx) {
4541         Operand &opnd = nextInsn->GetOperand(opndIdx);
4542         if (opnd.IsMemoryAccessOperand()) {
4543             auto &memOpnd = static_cast<MemOperand &>(opnd);
4544             Operand *base = memOpnd.GetBaseRegister();
4545             if (base != nullptr) {
4546                 if (base->IsRegister()) {
4547                     auto *reg = static_cast<RegOperand *>(base);
4548                     if (reg->GetRegisterNumber() == curDstReg) {
4549                         memOpnd.SetBaseRegister(newOpnd);
4550                     }
4551                 }
4552             }
4553             Operand *offset = memOpnd.GetIndexRegister();
4554             if (offset != nullptr) {
4555                 if (offset->IsRegister()) {
4556                     auto *reg = static_cast<RegOperand *>(offset);
4557                     if (reg->GetRegisterNumber() == curDstReg) {
4558                         memOpnd.SetIndexRegister(newOpnd);
4559                     }
4560                 }
4561             }
4562         } else if (opnd.IsRegister()) {
4563             /* Check if it is a source operand. */
4564             auto *regProp = nextInsn->GetDesc()->opndMD[opndIdx];
4565             if (regProp->IsUse()) {
4566                 auto &reg = static_cast<RegOperand &>(opnd);
4567                 if (reg.GetRegisterNumber() == curDstReg) {
4568                     nextInsn->SetOperand(opndIdx, newOpnd);
4569                 }
4570             }
4571         }
4572     }
4573 }
4574 
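/* An sbfx with lsb 0 and width >= 32 leaves the low 32 bits of the destination equal to the
 * low 32 bits of the source. If the next instruction reads the destination only as a 32-bit
 * value, those reads can use the 32-bit source register directly, and the sbfx itself can be
 * removed when the next instruction also redefines the destination. */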
4575 bool SbfxOptPattern::CheckCondition(Insn &insn)
4576 {
4577     nextInsn = insn.GetNextMachineInsn();
4578     if (nextInsn == nullptr) {
4579         return false;
4580     }
4581     auto &curDstRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4582     auto &lsb = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
4583     auto &width = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
4584     if (lsb.GetValue() != 0 || width.GetValue() < k32BitSize) {
4585         return false;
4586     }
4587     uint32 opndNum = nextInsn->GetOperandSize();
4588     const InsnDesc *md = nextInsn->GetDesc();
4589     for (uint32 opndIdx = 0; opndIdx < opndNum; ++opndIdx) {
4590         Operand &opnd = nextInsn->GetOperand(opndIdx);
4591         /* Check if it is a source operand. */
4592         if (opnd.IsMemoryAccessOperand() || opnd.IsList()) {
4593             return false;
4594         } else if (opnd.IsRegister()) {
4595             auto &reg = static_cast<RegOperand &>(opnd);
4596             auto *regProp = md->opndMD[opndIdx];
4597             if (reg.GetRegisterNumber() == curDstRegOpnd.GetRegisterNumber()) {
4598                 if (nextInsn->GetOperandSize(opndIdx) != k32BitSize) {
4599                     return false;
4600                 }
4601                 if (regProp->IsDef()) {
4602                     toRemove = true;
4603                 } else {
4604                     (void)cands.emplace_back(opndIdx);
4605                 }
4606             }
4607         }
4608     }
4609     return cands.size() != 0;
4610 }
4611 
4612 void SbfxOptPattern::Run(BB &bb, Insn &insn)
4613 {
4614     if (!CheckCondition(insn)) {
4615         return;
4616     }
4617     auto &srcRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4618     RegOperand &newReg = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
4619         static_cast<AArch64reg>(srcRegOpnd.GetRegisterNumber()), k32BitSize, srcRegOpnd.GetRegisterType());
4620     // replace use point of opnd in nextInsn
4621     for (auto i : cands) {
4622         nextInsn->SetOperand(i, newReg);
4623     }
4624     if (toRemove) {
4625         bb.RemoveInsn(insn);
4626     }
4627 }
4628 
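/* Invert a cbnz over a small "return 0" block so the zero case branches straight to the
 * return, e.g.
 *   cbnz w0, .L_target             cbz  w0, <return-bb>
 * nextBB:                   -->  nextBB:              (falls through to .L_target)
 *   mov  w0, #0
 *   b    <return-bb>
 * The mov is redundant on that path because w0 is already zero. */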
4629 bool CbnzToCbzPattern::CheckCondition(Insn &insn)
4630 {
4631     MOperator curMop = insn.GetMachineOpcode();
4632     if (curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
4633         return false;
4634     }
4635     /* reg has to be R0, since return value is in R0 */
4636     auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4637     if (regOpnd0.GetRegisterNumber() != R0) {
4638         return false;
4639     }
4640     nextBB = insn.GetBB()->GetNext();
4641     /* Make sure nextBB exists and can only be reached by bb */
4642     if (nextBB == nullptr || nextBB->GetPreds().size() > 1) {
4643         return false;
4644     }
4645     /* Next insn should be a mov R0 = 0 */
4646     movInsn = nextBB->GetFirstMachineInsn();
4647     if (movInsn == nullptr) {
4648         return false;
4649     }
4650     MOperator movInsnMop = movInsn->GetMachineOpcode();
4651     if (movInsnMop != MOP_wmovri32 && movInsnMop != MOP_xmovri64) {
4652         return false;
4653     }
4654     auto &movDest = static_cast<RegOperand &>(movInsn->GetOperand(kInsnFirstOpnd));
4655     if (movDest.GetRegisterNumber() != R0) {
4656         return false;
4657     }
4658     auto &movImm = static_cast<ImmOperand &>(movInsn->GetOperand(kInsnSecondOpnd));
4659     if (movImm.GetValue() != 0) {
4660         return false;
4661     }
4662     brInsn = movInsn->GetNextMachineInsn();
4663     if (brInsn == nullptr) {
4664         return false;
4665     }
4666     if (brInsn->GetMachineOpcode() != MOP_xuncond) {
4667         return false;
4668     }
4669     /* Is nextBB branch to the return-bb? */
4670     if (nextBB->GetSuccs().size() != 1) {
4671         return false;
4672     }
4673     return true;
4674 }
4675 
4676 void CbnzToCbzPattern::Run(BB &bb, Insn &insn)
4677 {
4678     if (!CheckCondition(insn)) {
4679         return;
4680     }
4681     MOperator thisMop = insn.GetMachineOpcode();
4682     BB *targetBB = nullptr;
4683     auto it = bb.GetSuccsBegin();
4684     if (*it == nextBB) {
4685         ++it;
4686     }
4687     targetBB = *it;
4688     /* Make sure when nextBB is empty, targetBB is fallthru of bb. */
4689     if (targetBB != nextBB->GetNext()) {
4690         return;
4691     }
4692     BB *nextBBTarget = *(nextBB->GetSuccsBegin());
4693     if (nextBBTarget->GetKind() != BB::kBBReturn) {
4694         return;
4695     }
4696     /* Control flow looks nice, instruction looks nice */
4697     DEBUG_ASSERT(brInsn != nullptr, "brInsn should not be nullptr");
4698     Operand &brTarget = brInsn->GetOperand(kInsnFirstOpnd);
4699     insn.SetOperand(kInsnSecondOpnd, brTarget);
4700     if (thisMop == MOP_wcbnz) {
4701         insn.SetMOP(AArch64CG::kMd[MOP_wcbz]);
4702     } else {
4703         insn.SetMOP(AArch64CG::kMd[MOP_xcbz]);
4704     }
4705     nextBB->RemoveInsn(*movInsn);
4706     nextBB->RemoveInsn(*brInsn);
4707     /* nextBB is now a fallthru bb, not a goto bb */
4708     nextBB->SetKind(BB::kBBFallthru);
4709     /*
4710      * fix control flow, we have bb, nextBB, targetBB, nextBB_target
4711      * connect bb -> nextBB_target erase targetBB
4712      */
4713     it = bb.GetSuccsBegin();
4714     CHECK_FATAL(it != bb.GetSuccsEnd(), "succs is empty.");
4715     if (*it == targetBB) {
4716         bb.EraseSuccs(it);
4717         bb.PushFrontSuccs(*nextBBTarget);
4718     } else {
4719         ++it;
4720         bb.EraseSuccs(it);
4721         bb.PushBackSuccs(*nextBBTarget);
4722     }
4723     for (auto targetBBIt = targetBB->GetPredsBegin(); targetBBIt != targetBB->GetPredsEnd(); ++targetBBIt) {
4724         if (*targetBBIt == &bb) {
4725             targetBB->ErasePreds(targetBBIt);
4726             break;
4727         }
4728     }
4729     for (auto nextIt = nextBBTarget->GetPredsBegin(); nextIt != nextBBTarget->GetPredsEnd(); ++nextIt) {
4730         if (*nextIt == nextBB) {
4731             nextBBTarget->ErasePreds(nextIt);
4732             break;
4733         }
4734     }
4735     nextBBTarget->PushBackPreds(bb);
4736 
4737     /* nextBB has no target, originally just branch target */
4738     nextBB->EraseSuccs(nextBB->GetSuccsBegin());
4739     DEBUG_ASSERT(nextBB->GetSuccs().empty(), "peep: branch target incorrect");
4740     /* Now make nextBB fallthru to targetBB */
4741     nextBB->PushFrontSuccs(*targetBB);
4742     targetBB->PushBackPreds(*nextBB);
4743 }
4744 
4745 bool ContiLDRorSTRToSameMEMPattern::HasImplicitSizeUse(const Insn &insn) const
4746 {
4747     if (insn.GetOperandSize(kInsnFirstOpnd) != prevInsn->GetOperandSize(kInsnFirstOpnd)) {
4748         return true;
4749     }
4750     if (loadAfterStore) {
4751         // To avoid the optimization as following:
4752         // str w10, [sp, #8]
4753         // ldr w10, [sp, #8]     ---\-->  cannot be removed
4754         // ...
4755         // str x10, [x1, #16]
4756         auto &defOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4757         for (Insn *cursor = insn.GetNext(); cursor != nullptr; cursor = cursor->GetNext()) {
4758             if (!cursor->IsMachineInstruction()) {
4759                 continue;
4760             }
4761             uint32 opndNum = cursor->GetOperandSize();
4762             for (uint32 i = 0; i < opndNum; ++i) {
4763                 if (cursor->OpndIsDef(i)) {
4764                     continue;
4765                 }
4766                 if (!cursor->GetOperand(i).IsRegister()) {
4767                     continue;
4768                 }
4769                 auto &useOpnd = static_cast<RegOperand &>(cursor->GetOperand(i));
4770                 if (useOpnd.GetRegisterNumber() == defOpnd.GetRegisterNumber() &&
4771                     insn.GetOperandSize(kInsnFirstOpnd) != cursor->GetOperandSize(i)) {
4772                     return true;
4773                 }
4774             }
4775         }
4776     }
4777     return false;
4778 }
4779 
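/* Fold a materialized condition back into a conditional branch when the cset result is dead
 * afterwards:
 *   cset w0, CC
 *   cbnz w0, label        -->  b.CC  label
 * (for cbz the condition is inverted, giving b.!CC). */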
4780 void CsetCbzToBeqOptAArch64::Run(BB &bb, Insn &insn)
4781 {
4782     Insn *insn1 = insn.GetPreviousMachineInsn();
4783     if (insn1 == nullptr) {
4784         return;
4785     }
4786     /* prevInsn must be "cset" insn */
4787     MOperator opCode1 = insn1->GetMachineOpcode();
4788     if (opCode1 != MOP_xcsetrc && opCode1 != MOP_wcsetrc) {
4789         return;
4790     }
4791 
4792     auto &tmpRegOp1 = static_cast<RegOperand &>(insn1->GetOperand(kInsnFirstOpnd));
4793     regno_t baseRegNO1 = tmpRegOp1.GetRegisterNumber();
4794     auto &tmpRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4795     regno_t baseRegNO2 = tmpRegOp2.GetRegisterNumber();
4796     if (baseRegNO1 != baseRegNO2) {
4797         return;
4798     }
4799     /* If the reg will be used later, we shouldn't optimize the cset insn here */
4800     if (IfOperandIsLiveAfterInsn(tmpRegOp2, insn)) {
4801         return;
4802     }
4803     MOperator opCode = insn.GetMachineOpcode();
4804     bool reverse = (opCode == MOP_xcbz || opCode == MOP_wcbz);
4805     Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
4806     auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
4807     auto &cond = static_cast<CondOperand &>(insn1->GetOperand(kInsnSecondOpnd));
4808     MOperator jmpOperator = SelectMOperator(cond.GetCode(), reverse);
4809     CHECK_FATAL((jmpOperator != MOP_undef), "unknown condition code");
4810     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(jmpOperator, rflag, label);
4811     bb.RemoveInsn(*insn1);
4812     bb.ReplaceInsn(insn, newInsn);
4813 }
4814 
4815 MOperator CsetCbzToBeqOptAArch64::SelectMOperator(ConditionCode condCode, bool inverse) const
4816 {
4817     switch (condCode) {
4818         case CC_NE:
4819             return inverse ? MOP_beq : MOP_bne;
4820         case CC_EQ:
4821             return inverse ? MOP_bne : MOP_beq;
4822         case CC_MI:
4823             return inverse ? MOP_bpl : MOP_bmi;
4824         case CC_PL:
4825             return inverse ? MOP_bmi : MOP_bpl;
4826         case CC_VS:
4827             return inverse ? MOP_bvc : MOP_bvs;
4828         case CC_VC:
4829             return inverse ? MOP_bvs : MOP_bvc;
4830         case CC_HI:
4831             return inverse ? MOP_bls : MOP_bhi;
4832         case CC_LS:
4833             return inverse ? MOP_bhi : MOP_bls;
4834         case CC_GE:
4835             return inverse ? MOP_blt : MOP_bge;
4836         case CC_LT:
4837             return inverse ? MOP_bge : MOP_blt;
4838         case CC_HS:
4839             return inverse ? MOP_blo : MOP_bhs;
4840         case CC_LO:
4841             return inverse ? MOP_bhs : MOP_blo;
4842         case CC_LE:
4843             return inverse ? MOP_bgt : MOP_ble;
4844         case CC_GT:
4845             return inverse ? MOP_ble : MOP_bgt;
4846         case CC_CS:
4847             return inverse ? MOP_bcc : MOP_bcs;
4848         default:
4849             return MOP_undef;
4850     }
4851 }
4852 
4853 bool ContiLDRorSTRToSameMEMPattern::CheckCondition(Insn &insn)
4854 {
4855     prevInsn = insn.GetPrev();
4856     while (prevInsn != nullptr && (prevInsn->GetMachineOpcode() == 0 || !prevInsn->IsMachineInstruction()) &&
4857            prevInsn != insn.GetBB()->GetFirstMachineInsn()) {
4858         prevInsn = prevInsn->GetPrev();
4859     }
4860     if (!insn.IsMachineInstruction() || prevInsn == nullptr) {
4861         return false;
4862     }
4863     MOperator thisMop = insn.GetMachineOpcode();
4864     MOperator prevMop = prevInsn->GetMachineOpcode();
4865     /*
4866      * store regB, RegC, offset
4867      * load regA, RegC, offset
4868      */
4869     if ((thisMop == MOP_xldr && prevMop == MOP_xstr) || (thisMop == MOP_wldr && prevMop == MOP_wstr) ||
4870         (thisMop == MOP_dldr && prevMop == MOP_dstr) || (thisMop == MOP_sldr && prevMop == MOP_sstr)) {
4871         loadAfterStore = true;
4872     }
4873     /*
4874      * load regA, RegC, offset
4875      * load regB, RegC, offset
4876      */
4877     if ((thisMop == MOP_xldr || thisMop == MOP_wldr || thisMop == MOP_dldr || thisMop == MOP_sldr) &&
4878         prevMop == thisMop) {
4879         loadAfterLoad = true;
4880     }
4881     if (!loadAfterStore && !loadAfterLoad) {
4882         return false;
4883     }
4884     if (HasImplicitSizeUse(insn)) {
4885         return false;
4886     }
4887     DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsMemoryAccessOperand(), "expects mem operands");
4888     DEBUG_ASSERT(prevInsn->GetOperand(kInsnSecondOpnd).IsMemoryAccessOperand(), "expects mem operands");
4889     return true;
4890 }
4891 
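/* Forward a value that is reloaded from the address it was just stored to, or drop a second
 * load from the same address into the same register, e.g.
 *   str x2, [sp, #8]            str x2, [sp, #8]
 *   ldr x3, [sp, #8]      -->   mov x3, x2
 * When the reloaded value is immediately re-spilled and dies there, the spill store is
 * rewritten to store the original register instead of inserting a mov. */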
4892 void ContiLDRorSTRToSameMEMPattern::Run(BB &bb, Insn &insn)
4893 {
4894     if (!CheckCondition(insn)) {
4895         return;
4896     }
4897     MOperator thisMop = insn.GetMachineOpcode();
4898     auto &memOpnd1 = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
4899     MemOperand::AArch64AddressingMode addrMode1 = memOpnd1.GetAddrMode();
4900     if (addrMode1 != MemOperand::kAddrModeBOi || (!memOpnd1.IsIntactIndexed())) {
4901         return;
4902     }
4903 
4904     auto *base1 = static_cast<RegOperand *>(memOpnd1.GetBaseRegister());
4905     DEBUG_ASSERT(base1 == nullptr || !base1->IsVirtualRegister(), "physical register has not been allocated?");
4906     OfstOperand *offset1 = memOpnd1.GetOffsetImmediate();
4907 
4908     auto &memOpnd2 = static_cast<MemOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
4909     MemOperand::AArch64AddressingMode addrMode2 = memOpnd2.GetAddrMode();
4910     if (addrMode2 != MemOperand::kAddrModeBOi || (!memOpnd2.IsIntactIndexed())) {
4911         return;
4912     }
4913 
4914     auto *base2 = static_cast<RegOperand *>(memOpnd2.GetBaseRegister());
4915     DEBUG_ASSERT(base2 == nullptr || !base2->IsVirtualRegister(), "physical register has not been allocated?");
4916     OfstOperand *offset2 = memOpnd2.GetOffsetImmediate();
4917 
4918     if (base1 == nullptr || base2 == nullptr || offset1 == nullptr || offset2 == nullptr) {
4919         return;
4920     }
4921 
4922     auto &reg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4923     auto &reg2 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
4924     int64 offsetVal1 = offset1->GetOffsetValue();
4925     int64 offsetVal2 = offset2->GetOffsetValue();
4926     if (base1->GetRegisterNumber() != base2->GetRegisterNumber() || reg1.GetRegisterType() != reg2.GetRegisterType() ||
4927         reg1.GetSize() != reg2.GetSize() || offsetVal1 != offsetVal2) {
4928         return;
4929     }
4930     if (loadAfterStore && reg1.GetRegisterNumber() != reg2.GetRegisterNumber()) {
4931         /* replace it with mov */
4932         MOperator newOp = MOP_undef;
4933         if (reg1.GetRegisterType() == kRegTyInt) {
4934             newOp = (insn.GetOperandSize(kInsnFirstOpnd) == k32BitSizeInt) ? MOP_wmovrr : MOP_xmovrr;
4935         } else if (reg1.GetRegisterType() == kRegTyFloat) {
4936             newOp = (insn.GetOperandSize(kInsnFirstOpnd) == k32BitSizeInt) ? MOP_xvmovs : MOP_xvmovd;
4937         }
4938         Insn *nextInsn = insn.GetNext();
4939         while (nextInsn != nullptr && !nextInsn->IsMachineInstruction() && nextInsn != bb.GetLastMachineInsn()) {
4940             nextInsn = nextInsn->GetNext();
4941         }
4942         bool moveSameReg = false;
4943         if (nextInsn && nextInsn->GetIsSpill() && !IfOperandIsLiveAfterInsn(reg1, *nextInsn)) {
4944             MOperator nextMop = nextInsn->GetMachineOpcode();
4945             if ((thisMop == MOP_xldr && nextMop == MOP_xstr) || (thisMop == MOP_wldr && nextMop == MOP_wstr) ||
4946                 (thisMop == MOP_dldr && nextMop == MOP_dstr) || (thisMop == MOP_sldr && nextMop == MOP_sstr)) {
4947                 nextInsn->Insn::SetOperand(kInsnFirstOpnd, reg2);
4948                 moveSameReg = true;
4949             }
4950         }
4951         if (!moveSameReg) {
4952             bb.InsertInsnAfter(*prevInsn, cgFunc->GetInsnBuilder()->BuildInsn(newOp, reg1, reg2));
4953         }
4954         SetCurrInsn(insn.GetNextMachineInsn());
4955         optSuccess = true;
4956         bb.RemoveInsn(insn);
4957     } else if (reg1.GetRegisterNumber() == reg2.GetRegisterNumber() &&
4958                base1->GetRegisterNumber() != reg2.GetRegisterNumber()) {
4959         SetCurrInsn(insn.GetNextMachineInsn());
4960         optSuccess = true;
4961         bb.RemoveInsn(insn);
4962     }
4963 }
4964 
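/* When both arguments of MCC_IncDecRef_NaiveRCFast refer to the same object (x1 is a plain
 * copy of x0), the reference-count increment and decrement cancel out, so both the mov and
 * the call can be removed. */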
4965 bool RemoveIncDecRefPattern::CheckCondition(Insn &insn)
4966 {
4967     if (insn.GetMachineOpcode() != MOP_xbl) {
4968         return false;
4969     }
4970     prevInsn = insn.GetPreviousMachineInsn();
4971     if (prevInsn == nullptr) {
4972         return false;
4973     }
4974     MOperator prevMop = prevInsn->GetMachineOpcode();
4975     if (prevMop != MOP_xmovrr) {
4976         return false;
4977     }
4978     auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
4979     if (target.GetName() != "MCC_IncDecRef_NaiveRCFast") {
4980         return false;
4981     }
4982     if (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R1 ||
4983         static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber() != R0) {
4984         return false;
4985     }
4986     return true;
4987 }
4988 
4989 void RemoveIncDecRefPattern::Run(BB &bb, Insn &insn)
4990 {
4991     if (!CheckCondition(insn)) {
4992         return;
4993     }
4994     bb.RemoveInsn(*prevInsn);
4995     bb.RemoveInsn(insn);
4996 }
4997 
4998 #ifdef USE_32BIT_REF
4999 constexpr uint32 kRefSize = 32;
5000 #else
5001 constexpr uint32 kRefSize = 64;
5002 #endif
5003 
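/* Rewrite a 0/1 select into a flag materialization, e.g.
 *   mov  w1, #1
 *   mov  w2, #0
 *   csel w0, w1, w2, CC    -->  cset w0, CC
 * The 0/1 operands may be immediates, wzr/xzr, or registers whose defining mov is found
 * earlier in the block; when the 1 is in the false slot the condition is inverted. */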
5004 void CselZeroOneToCsetOpt::Run(BB &bb, Insn &insn)
5005 {
5006     Operand &trueValueOp = insn.GetOperand(kInsnSecondOpnd);
5007     Operand &falseValueOp = insn.GetOperand(kInsnThirdOpnd);
5008     Operand *trueTempOp = nullptr;
5009     Operand *falseTempOp = nullptr;
5010 
5011     /* find fixed value in BB */
5012     if (!trueValueOp.IsIntImmediate()) {
5013         trueMovInsn = FindFixedValue(trueValueOp, bb, trueTempOp, insn);
5014     }
5015     if (!falseValueOp.IsIntImmediate()) {
5016         falseMovInsn = FindFixedValue(falseValueOp, bb, falseTempOp, insn);
5017     }
5018 
5019     DEBUG_ASSERT(trueTempOp != nullptr, "trueTempOp should not be nullptr");
5020     DEBUG_ASSERT(falseTempOp != nullptr, "falseTempOp should not be nullptr");
5021     /* csel to cset */
5022     if ((trueTempOp->IsIntImmediate() || IsZeroRegister(*trueTempOp)) &&
5023         (falseTempOp->IsIntImmediate() || IsZeroRegister(*falseTempOp))) {
5024         ImmOperand *imm1 = static_cast<ImmOperand *>(trueTempOp);
5025         ImmOperand *imm2 = static_cast<ImmOperand *>(falseTempOp);
5026         bool inverse = imm1->IsOne() && (imm2->IsZero() || IsZeroRegister(*imm2));
5027         if (inverse || ((imm1->IsZero() || IsZeroRegister(*imm1)) && imm2->IsOne())) {
5028             Operand &reg = insn.GetOperand(kInsnFirstOpnd);
5029             CondOperand &condOperand = static_cast<CondOperand &>(insn.GetOperand(kInsnFourthOpnd));
5030             MOperator mopCode = (reg.GetSize() == k64BitSize) ? MOP_xcsetrc : MOP_wcsetrc;
5031             /* get new cond  ccCode */
5032             ConditionCode ccCode = inverse ? condOperand.GetCode() : GetReverseCC(condOperand.GetCode());
5033             if (ccCode == kCcLast) {
5034                 return;
5035             }
5036             AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
5037             CondOperand &cond = func->GetCondOperand(ccCode);
5038             Operand &rflag = func->GetOrCreateRflag();
5039             Insn &csetInsn = func->GetInsnBuilder()->BuildInsn(mopCode, reg, cond, rflag);
5040             if (CGOptions::DoCGSSA() && CGOptions::GetInstance().GetOptimizeLevel() < CGOptions::kLevel0) {
5041                 CHECK_FATAL(false, "check this case in ssa opt");
5042             }
5043             insn.GetBB()->ReplaceInsn(insn, csetInsn);
5044         }
5045     }
5046 }
5047 
5048 Insn *CselZeroOneToCsetOpt::FindFixedValue(Operand &opnd, BB &bb, Operand *&tempOp, const Insn &insn) const
5049 {
5050     tempOp = &opnd;
5051     bool alreadyFindCsel = false;
5052     bool isRegDefined = false;
5053     regno_t regno = static_cast<RegOperand &>(opnd).GetRegisterNumber();
5054     FOR_BB_INSNS_REV(defInsn, &bb)
5055     {
5056         if (!defInsn->IsMachineInstruction() || defInsn->IsBranch()) {
5057             continue;
5058         }
5059         /* find csel */
5060         if (defInsn->GetId() == insn.GetId()) {
5061             alreadyFindCsel = true;
5062         }
5063         /* check whether the register is defined here */
5064         if (alreadyFindCsel) {
5065             isRegDefined = defInsn->IsRegDefined(regno);
5066         }
5067         /* if the defining insn is a mov-immediate, do this opt */
5068         if (isRegDefined) {
5069             MOperator thisMop = defInsn->GetMachineOpcode();
5070             if (thisMop == MOP_wmovri32 || thisMop == MOP_xmovri64) {
5071                 if (&defInsn->GetOperand(kInsnFirstOpnd) == &opnd) {
5072                     tempOp = &(defInsn->GetOperand(kInsnSecondOpnd));
5073                     return defInsn;
5074                 }
5075             } else {
5076                 return nullptr;
5077             }
5078         }
5079     }
5080     return nullptr;
5081 }
5082 
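/*
 * Illustrative sketch of the pattern below (hypothetical registers/label): the matched
 * sequence
 *   and   w0, w0, #1
 *   cmp   w0, #0
 *   cset  w0, EQ
 *   eor   w0, w0, #1
 *   cbz   w0, .L1
 * tests bit 0 of w0 and branches when it is clear, so it is replaced by
 *   tbz   w0, #0, .L1
 */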
5083 void AndCmpCsetEorCbzOpt::Run(BB &bb, Insn &insn)
5084 {
5085     if (insn.GetMachineOpcode() != MOP_wandrri12) {
5086         return;
5087     }
5088     RegOperand &andInsnFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5089     RegOperand &andInsnSecondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5090     ImmOperand &andInsnThirdOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
5091     if (andInsnFirstOpnd.GetRegisterNumber() != andInsnSecondOpnd.GetRegisterNumber() ||
5092         andInsnThirdOpnd.GetValue() != 1) {
5093         return;
5094     }
5095     Insn *cmpInsn = insn.GetNextMachineInsn();
5096     if (cmpInsn == nullptr || cmpInsn->GetMachineOpcode() != MOP_wcmpri) {
5097         return;
5098     }
5099     RegOperand &cmpInsnSecondOpnd = static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd));
5100     ImmOperand &cmpInsnThirdOpnd = static_cast<ImmOperand &>(cmpInsn->GetOperand(kInsnThirdOpnd));
5101     if (cmpInsnSecondOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
5102         cmpInsnThirdOpnd.GetValue() != 0) {
5103         return;
5104     }
5105     Insn *csetInsn = cmpInsn->GetNextMachineInsn();
5106     if (csetInsn == nullptr || csetInsn->GetMachineOpcode() != MOP_wcsetrc) {
5107         return;
5108     }
5109     RegOperand &csetInsnFirstOpnd = static_cast<RegOperand &>(csetInsn->GetOperand(kInsnFirstOpnd));
5110     CondOperand &csetSecondOpnd = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
5111     if (csetInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
5112         csetSecondOpnd.GetCode() != CC_EQ) {
5113         return;
5114     }
5115     Insn *eorInsn = csetInsn->GetNextMachineInsn();
5116     if (eorInsn == nullptr || eorInsn->GetMachineOpcode() != MOP_weorrri12) {
5117         return;
5118     }
5119     RegOperand &eorInsnFirstOpnd = static_cast<RegOperand &>(eorInsn->GetOperand(kInsnFirstOpnd));
5120     RegOperand &eorInsnSecondOpnd = static_cast<RegOperand &>(eorInsn->GetOperand(kInsnSecondOpnd));
5121     ImmOperand &eorInsnThirdOpnd = static_cast<ImmOperand &>(eorInsn->GetOperand(kInsnThirdOpnd));
5122     if (eorInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
5123         eorInsnFirstOpnd.GetRegisterNumber() != eorInsnSecondOpnd.GetRegisterNumber() ||
5124         eorInsnThirdOpnd.GetValue() != 1) {
5125         return;
5126     }
5127     Insn *cbzInsn = eorInsn->GetNextMachineInsn();
5128     if (cbzInsn == nullptr || cbzInsn->GetMachineOpcode() != MOP_wcbz) {
5129         return;
5130     }
5131     RegOperand &cbzInsnFirstOpnd = static_cast<RegOperand &>(cbzInsn->GetOperand(kInsnFirstOpnd));
5132     if (cbzInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber()) {
5133         return;
5134     }
5135     bb.RemoveInsn(*cmpInsn);
5136     bb.RemoveInsn(*csetInsn);
5137     bb.RemoveInsn(*eorInsn);
5138     bb.RemoveInsn(*cbzInsn);
5139     /* replace insn */
5140     auto &label = static_cast<LabelOperand &>(cbzInsn->GetOperand(kInsnSecondOpnd));
5141     ImmOperand &oneHoleOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(0, k8BitSize, false);
5142     bb.ReplaceInsn(insn, cgFunc->GetInsnBuilder()->BuildInsn(MOP_wtbz, cbzInsnFirstOpnd, oneHoleOpnd, label));
5143 }
5144 
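/*
 * Illustrative sketch of the pattern below (hypothetical registers): when the add result
 * is only consumed as the base of the following load,
 *   add  x0, x0, x1
 *   ldr  x0, [x0]
 * is folded into a register-offset load:
 *   ldr  x0, [x0, x1]
 */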
5145 void AddLdrOpt::Run(BB &bb, Insn &insn)
5146 {
5147     if (insn.GetMachineOpcode() != MOP_xaddrrr) {
5148         return;
5149     }
5150     Insn *nextInsn = insn.GetNextMachineInsn();
5151     if (nextInsn == nullptr) {
5152         return;
5153     }
5154     auto nextMop = nextInsn->GetMachineOpcode();
5155     if (nextMop != MOP_xldr && nextMop != MOP_wldr) {
5156         return;
5157     }
5158     RegOperand &insnFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5159     RegOperand &insnSecondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5160     if (insnFirstOpnd.GetRegisterNumber() != insnSecondOpnd.GetRegisterNumber()) {
5161         return;
5162     }
5163     RegOperand &ldrInsnFirstOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
5164     MemOperand &memOpnd = static_cast<MemOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5165     if (memOpnd.GetAddrMode() != MemOperand::kAddrModeBOi ||
5166         memOpnd.GetBaseRegister()->GetRegisterNumber() != insnFirstOpnd.GetRegisterNumber() ||
5167         ldrInsnFirstOpnd.GetRegisterNumber() != insnFirstOpnd.GetRegisterNumber() ||
5168         memOpnd.GetOffsetImmediate()->GetOffsetValue() != 0) {
5169         return;
5170     }
5171     MemOperand &newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateMemOpnd(
5172         MemOperand::kAddrModeBOrX, memOpnd.GetSize(), &insnFirstOpnd,
5173         &static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd)), 0, false);
5174     nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
5175     bb.RemoveInsn(insn);
5176 }
5177 
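/*
 * Illustrative sketch of the pattern below (hypothetical registers): flipping a cset
 * result with eor #1 is equivalent to a cset on the inverted condition, so
 *   cset  w0, EQ
 *   eor   w0, w0, #1
 * becomes
 *   cset  w0, NE
 */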
5178 void CsetEorOpt::Run(BB &bb, Insn &insn)
5179 {
5180     if (insn.GetMachineOpcode() != MOP_xcsetrc && insn.GetMachineOpcode() != MOP_wcsetrc) {
5181         return;
5182     }
5183     Insn *nextInsn = insn.GetNextMachineInsn();
5184     if (nextInsn == nullptr ||
5185         (nextInsn->GetMachineOpcode() != MOP_weorrri12 && nextInsn->GetMachineOpcode() != MOP_xeorrri13)) {
5186         return;
5187     }
5188     RegOperand &csetFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5189     RegOperand &eorFirstOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
5190     RegOperand &eorSecondOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5191     ImmOperand &eorThirdOpnd = static_cast<ImmOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
5192     if (eorThirdOpnd.GetValue() != 1 || eorFirstOpnd.GetRegisterNumber() != eorSecondOpnd.GetRegisterNumber() ||
5193         csetFirstOpnd.GetRegisterNumber() != eorFirstOpnd.GetRegisterNumber()) {
5194         return;
5195     }
5196     CondOperand &csetSecondOpnd = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
5197     ConditionCode inverseCondCode = GetReverseCC(csetSecondOpnd.GetCode());
5198     if (inverseCondCode == kCcLast) {
5199         return;
5200     }
5201     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
5202     CondOperand &inverseCondOpnd = aarFunc->GetCondOperand(inverseCondCode);
5203     insn.SetOperand(kInsnSecondOpnd, inverseCondOpnd);
5204     bb.RemoveInsn(*nextInsn);
5205 }
5206 
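/*
 * Illustrative sketch of the pattern below (hypothetical registers): when the moved
 * constant fits a 12-bit (optionally shifted) immediate,
 *   mov  w1, #16
 *   cmp  w0, w1
 * is rewritten as
 *   cmp  w0, #16
 * and the mov is removed if w1 is not live afterwards.
 */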
5207 void MoveCmpOpt::Run(BB &bb, Insn &insn)
5208 {
5209     if (insn.GetMachineOpcode() != MOP_xmovri64 && insn.GetMachineOpcode() != MOP_wmovri32) {
5210         return;
5211     }
5212     ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
5213     if (!immOpnd.IsInBitSize(kMaxImmVal12Bits, 0) && !immOpnd.IsInBitSize(kMaxImmVal12Bits, kMaxImmVal12Bits)) {
5214         return;
5215     }
5216     Insn *nextInsn = insn.GetNextMachineInsn();
5217     if (nextInsn == nullptr ||
5218         (nextInsn->GetMachineOpcode() != MOP_wcmprr && nextInsn->GetMachineOpcode() != MOP_xcmprr)) {
5219         return;
5220     }
5221     RegOperand &cmpSecondOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5222     RegOperand &cmpThirdOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
5223     RegOperand &movFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5224     if (cmpSecondOpnd.GetRegisterNumber() == cmpThirdOpnd.GetRegisterNumber()) {
5225         return;
5226     }
5227     if (cmpThirdOpnd.GetRegisterNumber() != movFirstOpnd.GetRegisterNumber()) {
5228         return;
5229     }
5230     MOperator cmpOpCode = (cmpThirdOpnd.GetSize() == k64BitSize) ? MOP_xcmpri : MOP_wcmpri;
5231     Insn &newCmpInsn = cgFunc->GetInsnBuilder()->BuildInsn(cmpOpCode, nextInsn->GetOperand(kInsnFirstOpnd),
5232                                                            nextInsn->GetOperand(kInsnSecondOpnd), immOpnd);
5233     bb.ReplaceInsn(*nextInsn, newCmpInsn);
5234     if (!IfOperandIsLiveAfterInsn(movFirstOpnd, newCmpInsn)) {
5235         bb.RemoveInsn(insn);
5236     }
5237 }
5238 
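/*
 * Illustrative sketch of the pattern below (hypothetical registers): an add of immediate
 * zero is either redundant or a plain register copy, so
 *   add  x0, x1, #0   ->   mov  x0, x1
 *   add  x0, x0, #0   ->   (removed)
 */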
5239 void AddImmZeroToMov::Run(BB &bb, Insn &insn)
5240 {
5241     RegOperand *insnDefReg = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5242     RegOperand *insnUseReg = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5243     int64 immVal = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
5244     if (immVal == static_cast<int64>(k0BitSize)) {
5245         if (insnDefReg->GetRegisterNumber() == insnUseReg->GetRegisterNumber()) {
5246             bb.RemoveInsn(insn);
5247             return;
5248         } else {
5249             Insn *newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(MOP_xmovrr, *insnDefReg, *insnUseReg);
5250             bb.ReplaceInsn(insn, *newInsn);
5251             return;
5252         }
5253     }
5254     return;
5255 }
5256 
5257 bool InlineReadBarriersPattern::CheckCondition(Insn &insn)
5258 {
5259     /* Inline read barriers only enabled for GCONLY. */
5260     if (!CGOptions::IsGCOnly()) {
5261         return false;
5262     }
5263     return true;
5264 }
5265 
5266 void InlineReadBarriersPattern::Run(BB &bb, Insn &insn)
5267 {
5268     if (!CheckCondition(insn)) {
5269         return;
5270     }
5271     const std::string &barrierName = GetReadBarrierName(insn);
5272     if (barrierName == kMccDummy) {
5273         /* remove dummy call. */
5274         bb.RemoveInsn(insn);
5275     } else {
5276         /* replace barrier function call with load instruction. */
5277         bool isVolatile = (barrierName == kMccLoadRefV || barrierName == kMccLoadRefVS);
5278         bool isStatic = (barrierName == kMccLoadRefS || barrierName == kMccLoadRefVS);
5279         /* refSize is 32 if USE_32BIT_REF defined, otherwise 64. */
5280         const uint32 refSize = kRefSize;
5281         auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
5282         MOperator loadOp = GetLoadOperator(refSize, isVolatile);
5283         RegOperand &regOp = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(R0, refSize, kRegTyInt);
5284         AArch64reg addrReg = isStatic ? R0 : R1;
5285         MemOperand &addr = aarch64CGFunc->CreateMemOpnd(addrReg, 0, refSize);
5286         Insn &loadInsn = cgFunc->GetInsnBuilder()->BuildInsn(loadOp, regOp, addr);
5287         bb.ReplaceInsn(insn, loadInsn);
5288     }
5289     bool isTailCall = (insn.GetMachineOpcode() == MOP_tail_call_opt_xbl);
5290     if (isTailCall) {
5291         /* add 'ret' instruction for tail call optimized load barrier. */
5292         Insn &retInsn = cgFunc->GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_xret);
5293         bb.AppendInsn(retInsn);
5294         bb.SetKind(BB::kBBReturn);
5295     }
5296 }
5297 
5298 bool ReplaceDivToMultiPattern::CheckCondition(Insn &insn)
5299 {
5300     prevInsn = insn.GetPreviousMachineInsn();
5301     if (prevInsn == nullptr) {
5302         return false;
5303     }
5304     prePrevInsn = prevInsn->GetPreviousMachineInsn();
5305     auto &sdivOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5306     auto &sdivOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
5307     if (sdivOpnd1.GetRegisterNumber() == sdivOpnd2.GetRegisterNumber() || sdivOpnd1.GetRegisterNumber() == R16 ||
5308         sdivOpnd2.GetRegisterNumber() == R16 || prePrevInsn == nullptr) {
5309         return false;
5310     }
5311     MOperator prevMop = prevInsn->GetMachineOpcode();
5312     MOperator prePrevMop = prePrevInsn->GetMachineOpcode();
5313     if ((prevMop > 0) && (prevMop == MOP_wmovkri16) && (prePrevMop > 0) && (prePrevMop == MOP_wmovri32)) {
5314         return true;
5315     }
5316     return false;
5317 }
5318 
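/*
 * Illustrative sketch of the pattern below (hypothetical registers): the matched input is
 * a signed division by the constant 100000 (0x186A0) materialized by mov/movk:
 *   mov   w1, #0x86a0
 *   movk  w1, #0x1, LSL #16
 *   sdiv  w2, w0, w1
 * The division is replaced by the multiply/shift sequence built below, using x16 as a
 * scratch register.
 */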
5319 void ReplaceDivToMultiPattern::Run(BB &bb, Insn &insn)
5320 {
5321     if (CheckCondition(insn)) {
5322         auto &sdivOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5323         auto &sdivOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
5324         /* Check that the registers defined by prevInsn and prePrevInsn are identical with the divisor of insn. */
5325         auto &prevReg = prevInsn->GetOperand(kInsnFirstOpnd);
5326         auto &prePrevReg = prePrevInsn->GetOperand(kInsnFirstOpnd);
5327         if (!prevReg.IsRegister() || !prePrevReg.IsRegister() ||
5328             static_cast<RegOperand &>(prevReg).GetRegisterNumber() != sdivOpnd2.GetRegisterNumber() ||
5329             static_cast<RegOperand &>(prePrevReg).GetRegisterNumber() != sdivOpnd2.GetRegisterNumber()) {
5330             return;
5331         }
5332         auto &prevLsl = static_cast<BitShiftOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5333         if (prevLsl.GetShiftAmount() != k16BitSize) {
5334             return;
5335         }
5336         auto &prevImmOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5337         auto &prePrevImmOpnd = static_cast<ImmOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd));
5338         /*
5339          * expect the immediate value of the first mov to be 0x86A0: combined with the
5340          * following movk of #1, LSL #16 it forms the divisor 0x186A0 (100000)
5341          */
5342         constexpr uint32 immOpndBoundary = 34464;
5343         if ((prevImmOpnd.GetValue() != 1) || (prePrevImmOpnd.GetValue() != immOpndBoundary)) {
5344             return;
5345         }
5346         auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
5347         /* mov   w16, #0x588f */
5348         RegOperand &tempOpnd =
5349             aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(R16), k64BitSize, kRegTyInt);
5350         /* create an immediate operand with this specific value */
5351         ImmOperand &multiplierLow = aarch64CGFunc->CreateImmOperand(0x588f, k32BitSize, false);
5352         Insn &multiplierLowInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_wmovri32, tempOpnd, multiplierLow);
5353         bb.InsertInsnBefore(*prePrevInsn, multiplierLowInsn);
5354 
5355         /*
5356          * movk    w16, #0x4f8b, LSL #16
5357          * create an immediate operand with this specific value
5358          */
5359         ImmOperand &multiplierHigh = aarch64CGFunc->CreateImmOperand(0x4f8b, k32BitSize, false);
5360         BitShiftOperand *multiplierHighLsl = aarch64CGFunc->GetLogicalShiftLeftOperand(k16BitSize, true);
5361         Insn &multiplierHighInsn =
5362             cgFunc->GetInsnBuilder()->BuildInsn(MOP_wmovkri16, tempOpnd, multiplierHigh, *multiplierHighLsl);
5363         bb.InsertInsnBefore(*prePrevInsn, multiplierHighInsn);
5364 
5365         /* smull   x16, w0, w16 */
5366         Insn &newSmullInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xsmullrrr, tempOpnd, sdivOpnd1, tempOpnd);
5367         bb.InsertInsnBefore(*prePrevInsn, newSmullInsn);
5368 
5369         /* asr     x16, x16, #32 */
5370         ImmOperand &dstLsrImmHigh = aarch64CGFunc->CreateImmOperand(k32BitSize, k32BitSize, false);
5371         Insn &dstLsrInsnHigh = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xasrrri6, tempOpnd, tempOpnd, dstLsrImmHigh);
5372         bb.InsertInsnBefore(*prePrevInsn, dstLsrInsnHigh);
5373 
5374         /* add     x16, x16, w0, SXTW */
5375         Operand &sxtw = aarch64CGFunc->CreateExtendShiftOperand(ExtendShiftOperand::kSXTW, 0, 3);
5376         Insn &addInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xxwaddrrre, tempOpnd, tempOpnd, sdivOpnd1, sxtw);
5377         bb.InsertInsnBefore(*prePrevInsn, addInsn);
5378 
5379         /* asr     x16, x16, #17 */
5380         ImmOperand &dstLsrImmChange = aarch64CGFunc->CreateImmOperand(17, k32BitSize, false);
5381         Insn &dstLsrInsnChange = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xasrrri6, tempOpnd, tempOpnd, dstLsrImmChange);
5382         bb.InsertInsnBefore(*prePrevInsn, dstLsrInsnChange);
5383 
5384         /* add     x2, x16, x0, LSR #31 */
5385         auto &sdivOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5386         regno_t sdivOpnd0RegNO = sdivOpnd0.GetRegisterNumber();
5387         RegOperand &extSdivO0 = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
5388             static_cast<AArch64reg>(sdivOpnd0RegNO), k64BitSize, kRegTyInt);
5389 
5390         regno_t sdivOpnd1RegNum = sdivOpnd1.GetRegisterNumber();
5391         RegOperand &extSdivO1 = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
5392             static_cast<AArch64reg>(sdivOpnd1RegNum), k64BitSize, kRegTyInt);
5393         /* the shift amount is 31 for this insn */
5394         BitShiftOperand &addLsrOpnd = aarch64CGFunc->CreateBitShiftOperand(BitShiftOperand::kLSR, 31, 6);
5395         Insn &addLsrInsn =
5396             cgFunc->GetInsnBuilder()->BuildInsn(MOP_xaddrrrs, extSdivO0, tempOpnd, extSdivO1, addLsrOpnd);
5397         bb.InsertInsnBefore(*prePrevInsn, addLsrInsn);
5398 
5399         /*
5400          * remove the matched insns: check whether the divisor register (e.g. x1) is still used
5401          * after the sdiv insn or is live-out; if so, keep its defining mov/movk.
5402          */
5403         if (sdivOpnd2.GetRegisterNumber() != sdivOpnd0.GetRegisterNumber()) {
5404             if (IfOperandIsLiveAfterInsn(sdivOpnd2, insn)) {
5405                 /* Only remove div instruction. */
5406                 bb.RemoveInsn(insn);
5407                 return;
5408             }
5409         }
5410 
5411         bb.RemoveInsn(*prePrevInsn);
5412         bb.RemoveInsn(*prevInsn);
5413         bb.RemoveInsn(insn);
5414     }
5415 }
5416 
5417 Insn *AndCmpBranchesToCsetAArch64::FindPreviousCmp(Insn &insn) const
5418 {
5419     regno_t defRegNO = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
5420     for (Insn *curInsn = insn.GetPrev(); curInsn != nullptr; curInsn = curInsn->GetPrev()) {
5421         if (!curInsn->IsMachineInstruction()) {
5422             continue;
5423         }
5424         if (curInsn->GetMachineOpcode() == MOP_wcmpri || curInsn->GetMachineOpcode() == MOP_xcmpri) {
5425             return curInsn;
5426         }
5427         /*
5428          * if there is any def/use of the CC or of insn's defReg between insn and curInsn, stop searching and return nullptr.
5429          */
5430         if (curInsn->ScanReg(defRegNO) || curInsn->ScanReg(kRFLAG)) {
5431             return nullptr;
5432         }
5433     }
5434     return nullptr;
5435 }
5436 
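/*
 * Illustrative sketch of the pattern below (hypothetical registers): for a single-bit
 * test such as
 *   and  w0, w0, #4
 *   cmp  w0, #4
 *   cset w0, EQ
 * the cmp/cset pair is redundant: with mask #1 only the and is kept, and with another
 * power-of-two mask the three insns collapse into ubfx w0, w0, #2, #1.
 */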
5437 void AndCmpBranchesToCsetAArch64::Run(BB &bb, Insn &insn)
5438 {
5439     /* prevInsn must be "cmp" insn */
5440     Insn *prevInsn = FindPreviousCmp(insn);
5441     if (prevInsn == nullptr) {
5442         return;
5443     }
5444     /* prevPrevInsn must be "and" insn */
5445     Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
5446     if (prevPrevInsn == nullptr ||
5447         (prevPrevInsn->GetMachineOpcode() != MOP_wandrri12 && prevPrevInsn->GetMachineOpcode() != MOP_xandrri13)) {
5448         return;
5449     }
5450 
5451     auto &csetCond = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
5452     auto &cmpImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5453     int64 cmpImmVal = cmpImm.GetValue();
5454     auto &andImm = static_cast<ImmOperand &>(prevPrevInsn->GetOperand(kInsnThirdOpnd));
5455     int64 andImmVal = andImm.GetValue();
5456     if ((csetCond.GetCode() == CC_EQ && cmpImmVal == andImmVal) || (csetCond.GetCode() == CC_NE && cmpImmVal == 0)) {
5457         /* if flag_reg of "cmp" is live later, we can't remove cmp insn. */
5458         auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
5459         if (IfOperandIsLiveAfterInsn(flagReg, insn)) {
5460             return;
5461         }
5462 
5463         auto &csetReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5464         auto &prevInsnSecondReg = prevInsn->GetOperand(kInsnSecondOpnd);
5465         bool isRegDiff = !RegOperand::IsSameRegNO(csetReg, prevInsnSecondReg);
5466         if (isRegDiff && IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(prevInsnSecondReg), insn)) {
5467             return;
5468         }
5469         if (andImmVal == 1) {
5470             if (!RegOperand::IsSameRegNO(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnFirstOpnd))) {
5471                 return;
5472             }
5473             /* save the "and" insn only. */
5474             bb.RemoveInsn(insn);
5475             bb.RemoveInsn(*prevInsn);
5476             if (isRegDiff) {
5477                 prevPrevInsn->Insn::SetOperand(kInsnFirstOpnd, csetReg);
5478             }
5479         } else {
5480             if (!RegOperand::IsSameReg(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnFirstOpnd)) ||
5481                 !RegOperand::IsSameReg(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnSecondOpnd))) {
5482                 return;
5483             }
5484 
5485             /* andImmVal must be a power of 2, i.e. 2^n */
5486             int n = LogValueAtBase2(andImmVal);
5487             if (n < 0) {
5488                 return;
5489             }
5490 
5491             /* create ubfx insn */
5492             MOperator ubfxOp = (csetReg.GetSize() <= k32BitSize) ? MOP_wubfxrri5i5 : MOP_xubfxrri6i6;
5493             if (ubfxOp == MOP_wubfxrri5i5 && static_cast<uint32>(n) >= k32BitSize) {
5494                 return;
5495             }
5496             auto &dstReg = static_cast<RegOperand &>(csetReg);
5497             auto &srcReg = static_cast<RegOperand &>(prevInsnSecondReg);
5498             auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
5499             ImmOperand &bitPos = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
5500             ImmOperand &bitSize = aarch64CGFunc->CreateImmOperand(1, k8BitSize, false);
5501             Insn &ubfxInsn = cgFunc.GetInsnBuilder()->BuildInsn(ubfxOp, dstReg, srcReg, bitPos, bitSize);
5502             bb.InsertInsnBefore(*prevPrevInsn, ubfxInsn);
5503             bb.RemoveInsn(insn);
5504             bb.RemoveInsn(*prevInsn);
5505             bb.RemoveInsn(*prevPrevInsn);
5506         }
5507     }
5508 }
5509 
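/*
 * Illustrative sketch of the pattern below (hypothetical registers/label): when the and
 * result is only consumed by the compare-and-branch,
 *   and  w0, w1, #255
 *   cbz  w0, .L1
 * becomes
 *   tst  w1, #255
 *   beq  .L1
 */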
5510 void AndCbzBranchesToTstAArch64::Run(BB &bb, Insn &insn)
5511 {
5512     /* nextInsn must be "cbz" or "cbnz" insn */
5513     Insn *nextInsn = insn.GetNextMachineInsn();
5514     if (nextInsn == nullptr || (nextInsn->GetMachineOpcode() != MOP_wcbz && nextInsn->GetMachineOpcode() != MOP_xcbz)) {
5515         return;
5516     }
5517     auto &andRegOp = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5518     regno_t andRegNO1 = andRegOp.GetRegisterNumber();
5519     auto &cbzRegOp2 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
5520     regno_t cbzRegNO2 = cbzRegOp2.GetRegisterNumber();
5521     if (andRegNO1 != cbzRegNO2) {
5522         return;
5523     }
5524     /* If the reg will be used later, we shouldn't optimize the and insn here */
5525     if (IfOperandIsLiveAfterInsn(andRegOp, *nextInsn)) {
5526         return;
5527     }
5528     /* build tst insn */
5529     Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
5530     auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5531     MOperator newTstOp = MOP_undef;
5532     if (andOpnd3.IsRegister()) {
5533         newTstOp = (andRegOp2.GetSize() <= k32BitSize && andOpnd3.GetSize() <= k32BitSize) ? MOP_wtstrr : MOP_xtstrr;
5534     } else {
5535         newTstOp =
5536             (andRegOp2.GetSize() <= k32BitSize && andOpnd3.GetSize() <= k32BitSize) ? MOP_wtstri32 : MOP_xtstri64;
5537     }
5538     Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
5539     Insn &newInsnTst = cgFunc.GetInsnBuilder()->BuildInsn(newTstOp, rflag, andRegOp2, andOpnd3);
5540     if (andOpnd3.IsImmediate()) {
5541         if (!static_cast<ImmOperand &>(andOpnd3).IsBitmaskImmediate(andRegOp2.GetSize())) {
5542             return;
5543         }
5544     }
5545     /* build beq insn */
5546     MOperator opCode = nextInsn->GetMachineOpcode();
5547     bool reverse = (opCode == MOP_xcbz || opCode == MOP_wcbz);
5548     auto &label = static_cast<LabelOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5549     MOperator jmpOperator = reverse ? MOP_beq : MOP_bne;
5550     Insn &newInsnJmp = cgFunc.GetInsnBuilder()->BuildInsn(jmpOperator, rflag, label);
5551     bb.ReplaceInsn(insn, newInsnTst);
5552     bb.ReplaceInsn(*nextInsn, newInsnJmp);
5553 }
5554 
5555 bool AndCmpBranchesToCsetPattern::CheckCondition(Insn &insn)
5556 {
5557     /* prevInsn must be "cmp" insn */
5558     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
5559     prevCmpInsn = ssaInfo->GetDefInsn(ccReg);
5560     if (prevCmpInsn == nullptr) {
5561         return false;
5562     }
5563     MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
5564     if (prevCmpMop != MOP_wcmpri && prevCmpMop != MOP_xcmpri) {
5565         return false;
5566     }
5567     /* prevPrevInsn must be "and" insn */
5568     auto &cmpUseReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
5569     prevAndInsn = ssaInfo->GetDefInsn(cmpUseReg);
5570     if (prevAndInsn == nullptr) {
5571         return false;
5572     }
5573     MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
5574     if (prevAndMop != MOP_wandrri12 && prevAndMop != MOP_xandrri13) {
5575         return false;
5576     }
5577     CHECK_FATAL(prevAndInsn->GetOperand(kInsnFirstOpnd).GetSize() == prevCmpInsn->GetOperand(kInsnSecondOpnd).GetSize(),
5578                 "def-use reg size must be same based-on ssa");
5579     auto &csetCond = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
5580     auto &cmpImm = static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd));
5581     int64 cmpImmVal = cmpImm.GetValue();
5582     auto &andImm = static_cast<ImmOperand &>(prevAndInsn->GetOperand(kInsnThirdOpnd));
5583     int64 andImmVal = andImm.GetValue();
5584     if ((csetCond.GetCode() == CC_EQ && cmpImmVal == andImmVal) || (csetCond.GetCode() == CC_NE && cmpImmVal == 0)) {
5585         /* guaranteed unique point of use */
5586         auto &flagReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnFirstOpnd));
5587         InsnSet cmpFirstUseSet = GetAllUseInsn(flagReg);
5588         if (cmpFirstUseSet.size() > 1) {
5589             return false;
5590         }
5591         /* guaranteed unique point of use */
5592         auto &prevInsnSecondReg = prevCmpInsn->GetOperand(kInsnSecondOpnd);
5593         InsnSet cmpSecondUseSet = GetAllUseInsn(static_cast<RegOperand &>(prevInsnSecondReg));
5594         if (cmpSecondUseSet.size() > 1) {
5595             return false;
5596         }
5597         return true;
5598     }
5599     return false;
5600 }
5601 
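/*
 * Illustrative sketch of the pattern below (hypothetical registers): the SSA form of the
 * and/cmp/cset reduction. With
 *   and  w0, w1, #2
 *   cmp  w0, #2
 *   cset w2, EQ
 * the cset is replaced by ubfx w2, w1, #1, #1; when the and mask is #1, the uses of the
 * cset result are simply redirected to the and result instead.
 */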
5602 void AndCmpBranchesToCsetPattern::Run(BB &bb, Insn &insn)
5603 {
5604     if (!CheckCondition(insn)) {
5605         return;
5606     }
5607     RegOperand *dstOpnd = nullptr;
5608     RegOperand *srcOpnd = nullptr;
5609     auto &andImm = static_cast<ImmOperand &>(prevAndInsn->GetOperand(kInsnThirdOpnd));
5610     int64 andImmVal = andImm.GetValue();
5611     if (andImmVal == 1) {
5612         /* Method 1: Delete cmp and cset, and replace cset with and. */
5613         dstOpnd = &static_cast<RegOperand &>(prevAndInsn->GetOperand(kInsnFirstOpnd));
5614         srcOpnd = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5615         if (dstOpnd->IsPhysicalRegister() || srcOpnd->IsPhysicalRegister()) {
5616             return;
5617         }
5618         VRegVersion *dstVersion = ssaInfo->FindSSAVersion(dstOpnd->GetRegisterNumber());
5619         VRegVersion *srcVersion = ssaInfo->FindSSAVersion(srcOpnd->GetRegisterNumber());
5620         CHECK_FATAL(dstVersion != nullptr, "get dstVersion failed");
5621         CHECK_FATAL(srcVersion != nullptr, "get srcVersion failed");
5622         /* if the cset result has no use point, there is nothing to replace */
5623         auto &insnDefReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5624         InsnSet csetFirstUseSet = GetAllUseInsn(insnDefReg);
5625         if (csetFirstUseSet.size() < 1) {
5626             return;
5627         }
5628         /* update ssa info */
5629         ssaInfo->ReplaceAllUse(srcVersion, dstVersion);
5630         optSuccess = true;
5631         /* dump pattern info */
5632         if (CG_PEEP_DUMP) {
5633             std::vector<Insn *> prevs;
5634             prevs.emplace_back(prevAndInsn);
5635             prevs.emplace_back(prevCmpInsn);
5636             DumpAfterPattern(prevs, &insn, prevAndInsn);
5637         }
5638     } else {
5639         /* andImmVal must be a power of 2, i.e. 2^n */
5640         int64 n = GetLogValueAtBase2(andImmVal);
5641         if (n < 0) {
5642             return;
5643         }
5644         /* Method 2: ubfx replaces cset. */
5645         /* create ubfx insn */
5646         auto &csetReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5647         MOperator ubfxOp = (csetReg.GetSize() <= k32BitSize) ? MOP_wubfxrri5i5 : MOP_xubfxrri6i6;
5648         if (ubfxOp == MOP_wubfxrri5i5 && static_cast<uint32>(n) >= k32BitSize) {
5649             return;
5650         }
5651         auto &dstReg = static_cast<RegOperand &>(csetReg);
5652         auto &prevAndInsnSecondReg = prevAndInsn->GetOperand(kInsnSecondOpnd);
5653         auto &srcReg = static_cast<RegOperand &>(prevAndInsnSecondReg);
5654         auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
5655         ImmOperand &bitPos = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
5656         ImmOperand &bitSize = aarch64CGFunc->CreateImmOperand(1, k8BitSize, false);
5657         Insn &ubfxInsn = cgFunc->GetInsnBuilder()->BuildInsn(ubfxOp, dstReg, srcReg, bitPos, bitSize);
5658         bb.ReplaceInsn(insn, ubfxInsn);
5659         /* update ssa info */
5660         ssaInfo->ReplaceInsn(insn, ubfxInsn);
5661         optSuccess = true;
5662         SetCurrInsn(&ubfxInsn);
5663         /* dump pattern info */
5664         if (CG_PEEP_DUMP) {
5665             std::vector<Insn *> prevs;
5666             prevs.emplace_back(prevAndInsn);
5667             prevs.emplace_back(prevCmpInsn);
5668             DumpAfterPattern(prevs, &insn, &ubfxInsn);
5669         }
5670     }
5671 }
5672 
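/*
 * Illustrative sketch of the pattern below (hypothetical registers/label): when the and
 * result is only used by the following compare against zero,
 *   and  w0, w1, #6
 *   cmp  w0, #0
 *   beq  .L1
 * is rewritten as
 *   tst  w1, #6
 *   beq  .L1
 */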
5673 void AndCmpBranchesToTstAArch64::Run(BB &bb, Insn &insn)
5674 {
5675     /* nextInsn must be "cmp" insn */
5676     Insn *nextInsn = insn.GetNextMachineInsn();
5677     if (nextInsn == nullptr ||
5678         (nextInsn->GetMachineOpcode() != MOP_wcmpri && nextInsn->GetMachineOpcode() != MOP_xcmpri)) {
5679         return;
5680     }
5681     /* nextNextInsn must be "beq" or "bne" insn */
5682     Insn *nextNextInsn = nextInsn->GetNextMachineInsn();
5683     if (nextNextInsn == nullptr ||
5684         (nextNextInsn->GetMachineOpcode() != MOP_beq && nextNextInsn->GetMachineOpcode() != MOP_bne)) {
5685         return;
5686     }
5687     auto &andRegOp = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5688     regno_t andRegNO1 = andRegOp.GetRegisterNumber();
5689     auto &cmpRegOp2 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5690     regno_t cmpRegNO2 = cmpRegOp2.GetRegisterNumber();
5691     if (andRegNO1 != cmpRegNO2) {
5692         return;
5693     }
5694     /* If the reg will be used later, we shouldn't optimize the and insn here */
5695     if (IfOperandIsLiveAfterInsn(andRegOp, *nextInsn)) {
5696         return;
5697     }
5698     Operand &immOpnd = nextInsn->GetOperand(kInsnThirdOpnd);
5699     DEBUG_ASSERT(immOpnd.IsIntImmediate(), "expects ImmOperand");
5700     auto &defConst = static_cast<ImmOperand &>(immOpnd);
5701     int64 defConstValue = defConst.GetValue();
5702     if (defConstValue != 0) {
5703         return;
5704     }
5705     /* build tst insn */
5706     Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
5707     auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5708     MOperator newOp = MOP_undef;
5709     if (andOpnd3.IsRegister()) {
5710         newOp = (andRegOp2.GetSize() <= k32BitSize) ? MOP_wtstrr : MOP_xtstrr;
5711     } else {
5712         newOp = (andRegOp2.GetSize() <= k32BitSize) ? MOP_wtstri32 : MOP_xtstri64;
5713     }
5714     Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
5715     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newOp, rflag, andRegOp2, andOpnd3);
5716     if (CGOptions::DoCGSSA() && CGOptions::GetInstance().GetOptimizeLevel() < CGOptions::kLevel0) {
5717         CHECK_FATAL(false, "check this case in ssa opt");
5718     }
5719     bb.InsertInsnAfter(*nextInsn, newInsn);
5720     bb.RemoveInsn(insn);
5721     bb.RemoveInsn(*nextInsn);
5722 }
5723 
5724 bool AndCbzBranchesToTstPattern::CheckCondition(Insn &insn)
5725 {
5726     /* nextInsn must be "cbz" or "cbnz" insn */
5727     Insn *nextInsn = insn.GetNextMachineInsn();
5728     if (nextInsn == nullptr || (nextInsn->GetMachineOpcode() != MOP_wcbz && nextInsn->GetMachineOpcode() != MOP_xcbz)) {
5729         return false;
5730     }
5731     auto &andRegOp1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5732     regno_t andRegNo1 = andRegOp1.GetRegisterNumber();
5733     auto &cbzRegOp1 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
5734     regno_t cbzRegNo1 = cbzRegOp1.GetRegisterNumber();
5735     if (andRegNo1 != cbzRegNo1) {
5736         return false;
5737     }
5738     /* If the reg will be used later, we shouldn't optimize the and insn here */
5739     if (IfOperandIsLiveAfterInsn(andRegOp1, *nextInsn)) {
5740         return false;
5741     }
5742     auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5743     Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
5744     if (andOpnd3.IsImmediate() && !static_cast<ImmOperand &>(andOpnd3).IsBitmaskImmediate(andRegOp2.GetSize())) {
5745         return false;
5746     }
5747     /* avoid redefine cc-reg */
5748     if (static_cast<AArch64CGFunc *>(cgFunc)->GetRflag() != nullptr) {
5749         return false;
5750     }
5751     return true;
5752 }
5753 
5754 void AndCbzBranchesToTstPattern::Run(BB &bb, Insn &insn)
5755 {
5756     if (!CheckCondition(insn)) {
5757         return;
5758     }
5759     Insn *nextInsn = insn.GetNextMachineInsn();
5760     CHECK_NULL_FATAL(nextInsn);
5761     /* build tst insn */
5762     auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5763     Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
5764     MOperator newTstOp = MOP_undef;
5765     if (andOpnd3.IsRegister()) {
5766         newTstOp = (insn.GetMachineOpcode() == MOP_wandrrr) ? MOP_wtstrr : MOP_xtstrr;
5767     } else {
5768         newTstOp = (insn.GetMachineOpcode() == MOP_wandrri12) ? MOP_wtstri32 : MOP_xtstri64;
5769     }
5770     Operand &rflag = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateRflag();
5771     Insn &newInsnTst = cgFunc->GetInsnBuilder()->BuildInsn(newTstOp, rflag, andRegOp2, andOpnd3);
5772 
5773     /* build beq insn */
5774     MOperator opCode = nextInsn->GetMachineOpcode();
5775     bool reverse = (opCode == MOP_xcbz || opCode == MOP_wcbz);
5776     auto &label = static_cast<LabelOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
5777     MOperator jmpOperator = reverse ? MOP_beq : MOP_bne;
5778     Insn &newInsnJmp = cgFunc->GetInsnBuilder()->BuildInsn(jmpOperator, rflag, label);
5779     bb.ReplaceInsn(insn, newInsnTst);
5780     bb.ReplaceInsn(*nextInsn, newInsnJmp);
5781 }
5782 
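/*
 * Illustrative sketch of the pattern below (hypothetical registers/label): a signed
 * compare against zero followed by bge/blt only depends on the sign bit, so
 *   cmp  w0, #0
 *   blt  .L1
 * becomes
 *   tbnz w0, #31, .L1
 * (bge becomes tbz, and the 64-bit form tests bit 63).
 */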
5783 void ZeroCmpBranchesAArch64::Run(BB &bb, Insn &insn)
5784 {
5785     Insn *prevInsn = insn.GetPreviousMachineInsn();
5786     if (!insn.IsBranch() || insn.GetOperandSize() <= kInsnSecondOpnd || prevInsn == nullptr) {
5787         return;
5788     }
5789     if (!insn.GetOperand(kInsnSecondOpnd).IsLabel()) {
5790         return;
5791     }
5792     LabelOperand *label = &static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
5793     RegOperand *regOpnd = nullptr;
5794     RegOperand *reg0 = nullptr;
5795     RegOperand *reg1 = nullptr;
5796     MOperator newOp = MOP_undef;
5797     ImmOperand *imm = nullptr;
5798     switch (prevInsn->GetMachineOpcode()) {
5799         case MOP_wcmpri:
5800         case MOP_xcmpri: {
5801             regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5802             imm = &static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5803             if (imm->GetValue() != 0) {
5804                 return;
5805             }
5806             if (insn.GetMachineOpcode() == MOP_bge) {
5807                 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
5808             } else if (insn.GetMachineOpcode() == MOP_blt) {
5809                 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
5810             } else {
5811                 return;
5812             }
5813             break;
5814         }
5815         case MOP_wcmprr:
5816         case MOP_xcmprr: {
5817             reg0 = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5818             reg1 = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5819             if (!IsZeroRegister(*reg0) && !IsZeroRegister(*reg1)) {
5820                 return;
5821             }
5822             switch (insn.GetMachineOpcode()) {
5823                 case MOP_bge:
5824                     if (IsZeroRegister(*reg1)) {
5825                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5826                         newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
5827                     } else {
5828                         return;
5829                     }
5830                     break;
5831                 case MOP_ble:
5832                     if (IsZeroRegister(*reg0)) {
5833                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5834                         newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
5835                     } else {
5836                         return;
5837                     }
5838                     break;
5839                 case MOP_blt:
5840                     if (IsZeroRegister(*reg1)) {
5841                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5842                         newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
5843                     } else {
5844                         return;
5845                     }
5846                     break;
5847                 case MOP_bgt:
5848                     if (IsZeroRegister(*reg0)) {
5849                         regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
5850                         newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
5851                     } else {
5852                         return;
5853                     }
5854                     break;
5855                 default:
5856                     return;
5857             }
5858             break;
5859         }
5860         default:
5861             return;
5862     }
5863     auto aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
5864     ImmOperand &bitp = aarch64CGFunc->CreateImmOperand(
5865         (regOpnd->GetSize() <= k32BitSize) ? (k32BitSize - 1) : (k64BitSize - 1), k8BitSize, false);
5866     bb.InsertInsnAfter(insn,
5867                        cgFunc.GetInsnBuilder()->BuildInsn(newOp, *static_cast<RegOperand *>(regOpnd), bitp, *label));
5868     bb.RemoveInsn(insn);
5869     bb.RemoveInsn(*prevInsn);
5870 }
5871 
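/*
 * Illustrative sketch of the pattern below (hypothetical registers): extending a value
 * that was already extended the same way is redundant, so in
 *   sxtb w0, w1
 *   sxtb w2, w0
 * the second insn is turned into a plain copy (mov w2, w0).
 */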
5872 void ElimDuplicateExtensionAArch64::Run(BB &bb, Insn &insn)
5873 {
5874     (void)bb;
5875     Insn *prevInsn = insn.GetPreviousMachineInsn();
5876     if (prevInsn == nullptr) {
5877         return;
5878     }
5879     uint32 index;
5880     uint32 upper;
5881     bool is32bits = false;
5882     MOperator *table = nullptr;
5883     MOperator thisMop = insn.GetMachineOpcode();
5884     switch (thisMop) {
5885         case MOP_xsxtb32:
5886             is32bits = true;
5887             [[clang::fallthrough]];
5888         case MOP_xsxtb64:
5889             table = sextMopTable;
5890             index = 0;  // 0 is index of MOP_xsxtb32 in table sextMopTable
5891             upper = kSizeOfSextMopTable;
5892             break;
5893         case MOP_xsxth32:
5894             is32bits = true;
5895             [[clang::fallthrough]];
5896         case MOP_xsxth64:
5897             table = sextMopTable;
5898             index = 2;  // 2 is index of MOP_xsxth32 in table sextMopTable
5899             upper = kSizeOfSextMopTable;
5900             break;
5901         case MOP_xsxtw64:
5902             table = sextMopTable;
5903             index = 4;  // 4 is index of MOP_xsxtw64 in table sextMopTable
5904             upper = kSizeOfSextMopTable;
5905             break;
5906         case MOP_xuxtb32:
5907             is32bits = true;
5908             table = uextMopTable;
5909             index = 0;  // 0 is index of MOP_xuxtb32 in table uextMopTable
5910             upper = kSizeOfUextMopTable;
5911             break;
5912         case MOP_xuxth32:
5913             is32bits = true;
5914             table = uextMopTable;
5915             index = 1;  // 1 is index of MOP_xuxth32 in table uextMopTable
5916             upper = kSizeOfUextMopTable;
5917             break;
5918         case MOP_xuxtw64:
5919             table = uextMopTable;
5920             index = 2;  // 2 is index of MOP_xuxtw64 in table uextMopTable
5921             upper = kSizeOfUextMopTable;
5922             break;
5923         default:
5924             CHECK_FATAL(false, "Unexpected mop");
5925     }
5926     MOperator prevMop = prevInsn->GetMachineOpcode();
5927     for (uint32 i = index; i < upper; ++i) {
5928         if (prevMop == table[i]) {
5929             Operand &prevDestOpnd = prevInsn->GetOperand(kInsnFirstOpnd);
5930             regno_t dest = static_cast<RegOperand &>(prevDestOpnd).GetRegisterNumber();
5931             regno_t src = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
5932             if (dest == src) {
5933                 insn.SetMOP(is32bits ? AArch64CG::kMd[MOP_wmovrr] : AArch64CG::kMd[MOP_xmovrr]);
5934                 if (upper == kSizeOfSextMopTable &&
5935                     static_cast<RegOperand &>(prevDestOpnd).GetValidBitsNum() !=
5936                         static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetValidBitsNum()) {
5937                     if (is32bits) {
5938                         insn.GetOperand(kInsnFirstOpnd).SetSize(k64BitSize);
5939                         insn.SetMOP(AArch64CG::kMd[MOP_xmovrr]);
5940                     } else {
5941                         prevDestOpnd.SetSize(k64BitSize);
5942                         prevInsn->SetMOP(prevMop == MOP_xsxtb32 ? AArch64CG::kMd[MOP_xsxtb64]
5943                                                                 : AArch64CG::kMd[MOP_xsxth64]);
5944                     }
5945                 }
5946             }
5947             break;
5948         }
5949     }
5950 }
5951 
5952 /*
5953  * if there is a define point of checkInsn->GetOperand(opndIdx) between startInsn and the first insn of the BB,
5954  * return the defining insn, else return nullptr
5955  */
5956 const Insn *CmpCsetAArch64::DefInsnOfOperandInBB(const Insn &startInsn, const Insn &checkInsn, int opndIdx) const
5957 {
5958     Insn *prevInsn = nullptr;
5959     for (const Insn *insn = &startInsn; insn != nullptr; insn = prevInsn) {
5960         prevInsn = insn->GetPreviousMachineInsn();
5961         if (!insn->IsMachineInstruction()) {
5962             continue;
5963         }
5964         /* a call is conservatively assumed to modify checkInsn.GetOperand(opndIdx) */
5965         if (insn->IsCall()) {
5966             return insn;
5967         }
5968         const InsnDesc *md = insn->GetDesc();
5969         uint32 opndNum = insn->GetOperandSize();
5970         for (uint32 i = 0; i < opndNum; ++i) {
5971             Operand &opnd = insn->GetOperand(i);
5972             if (!md->opndMD[i]->IsDef()) {
5973                 continue;
5974             }
5975             /* the operand is the base reg of a pre/post-indexed memory access, so the base is defined */
5976             if (opnd.IsMemoryAccessOperand()) {
5977                 auto &memOpnd = static_cast<MemOperand &>(opnd);
5978                 RegOperand *base = memOpnd.GetBaseRegister();
5979                 DEBUG_ASSERT(base != nullptr, "nullptr check");
5980                 DEBUG_ASSERT(base->IsRegister(), "expects RegOperand");
5981                 if (RegOperand::IsSameRegNO(*base, checkInsn.GetOperand(static_cast<uint32>(opndIdx))) &&
5982                     memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
5983                     (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
5984                     return insn;
5985                 }
5986             } else {
5987                 DEBUG_ASSERT(opnd.IsRegister(), "expects RegOperand");
5988                 if (RegOperand::IsSameRegNO(checkInsn.GetOperand(static_cast<uint32>(opndIdx)), opnd)) {
5989                     return insn;
5990                 }
5991             }
5992         }
5993     }
5994     return nullptr;
5995 }
5996 
5997 bool CmpCsetAArch64::OpndDefByOneValidBit(const Insn &defInsn) const
5998 {
5999     MOperator defMop = defInsn.GetMachineOpcode();
6000     switch (defMop) {
6001         case MOP_wcsetrc:
6002         case MOP_xcsetrc:
6003             return true;
6004         case MOP_wmovri32:
6005         case MOP_xmovri64: {
6006             Operand &defOpnd = defInsn.GetOperand(kInsnSecondOpnd);
6007             DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
6008             auto &defConst = static_cast<ImmOperand &>(defOpnd);
6009             int64 defConstValue = defConst.GetValue();
6010             return (defConstValue == 0 || defConstValue == 1);
6011         }
6012         case MOP_xmovrr:
6013         case MOP_wmovrr:
6014             return IsZeroRegister(defInsn.GetOperand(kInsnSecondOpnd));
6015         case MOP_wlsrrri5:
6016         case MOP_xlsrrri6: {
6017             Operand &opnd2 = defInsn.GetOperand(kInsnThirdOpnd);
6018             DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
6019             auto &opndImm = static_cast<ImmOperand &>(opnd2);
6020             int64 shiftBits = opndImm.GetValue();
6021             return ((defMop == MOP_wlsrrri5 && shiftBits == (k32BitSize - 1)) ||
6022                     (defMop == MOP_xlsrrri6 && shiftBits == (k64BitSize - 1)));
6023         }
6024         default:
6025             return false;
6026     }
6027 }
6028 
6029 /*
6030  * helper function for the cmp-cset optimization:
6031  * return true if all define points of the used opnd in insn have only one valid bit.
6032  * for cmp reg, #0(#1), that means checking reg
6033  */
6034 bool CmpCsetAArch64::CheckOpndDefPoints(Insn &checkInsn, int opndIdx)
6035 {
6036     if (checkInsn.GetBB()->GetPrev() == nullptr) {
6037         /* For 1st BB, be conservative for def of parameter registers */
6038         /* Since peep is light weight, do not want to insert pseudo defs */
6039         regno_t reg = static_cast<RegOperand &>(checkInsn.GetOperand(static_cast<uint32>(opndIdx))).GetRegisterNumber();
6040         if ((reg >= R0 && reg <= R7) || (reg >= D0 && reg <= D7)) {
6041             return false;
6042         }
6043     }
6044     /* check current BB */
6045     const Insn *defInsn = DefInsnOfOperandInBB(checkInsn, checkInsn, opndIdx);
6046     if (defInsn != nullptr) {
6047         return OpndDefByOneValidBit(*defInsn);
6048     }
6049     /* check pred */
6050     for (auto predBB : checkInsn.GetBB()->GetPreds()) {
6051         const Insn *tempInsn = nullptr;
6052         if (predBB->GetLastInsn() != nullptr) {
6053             tempInsn = DefInsnOfOperandInBB(*predBB->GetLastInsn(), checkInsn, opndIdx);
6054         }
6055         if (tempInsn == nullptr || !OpndDefByOneValidBit(*tempInsn)) {
6056             return false;
6057         }
6058     }
6059     return true;
6060 }
6061 
6062 /* Check whether there is a use point of rflag from startInsn to the bottom of the current bb */
6063 bool CmpCsetAArch64::FlagUsedLaterInCurBB(const BB &bb, Insn &startInsn) const
6064 {
6065     if (&bb != startInsn.GetBB()) {
6066         return false;
6067     }
6068     Insn *nextInsn = nullptr;
6069     for (Insn *insn = &startInsn; insn != nullptr; insn = nextInsn) {
6070         nextInsn = insn->GetNextMachineInsn();
6071         const InsnDesc *md = insn->GetDesc();
6072         uint32 opndNum = insn->GetOperandSize();
6073         for (uint32 i = 0; i < opndNum; ++i) {
6074             Operand &opnd = insn->GetOperand(i);
6075             /*
6076              * For a condition operand, such as NE, EQ and so on, the register number is the
6077              * same as RFLAG, so we only need to check the use/def property.
6078              */
6079             if (!opnd.IsConditionCode()) {
6080                 continue;
6081             }
6082             if (md->opndMD[i]->IsUse()) {
6083                 return true;
6084             } else {
6085                 DEBUG_ASSERT(md->opndMD[i]->IsDef(), "register should be redefined.");
6086                 return false;
6087             }
6088         }
6089     }
6090     return false;
6091 }
6092 
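/*
 * Illustrative sketch of the pattern below (hypothetical registers): when w0 is known to
 * hold only 0 or 1,
 *   cmp  w0, #0
 *   cset w1, NE
 * simplifies to mov w1, w0 (or is removed entirely when w1 and w0 are the same register),
 * while
 *   cmp  w0, #0
 *   cset w1, EQ
 * simplifies to eor w1, w0, #1.
 */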
6093 void CmpCsetAArch64::Run(BB &bb, Insn &insn)
6094 {
6095     Insn *nextInsn = insn.GetNextMachineInsn();
6096     if (nextInsn == nullptr) {
6097         return;
6098     }
6099     MOperator firstMop = insn.GetMachineOpcode();
6100     MOperator secondMop = nextInsn->GetMachineOpcode();
6101     if ((firstMop == MOP_wcmpri || firstMop == MOP_xcmpri) && (secondMop == MOP_wcsetrc || secondMop == MOP_xcsetrc)) {
6102         Operand &cmpFirstOpnd = insn.GetOperand(kInsnSecondOpnd);
6103         /* get ImmOperand, must be 0 or 1 */
6104         Operand &cmpSecondOpnd = insn.GetOperand(kInsnThirdOpnd);
6105         auto &cmpFlagReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
6106         DEBUG_ASSERT(cmpSecondOpnd.IsIntImmediate(), "expects ImmOperand");
6107         auto &cmpConst = static_cast<ImmOperand &>(cmpSecondOpnd);
6108         int64 cmpConstVal = cmpConst.GetValue();
6109         Operand &csetFirstOpnd = nextInsn->GetOperand(kInsnFirstOpnd);
6110         if ((cmpConstVal != 0 && cmpConstVal != 1) || !CheckOpndDefPoints(insn, 1) ||
6111             (nextInsn->GetNextMachineInsn() != nullptr && FlagUsedLaterInCurBB(bb, *nextInsn->GetNextMachineInsn())) ||
6112             FindRegLiveOut(cmpFlagReg, *insn.GetBB())) {
6113             return;
6114         }
6115 
6116         Insn *csetInsn = nextInsn;
6117         nextInsn = nextInsn->GetNextMachineInsn();
6118         auto &cond = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
6119         if ((cmpConstVal == 0 && cond.GetCode() == CC_NE) || (cmpConstVal == 1 && cond.GetCode() == CC_EQ)) {
6120             if (RegOperand::IsSameRegNO(cmpFirstOpnd, csetFirstOpnd)) {
6121                 bb.RemoveInsn(insn);
6122                 bb.RemoveInsn(*csetInsn);
6123             } else {
6124                 if (cmpFirstOpnd.GetSize() != csetFirstOpnd.GetSize()) {
6125                     return;
6126                 }
6127                 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
6128                 Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd);
6129                 bb.ReplaceInsn(insn, newInsn);
6130                 bb.RemoveInsn(*csetInsn);
6131             }
6132         } else if ((cmpConstVal == 1 && cond.GetCode() == CC_NE) || (cmpConstVal == 0 && cond.GetCode() == CC_EQ)) {
6133             if (cmpFirstOpnd.GetSize() != csetFirstOpnd.GetSize()) {
6134                 return;
6135             }
6136             MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
6137             ImmOperand &one = static_cast<AArch64CGFunc *>(&cgFunc)->CreateImmOperand(1, k8BitSize, false);
6138             Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd, one);
6139             bb.ReplaceInsn(insn, newInsn);
6140             bb.RemoveInsn(*csetInsn);
6141         }
6142     }
6143 }
6144 
6145 /*
6146  * helper function for DeleteMovAfterCbzOrCbnz
6147  * input:
6148  *        bb: the bb to be checked
6149  *        checkCbz: pass true to check that the BB ends with cbz, false for cbnz
6150  *        opnd: for MOV reg, #0, opnd indicates reg
6151  * return:
6152  *        true if the BB ends with the expected cbz/cbnz and the first operand of the cbz(cbnz) is the same as the
6153  *      input operand
6154  */
6155 bool DeleteMovAfterCbzOrCbnzAArch64::PredBBCheck(BB &bb, bool checkCbz, const Operand &opnd, bool is64BitOnly) const
6156 {
6157     if (bb.GetKind() != BB::kBBIf) {
6158         return false;
6159     }
6160 
6161     Insn *condBr = cgcfg->FindLastCondBrInsn(bb);
6162     DEBUG_ASSERT(condBr != nullptr, "condBr must be found");
6163     if (!cgcfg->IsCompareAndBranchInsn(*condBr)) {
6164         return false;
6165     }
6166     MOperator mOp = condBr->GetMachineOpcode();
6167     if (is64BitOnly && checkCbz && mOp != MOP_xcbz) {
6168         return false;
6169     }
6170     if (is64BitOnly && !checkCbz && mOp != MOP_xcbnz) {
6171         return false;
6172     }
6173     if (!is64BitOnly && checkCbz && mOp != MOP_xcbz && mOp != MOP_wcbz) {
6174         return false;
6175     }
6176     if (!is64BitOnly && !checkCbz && mOp != MOP_xcbnz && mOp != MOP_wcbnz) {
6177         return false;
6178     }
6179     return RegOperand::IsSameRegNO(condBr->GetOperand(kInsnFirstOpnd), opnd);
6180 }
6181 
6182 bool DeleteMovAfterCbzOrCbnzAArch64::OpndDefByMovZero(const Insn &insn) const
6183 {
6184     MOperator defMop = insn.GetMachineOpcode();
6185     switch (defMop) {
6186         case MOP_wmovri32:
6187         case MOP_xmovri64: {
6188             Operand &defOpnd = insn.GetOperand(kInsnSecondOpnd);
6189             DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
6190             auto &defConst = static_cast<ImmOperand &>(defOpnd);
6191             int64 defConstValue = defConst.GetValue();
6192             if (defConstValue == 0) {
6193                 return true;
6194             }
6195             return false;
6196         }
6197         case MOP_xmovrr:
6198         case MOP_wmovrr: {
6199             Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
6200             DEBUG_ASSERT(secondOpnd.IsRegister(), "expects RegOperand here");
6201             auto &regOpnd = static_cast<RegOperand &>(secondOpnd);
6202             return IsZeroRegister(regOpnd);
6203         }
6204         default:
6205             return false;
6206     }
6207 }
6208 
6209 /* check whether a pre-defining insn of the first operand of testInsn exists in the current BB */
6210 bool DeleteMovAfterCbzOrCbnzAArch64::NoPreDefine(Insn &testInsn) const
6211 {
6212     Insn *nextInsn = nullptr;
6213     for (Insn *insn = testInsn.GetBB()->GetFirstMachineInsn(); insn != nullptr && insn != &testInsn; insn = nextInsn) {
6214         nextInsn = insn->GetNextMachineInsn();
6215         if (!insn->IsMachineInstruction()) {
6216             continue;
6217         }
6218         DEBUG_ASSERT(!insn->IsCall(), "CG internal error, call insn should not be at the middle of the BB.");
6219         const InsnDesc *md = insn->GetDesc();
6220         uint32 opndNum = insn->GetOperandSize();
6221         for (uint32 i = 0; i < opndNum; ++i) {
6222             Operand &opnd = insn->GetOperand(i);
6223             if (!md->opndMD[i]->IsDef()) {
6224                 continue;
6225             }
6226             if (opnd.IsMemoryAccessOperand()) {
6227                 auto &memOpnd = static_cast<MemOperand &>(opnd);
6228                 RegOperand *base = memOpnd.GetBaseRegister();
6229                 DEBUG_ASSERT(base != nullptr, "nullptr check");
6230                 DEBUG_ASSERT(base->IsRegister(), "expects RegOperand");
6231                 if (RegOperand::IsSameRegNO(*base, testInsn.GetOperand(kInsnFirstOpnd)) &&
6232                     memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
6233                     (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
6234                     return false;
6235                 }
6236             } else if (opnd.IsList()) {
6237                 for (auto &operand : static_cast<const ListOperand &>(opnd).GetOperands()) {
6238                     if (RegOperand::IsSameRegNO(testInsn.GetOperand(kInsnFirstOpnd), *operand)) {
6239                         return false;
6240                     }
6241                 }
6242             } else if (opnd.IsRegister()) {
6243                 if (RegOperand::IsSameRegNO(testInsn.GetOperand(kInsnFirstOpnd), opnd)) {
6244                     return false;
6245                 }
6246             }
6247         }
6248     }
6249     return true;
6250 }
6251 
6252 bool DeleteMovAfterCbzOrCbnzAArch64::NoMoreThan32BitUse(Insn &testInsn) const
6253 {
6254     auto &testOpnd = static_cast<RegOperand &>(testInsn.GetOperand(kFirstOpnd));
6255     InsnSet regUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(testInsn, kInsnFirstOpnd, false);
6256     for (auto useInsn : regUseInsnSet) {
6257         MOperator mop = useInsn->GetMachineOpcode();
6258         if (mop == MOP_pseudo_ret_int) {
6259             if (cgFunc.GetFunction().GetReturnType()->GetSize() > k4ByteSize) {
6260                 return false;
6261             }
6262             continue;
6263         }
6264         uint32 optSize = useInsn->GetOperandSize();
6265         const InsnDesc *md = useInsn->GetDesc();
6266         for (uint32 i = 0; i < optSize; i++) {
6267             auto &opnd = useInsn->GetOperand(i);
6268             const auto *opndDesc = md->GetOpndDes(i);
6269             if (opndDesc->IsDef()) {
6270                 continue;
6271             }
6272             if (opnd.IsRegister()) {
6273                 auto &regOpnd = static_cast<RegOperand &>(opnd);
6274                 if (RegOperand::IsSameRegNO(regOpnd, testOpnd) && opndDesc->GetSize() > k32BitSize) {
6275                     return false;
6276                 }
6277             } else if (opnd.IsMemoryAccessOperand()) {
6278                 auto &memOpnd = static_cast<MemOperand &>(opnd);
6279                 auto *baseOpnd = memOpnd.GetBaseRegister();
6280                 auto *indexOpnd = memOpnd.GetIndexRegister();
6281                 if ((baseOpnd != nullptr) && (RegOperand::IsSameRegNO(*baseOpnd, testOpnd)) &&
6282                     (baseOpnd->GetSize() > k32BitSize)) {
6283                     return false;
6284                 }
6285                 if ((indexOpnd != nullptr) && (RegOperand::IsSameRegNO(*indexOpnd, testOpnd)) &&
6286                     (indexOpnd->GetSize() > k32BitSize)) {
6287                     return false;
6288                 }
6289             } else if (opnd.IsList()) {
6290                 auto &listOpnd = static_cast<ListOperand &>(opnd);
6291                 for (auto *regOpnd : std::as_const(listOpnd.GetOperands())) {
6292                     if (RegOperand::IsSameRegNO(*regOpnd, testOpnd) && regOpnd->GetSize() > k32BitSize) {
6293                         return false;
6294                     }
6295                 }
6296             }
6297         }
6298     }
6299     return true;
6300 }
6301 
6302 void DeleteMovAfterCbzOrCbnzAArch64::ProcessBBHandle(BB *processBB, const BB &bb, const Insn &insn) const
6303 {
6304     DEBUG_ASSERT(processBB != nullptr, "process_bb is null in ProcessBBHandle");
6305     MOperator condBrMop = insn.GetMachineOpcode();
6306     bool is64BitOnly = (condBrMop == MOP_xcbz || condBrMop == MOP_xcbnz);
6307     FOR_BB_INSNS_SAFE(processInsn, processBB, nextProcessInsn)
6308     {
6309         nextProcessInsn = processInsn->GetNextMachineInsn();
6310         if (!processInsn->IsMachineInstruction()) {
6311             continue;
6312         }
6313         /* register may be a caller save register */
6314         if (processInsn->IsCall()) {
6315             break;
6316         }
6317         if (!OpndDefByMovZero(*processInsn) || !NoPreDefine(*processInsn) ||
6318             !RegOperand::IsSameRegNO(processInsn->GetOperand(kInsnFirstOpnd), insn.GetOperand(kInsnFirstOpnd))) {
6319             continue;
6320         }
6321         bool toDoOpt = true;
6322         MOperator condBrMop = insn.GetMachineOpcode();
6323         /* process elseBB, other preds must be cbz */
6324         if (condBrMop == MOP_wcbnz || condBrMop == MOP_xcbnz) {
6325             /* check out all preds of process_bb */
6326             for (auto *processBBPred : processBB->GetPreds()) {
6327                 if (processBBPred == &bb) {
6328                     continue;
6329                 }
6330                 if (!PredBBCheck(*processBBPred, true, processInsn->GetOperand(kInsnFirstOpnd), is64BitOnly)) {
6331                     toDoOpt = false;
6332                     break;
6333                 }
6334             }
6335         } else {
6336             /* process ifBB, other preds can be cbz or cbnz(one at most) */
6337             for (auto processBBPred : processBB->GetPreds()) {
6338                 if (processBBPred == &bb) {
6339                     continue;
6340                 }
6341                 /* for cbnz pred, there is one at most */
6342                 if (!PredBBCheck(*processBBPred, processBBPred != processBB->GetPrev(),
6343                                  processInsn->GetOperand(kInsnFirstOpnd), is64BitOnly)) {
6344                     toDoOpt = false;
6345                     break;
6346                 }
6347             }
6348         }
6349         if (!is64BitOnly && !NoMoreThan32BitUse(*processInsn)) {
6350             toDoOpt = false;
6351         }
6352         if (!toDoOpt) {
6353             continue;
6354         }
6355         processBB->RemoveInsn(*processInsn);
6356     }
6357 }
6358 
6359 void DeleteMovAfterCbzOrCbnzAArch64::Run(BB &bb, Insn &insn)
6360 {
6361     if (!cgFunc.GetRDStatus()) {
6362         return;
6363     }
6364     if (bb.GetKind() != BB::kBBIf) {
6365         return;
6366     }
6367     if (&insn != cgcfg->FindLastCondBrInsn(bb)) {
6368         return;
6369     }
6370     if (!cgcfg->IsCompareAndBranchInsn(insn)) {
6371         return;
6372     }
6373     BB *processBB = nullptr;
6374     if (bb.GetNext() == maplebe::CGCFG::GetTargetSuc(bb)) {
6375         return;
6376     }
6377 
6378     MOperator condBrMop = insn.GetMachineOpcode();
6379     if (condBrMop == MOP_wcbnz || condBrMop == MOP_xcbnz) {
6380         processBB = bb.GetNext();
6381     } else {
6382         processBB = maplebe::CGCFG::GetTargetSuc(bb);
6383     }
6384 
6385     DEBUG_ASSERT(processBB != nullptr, "process_bb is null in DeleteMovAfterCbzOrCbnzAArch64::Run");
6386     ProcessBBHandle(processBB, bb, insn);
6387 }
6388 
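/*
 * Illustrative sketch (hypothetical registers) of the transformation performed by
 * ComplexMemOperandAddAArch64::Run below: a register-register add feeding a load/store is folded
 * into a [base, index] memory operand.
 *   add x2, x0, x1
 *   ldr w3, [x2]
 * =>
 *   ldr w3, [x0, x1]
 * When the base register was produced by the "ldr wn, [...]" case documented for IsExpandBaseOpnd,
 * the 32-bit wn is widened to its 64-bit xn counterpart before the new operand is formed.
 */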
6389 /* ldr wn, [x1, wn, SXTW]
6390  * add x2, wn, x2
6391  */
6392 bool ComplexMemOperandAddAArch64::IsExpandBaseOpnd(const Insn &insn, const Insn &prevInsn) const
6393 {
6394     MOperator prevMop = prevInsn.GetMachineOpcode();
6395     if (prevMop >= MOP_wldrsb && prevMop <= MOP_xldr &&
6396         prevInsn.GetOperand(kInsnFirstOpnd).Equals(insn.GetOperand(kInsnSecondOpnd))) {
6397         return true;
6398     }
6399     return false;
6400 }
6401 
6402 void ComplexMemOperandAddAArch64::Run(BB &bb, Insn &insn)
6403 {
6404     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
6405     Insn *nextInsn = insn.GetNextMachineInsn();
6406     if (nextInsn == nullptr) {
6407         return;
6408     }
6409     Insn *prevInsn = insn.GetPreviousMachineInsn();
6410     MOperator thisMop = insn.GetMachineOpcode();
6411     if (thisMop != MOP_xaddrrr && thisMop != MOP_waddrrr) {
6412         return;
6413     }
6414     MOperator nextMop = nextInsn->GetMachineOpcode();
6415     if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
6416         if (!IsMemOperandOptPattern(insn, *nextInsn)) {
6417             return;
6418         }
6419         MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
6420         auto newBaseOpnd = static_cast<RegOperand *>(&insn.GetOperand(kInsnSecondOpnd));
6421         auto newIndexOpnd = static_cast<RegOperand *>(&insn.GetOperand(kInsnThirdOpnd));
6422         regno_t memBaseOpndRegNO = newBaseOpnd->GetRegisterNumber();
6423         if (newBaseOpnd->GetSize() <= k32BitSize && prevInsn != nullptr && IsExpandBaseOpnd(insn, *prevInsn)) {
6424             newBaseOpnd = &aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(memBaseOpndRegNO),
6425                                                                              k64BitSize, kRegTyInt);
6426         }
6427         if (newBaseOpnd->GetSize() != k64BitSize) {
6428             return;
6429         }
6430         DEBUG_ASSERT(memOpnd != nullptr, "memOpnd should not be nullptr");
6431         if (newIndexOpnd->GetSize() <= k32BitSize) {
6432             MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
6433                                                                        newBaseOpnd, newIndexOpnd, 0, false);
6434             nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
6435         } else {
6436             MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
6437                                                                        newBaseOpnd, newIndexOpnd, nullptr, nullptr);
6438             nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
6439         }
6440         bb.RemoveInsn(insn);
6441     }
6442 }
6443 
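/*
 * Illustrative sketch of the pattern handled by CombineMovInsnBeforeCSelPattern, inferred from
 * CheckCondition()/Run() below; the registers and immediates are hypothetical:
 *   mov  w1, #8
 *   mov  w2, #5
 *   cmp  w0, #3            // #3 == |8 - 5|
 *   csel w3, w1, w2, EQ
 * =>
 *   csel w3, w0, wzr, EQ   // on EQ, w0 equals the difference #3
 *   add  w3, w3, #5        // add back the smaller immediate
 * When imm1 < imm2 the condition is reversed, and when the two immediates differ by exactly 1
 * a cset is emitted instead of the csel.
 */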
6444 Insn *CombineMovInsnBeforeCSelPattern::FindPrevMovInsn(const Insn &insn, regno_t regNo) const
6445 {
6446     for (Insn *curInsn = insn.GetPreviousMachineInsn(); curInsn != nullptr;
6447          curInsn = curInsn->GetPreviousMachineInsn()) {
6448         MOperator mop = curInsn->GetMachineOpcode();
6449         if ((mop == MOP_wmovri32 || mop == MOP_xmovri64) &&
6450             static_cast<RegOperand &>(curInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() == regNo) {
6451             return curInsn;
6452         }
6453         // If the register is redefined between the mov and csel insns, the optimization cannot be performed.
6454         if (curInsn->IsRegDefined(regNo)) {
6455             break;
6456         }
6457     }
6458     return nullptr;
6459 }
6460 
6461 Insn *CombineMovInsnBeforeCSelPattern::FindPrevCmpInsn(const Insn &insn) const
6462 {
6463     for (Insn *curInsn = insn.GetPreviousMachineInsn(); curInsn != nullptr;
6464          curInsn = curInsn->GetPreviousMachineInsn()) {
6465         MOperator mop = curInsn->GetMachineOpcode();
6466         if (mop == MOP_wcmpri || mop == MOP_xcmpri) {
6467             return curInsn;
6468         }
6469     }
6470     return nullptr;
6471 }
6472 
6473 bool CombineMovInsnBeforeCSelPattern::CheckCondition(Insn &insn)
6474 {
6475     MOperator curMop = insn.GetMachineOpcode();
6476     if (curMop != MOP_wcselrrrc && curMop != MOP_xcselrrrc) {
6477         return false;
6478     }
6479 
6480     auto &condOpnd = static_cast<CondOperand &>(insn.GetOperand(kInsnFourthOpnd));
6481     if (condOpnd.GetCode() != CC_NE && condOpnd.GetCode() != CC_EQ) {
6482         return false;
6483     }
6484 
6485     auto &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
6486     auto &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
6487     regno_t regNo1 = opnd1.GetRegisterNumber();
6488     regno_t regNo2 = opnd2.GetRegisterNumber();
6489     if (regNo1 == regNo2) {
6490         return false;
6491     }
6492 
6493     insnMov1 = FindPrevMovInsn(insn, regNo1);
6494     if (insnMov1 == nullptr) {
6495         return false;
6496     }
6497     insnMov2 = FindPrevMovInsn(insn, regNo2);
6498     if (insnMov2 == nullptr) {
6499         return false;
6500     }
6501     cmpInsn = FindPrevCmpInsn(insn);
6502     if (cmpInsn == nullptr) {
6503         return false;
6504     }
6505 
6506     auto &cmpImmOpnd = static_cast<ImmOperand &>(cmpInsn->GetOperand(kInsnThirdOpnd));
6507     auto &immOpnd1 = static_cast<ImmOperand &>(insnMov1->GetOperand(kInsnSecondOpnd));
6508     auto &immOpnd2 = static_cast<ImmOperand &>(insnMov2->GetOperand(kInsnSecondOpnd));
6509     auto maxImm = std::max(immOpnd1.GetValue(), immOpnd2.GetValue());
6510     auto minImm = std::min(immOpnd1.GetValue(), immOpnd2.GetValue());
6511     // avoid overflow when computing the difference between imm1 and imm2
6512     if (minImm < 0 && maxImm >= minImm - INT64_MIN) {
6513         return false;
6514     }
6515     auto diffValue = maxImm - minImm;
6516     if (diffValue == 0 || cmpImmOpnd.GetValue() != diffValue) {
6517         return false;
6518     }
6519     // condition 5: when imm1 < imm2, the condition must be NE unless the difference is exactly 1
6520     if (immOpnd1.GetValue() < immOpnd2.GetValue() && condOpnd.GetCode() != CC_NE && diffValue != 1) {
6521         return false;
6522     }
6523     // condition 6: when imm1 > imm2, the condition must be EQ unless the difference is exactly 1
6524     if (immOpnd1.GetValue() > immOpnd2.GetValue() && condOpnd.GetCode() != CC_EQ && diffValue != 1) {
6525         return false;
6526     }
6527     if (immOpnd1.GetValue() < immOpnd2.GetValue()) {
6528         needReverseCond = true;
6529     }
6530     if (diffValue == 1 && ((immOpnd1.GetValue() < immOpnd2.GetValue() && condOpnd.GetCode() != CC_NE) ||
6531                            (immOpnd1.GetValue() > immOpnd2.GetValue() && condOpnd.GetCode() != CC_EQ))) {
6532         needCsetInsn = true;
6533     }
6534 
6535     if (IfOperandIsLiveAfterInsn(opnd1, insn) || IfOperandIsLiveAfterInsn(opnd2, insn)) {
6536         return false;
6537     }
6538 
6539     return true;
6540 }
6541 
6542 void CombineMovInsnBeforeCSelPattern::Run(BB &bb, Insn &insn)
6543 {
6544     if (!CheckCondition(insn)) {
6545         return;
6546     }
6547 
6548     uint32 opndSize = insn.GetDesc()->opndMD[kInsnFirstOpnd]->GetSize();
6549     MOperator mOp = opndSize <= k32BitSize ? MOP_waddrri12 : MOP_xaddrri12;
6550     auto &opnd0 = insn.GetOperand(kInsnFirstOpnd);
6551     auto &cmpSrcopnd = cmpInsn->GetOperand(kInsnSecondOpnd);
6552     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
6553     auto *condOpnd = static_cast<CondOperand *>(&insn.GetOperand(kInsnFourthOpnd));
6554     CondOperand &reverseCondOpnd = aarFunc->GetCondOperand(GetReverseCC(condOpnd->GetCode()));
6555     if (needReverseCond) {
6556         condOpnd = &reverseCondOpnd;
6557     }
6558 
6559     // csel insn or cset insn
6560     Insn *newInsn = nullptr;
6561     // cset insn
6562     if (needCsetInsn) {
6563         MOperator newMop = opndSize <= k32BitSize ? MOP_wcsetrc : MOP_xcsetrc;
6564         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd0, *condOpnd, insn.GetOperand(kInsnFifthOpnd));
6565     } else {
6566         // csel insn
6567         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), opnd0, cmpSrcopnd,
6568                                                        cgFunc->GetZeroOpnd(opndSize), *condOpnd,
6569                                                        insn.GetOperand(kInsnFifthOpnd));
6570     }
6571 
6572     auto &immOpnd1 = static_cast<ImmOperand &>(insnMov1->GetOperand(kInsnSecondOpnd));
6573     auto &immOpnd2 = static_cast<ImmOperand &>(insnMov2->GetOperand(kInsnSecondOpnd));
6574     int64 value = immOpnd1.GetValue() > immOpnd2.GetValue() ? immOpnd2.GetValue() : immOpnd1.GetValue();
6575     // add Insn
6576     auto &newImmOpnd = aarFunc->CreateImmOperand(value, k12BitSize, false);
6577     Insn &addInsn = cgFunc->GetInsnBuilder()->BuildInsn(mOp, opnd0, opnd0, newImmOpnd);
6578 
6579     bb.InsertInsnAfter(insn, addInsn);
6580     bb.ReplaceInsn(insn, *newInsn);
6581 }
6582 
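/*
 * Illustrative sketches (hypothetical registers and labels) of the two cases handled by
 * OneHoleBranchesPreAArch64::Run below:
 * case 1: drop a redundant zero-extension feeding a cbz/cbnz
 *   uxtb w0, w1          // w1 already has no more than 8 valid bits
 *   cbz  w0, .L
 * =>
 *   cbz  w1, .L
 * case 2: turn "extract bit 0, xor with 1, branch on zero" into a single test-bit branch
 *   uxtb w1, w2          // w2 has exactly 1 valid bit
 *   eor  w0, w1, #1
 *   cbz  w0, .L
 * =>
 *   tbnz w2, #0, .L
 */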
6583 MOperator OneHoleBranchesPreAArch64::FindNewMop(const BB &bb, const Insn &insn) const
6584 {
6585     MOperator newOp = MOP_undef;
6586     if (&insn != bb.GetLastInsn()) {
6587         return newOp;
6588     }
6589     MOperator thisMop = insn.GetMachineOpcode();
6590     if (thisMop != MOP_wcbz && thisMop != MOP_wcbnz && thisMop != MOP_xcbz && thisMop != MOP_xcbnz) {
6591         return newOp;
6592     }
6593     switch (thisMop) {
6594         case MOP_wcbz:
6595             newOp = MOP_wtbnz;
6596             break;
6597         case MOP_wcbnz:
6598             newOp = MOP_wtbz;
6599             break;
6600         case MOP_xcbz:
6601             newOp = MOP_xtbnz;
6602             break;
6603         case MOP_xcbnz:
6604             newOp = MOP_xtbz;
6605             break;
6606         default:
6607             CHECK_FATAL(false, "can not touch here");
6608             break;
6609     }
6610     return newOp;
6611 }
6612 
6613 void OneHoleBranchesPreAArch64::Run(BB &bb, Insn &insn)
6614 {
6615     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
6616     MOperator newOp = FindNewMop(bb, insn);
6617     if (newOp == MOP_undef) {
6618         return;
6619     }
6620     Insn *prevInsn = insn.GetPreviousMachineInsn();
6621     LabelOperand &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
6622     if (prevInsn != nullptr && prevInsn->GetMachineOpcode() == MOP_xuxtb32 &&
6623         (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() <= k8BitSize ||
6624          static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetValidBitsNum() <= k8BitSize)) {
6625         if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
6626             return;
6627         }
6628         if (IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)), insn)) {
6629             return;
6630         }
6631         insn.SetOperand(kInsnFirstOpnd, prevInsn->GetOperand(kInsnSecondOpnd));
6632         if (CGOptions::DoCGSSA()) {
6633             CHECK_FATAL(false, "check this case in ssa opt");
6634         }
6635         bb.RemoveInsn(*prevInsn);
6636     }
6637     if (prevInsn != nullptr &&
6638         (prevInsn->GetMachineOpcode() == MOP_xeorrri13 || prevInsn->GetMachineOpcode() == MOP_weorrri12) &&
6639         static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue() == 1) {
6640         if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
6641             return;
6642         }
6643         Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
6644         if (prevPrevInsn == nullptr) {
6645             return;
6646         }
6647         if (prevPrevInsn->GetMachineOpcode() != MOP_xuxtb32 ||
6648             static_cast<RegOperand &>(prevPrevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() != 1) {
6649             return;
6650         }
6651         if (&(prevPrevInsn->GetOperand(kInsnFirstOpnd)) != &(prevInsn->GetOperand(kInsnSecondOpnd))) {
6652             return;
6653         }
6654         ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(0, k8BitSize, false);
6655         auto &regOperand = static_cast<RegOperand &>(prevPrevInsn->GetOperand(kInsnSecondOpnd));
6656         if (CGOptions::DoCGSSA()) {
6657             CHECK_FATAL(false, "check this case in ssa opt");
6658         }
6659         bb.InsertInsnAfter(insn, cgFunc.GetInsnBuilder()->BuildInsn(newOp, regOperand, oneHoleOpnd, label));
6660         bb.RemoveInsn(insn);
6661         bb.RemoveInsn(*prevInsn);
6662         bb.RemoveInsn(*prevPrevInsn);
6663     }
6664 }
6665 
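/*
 * Illustrative sketch (hypothetical register, immediates and label) of LoadFloatPointPattern,
 * inferred from the matching code below: a 64-bit constant materialized by movz plus three movk
 * insns is replaced with a single literal load.
 *   movz x0, #imm0, LSL #0
 *   movk x0, #imm1, LSL #16
 *   movk x0, #imm2, LSL #32
 *   movk x0, #imm3, LSL #48
 * =>
 *   ldr  x0, .L_literal      // .L_literal holds (imm3<<48 | imm2<<32 | imm1<<16 | imm0)
 */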
6666 bool LoadFloatPointPattern::FindLoadFloatPoint(Insn &insn)
6667 {
6668     MOperator mOp = insn.GetMachineOpcode();
6669     optInsn.clear();
6670     if (mOp != MOP_xmovzri16) {
6671         return false;
6672     }
6673     optInsn.emplace_back(&insn);
6674 
6675     Insn *insnMov2 = insn.GetNextMachineInsn();
6676     if (insnMov2 == nullptr) {
6677         return false;
6678     }
6679     if (insnMov2->GetMachineOpcode() != MOP_xmovkri16) {
6680         return false;
6681     }
6682     optInsn.emplace_back(insnMov2);
6683 
6684     Insn *insnMov3 = insnMov2->GetNextMachineInsn();
6685     if (insnMov3 == nullptr) {
6686         return false;
6687     }
6688     if (insnMov3->GetMachineOpcode() != MOP_xmovkri16) {
6689         return false;
6690     }
6691     optInsn.emplace_back(insnMov3);
6692 
6693     Insn *insnMov4 = insnMov3->GetNextMachineInsn();
6694     if (insnMov4 == nullptr) {
6695         return false;
6696     }
6697     if (insnMov4->GetMachineOpcode() != MOP_xmovkri16) {
6698         return false;
6699     }
6700     optInsn.emplace_back(insnMov4);
6701     return true;
6702 }
6703 
6704 bool LoadFloatPointPattern::IsPatternMatch()
6705 {
6706     int insnNum = 0;
6707     Insn *insn1 = optInsn[insnNum];
6708     Insn *insn2 = optInsn[++insnNum];
6709     Insn *insn3 = optInsn[++insnNum];
6710     Insn *insn4 = optInsn[++insnNum];
6711     if ((static_cast<RegOperand &>(insn1->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
6712          static_cast<RegOperand &>(insn2->GetOperand(kInsnFirstOpnd)).GetRegisterNumber()) ||
6713         (static_cast<RegOperand &>(insn2->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
6714          static_cast<RegOperand &>(insn3->GetOperand(kInsnFirstOpnd)).GetRegisterNumber()) ||
6715         (static_cast<RegOperand &>(insn3->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
6716          static_cast<RegOperand &>(insn4->GetOperand(kInsnFirstOpnd)).GetRegisterNumber())) {
6717         return false;
6718     }
6719     if ((static_cast<BitShiftOperand &>(insn1->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != 0) ||
6720         (static_cast<BitShiftOperand &>(insn2->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != k16BitSize) ||
6721         (static_cast<BitShiftOperand &>(insn3->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != k32BitSize) ||
6722         (static_cast<BitShiftOperand &>(insn4->GetOperand(kInsnThirdOpnd)).GetShiftAmount() !=
6723          (k16BitSize + k32BitSize))) {
6724         return false;
6725     }
6726     return true;
6727 }
6728 
6729 bool LoadFloatPointPattern::CheckCondition(Insn &insn)
6730 {
6731     if (FindLoadFloatPoint(insn) && IsPatternMatch()) {
6732         return true;
6733     }
6734     return false;
6735 }
6736 
6737 void LoadFloatPointPattern::Run(BB &bb, Insn &insn)
6738 {
6739     /* the movz/movk sequence shifts its 16-bit chunks left by 0, 16, 32 and 48 bits */
6740     if (CheckCondition(insn)) {
6741         int insnNum = 0;
6742         Insn *insn1 = optInsn[insnNum];
6743         Insn *insn2 = optInsn[++insnNum];
6744         Insn *insn3 = optInsn[++insnNum];
6745         Insn *insn4 = optInsn[++insnNum];
6746         auto &movConst1 = static_cast<ImmOperand &>(insn1->GetOperand(kInsnSecondOpnd));
6747         auto &movConst2 = static_cast<ImmOperand &>(insn2->GetOperand(kInsnSecondOpnd));
6748         auto &movConst3 = static_cast<ImmOperand &>(insn3->GetOperand(kInsnSecondOpnd));
6749         auto &movConst4 = static_cast<ImmOperand &>(insn4->GetOperand(kInsnSecondOpnd));
6750         /* movk/movz's immOpnd is 16-bit unsigned immediate */
6751         uint64 value = static_cast<uint64>(movConst1.GetValue()) +
6752                        (static_cast<uint64>(movConst2.GetValue()) << k16BitSize) +
6753                        (static_cast<uint64>(movConst3.GetValue()) << k32BitSize) +
6754                        (static_cast<uint64>(movConst4.GetValue()) << (k16BitSize + k32BitSize));
6755 
6756         LabelIdx labelIdx = cgFunc->CreateLabel();
6757         AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
6758         LabelOperand &target = aarch64CGFunc->GetOrCreateLabelOperand(labelIdx);
6759         cgFunc->InsertLabelMap(labelIdx, value);
6760         Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xldli, insn4->GetOperand(kInsnFirstOpnd), target);
6761         bb.InsertInsnAfter(*insn4, newInsn);
6762         bb.RemoveInsn(*insn1);
6763         bb.RemoveInsn(*insn2);
6764         bb.RemoveInsn(*insn3);
6765         bb.RemoveInsn(*insn4);
6766     }
6767 }
6768 
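/*
 * Illustrative sketch (hypothetical registers) of ReplaceOrrToMovAArch64: an orr with an
 * immediate of zero is just a register copy.
 *   orr w0, w1, #0
 * =>
 *   mov w0, w1
 */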
6769 void ReplaceOrrToMovAArch64::Run(BB &bb, Insn &insn)
6770 {
6771     Operand *opndOfOrr = nullptr;
6772     ImmOperand *immOpnd = nullptr;
6773     RegOperand *reg1 = nullptr;
6774     RegOperand *reg2 = nullptr;
6775     MOperator thisMop = insn.GetMachineOpcode();
6776     MOperator newMop = MOP_undef;
6777     switch (thisMop) {
6778         case MOP_wiorrri12: { /* opnd1 is reg32 and opnd3 is immediate. */
6779             opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
6780             reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
6781             newMop = MOP_wmovrr;
6782             break;
6783         }
6784         case MOP_xiorrri13: { /* opnd1 is reg64 and opnd3 is immediate. */
6785             opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
6786             reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
6787             newMop = MOP_xmovrr;
6788             break;
6789         }
6790         default:
6791             return; /* neither orr-with-immediate form: nothing to do, and opndOfOrr would be null below */
6792     }
6793     DEBUG_ASSERT(opndOfOrr->IsIntImmediate(), "expects immediate operand");
6794     immOpnd = static_cast<ImmOperand *>(opndOfOrr);
6795     if (immOpnd->GetValue() == 0) {
6796         reg1 = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
6797         if (CGOptions::DoCGSSA()) {
6798             CHECK_FATAL(false, "check this case in ssa opt");
6799         }
6800         bb.ReplaceInsn(insn, cgFunc.GetInsnBuilder()->BuildInsn(newMop, *reg1, *reg2));
6801     }
6802 }
6803 
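/*
 * Illustrative sketch (hypothetical registers and immediate) of ReplaceCmpToCmnAArch64: a compare
 * against a small negative constant that was materialized with a mov can use cmn instead.
 *   mov w1, #-5
 *   cmp w0, w1
 * =>
 *   mov w1, #-5          // the mov itself is left in place by this pattern
 *   cmn w0, #5
 */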
6804 void ReplaceCmpToCmnAArch64::Run(BB &bb, Insn &insn)
6805 {
6806     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
6807     MOperator thisMop = insn.GetMachineOpcode();
6808     MOperator nextMop = MOP_undef;
6809     MOperator newMop = MOP_undef;
6810     uint64 negOne = UINT64_MAX;
6811     switch (thisMop) {
6812         case MOP_wmovri32: {
6813             nextMop = MOP_wcmprr;
6814             newMop = MOP_wcmnri;
6815             negOne = UINT32_MAX;
6816             break;
6817         }
6818         case MOP_xmovri64: {
6819             nextMop = MOP_xcmprr;
6820             newMop = MOP_xcmnri;
6821             break;
6822         }
6823         default:
6824             break;
6825     }
6826     Operand *opnd1OfMov = &(insn.GetOperand(kInsnFirstOpnd));
6827     Operand *opnd2OfMov = &(insn.GetOperand(kInsnSecondOpnd));
6828     if (opnd2OfMov->IsIntImmediate()) {
6829         ImmOperand *immOpnd = static_cast<ImmOperand *>(opnd2OfMov);
6830         int64 iVal = immOpnd->GetValue();
6831         if ((kNegativeImmLowerLimit <= iVal && iVal < 0) || static_cast<uint64>(iVal) == negOne) {
6832             Insn *nextInsn = insn.GetNextMachineInsn(); /* get the next insn to judge if it is a cmp instruction. */
6833             if (nextInsn != nullptr) {
6834                 if (nextInsn->GetMachineOpcode() == nextMop) {
6835                     Operand *opndCmp2 = &(nextInsn->GetOperand(kInsnSecondOpnd));
6836                     Operand *opndCmp3 = &(nextInsn->GetOperand(kInsnThirdOpnd)); /* get the third operand of cmp */
6837                     /* if the first operand of mov equals the third operand of cmp, match the pattern. */
6838                     if (opnd1OfMov == opndCmp3) {
6839                         if (static_cast<uint64>(iVal) == negOne) {
6840                             iVal = -1;
6841                         }
6842                         ImmOperand &newOpnd = aarch64CGFunc->CreateImmOperand(iVal * (-1), immOpnd->GetSize(), false);
6843                         Operand &regFlag = nextInsn->GetOperand(kInsnFirstOpnd);
6844                         bb.ReplaceInsn(*nextInsn,
6845                                        cgFunc.GetInsnBuilder()->BuildInsn(newMop, regFlag, *opndCmp2, newOpnd));
6846                     }
6847                 }
6848             }
6849         }
6850     }
6851 }
6852 
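/*
 * Illustrative sketch (hypothetical virtual register x10) of RemoveIncRefPattern, inferred from
 * CheckCondition below: when both arguments of the inc/dec helper are the same object, the call
 * appears to be treated as a no-op and is removed together with its two argument moves.
 *   mov x0, x10
 *   mov x1, x10
 *   bl  MCC_IncDecRef_NaiveRCFast
 * =>
 *   (removed)
 */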
6853 bool RemoveIncRefPattern::CheckCondition(Insn &insn)
6854 {
6855     MOperator mOp = insn.GetMachineOpcode();
6856     if (mOp != MOP_xbl) {
6857         return false;
6858     }
6859     auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
6860     if (target.GetName() != "MCC_IncDecRef_NaiveRCFast") {
6861         return false;
6862     }
6863     insnMov2 = insn.GetPreviousMachineInsn();
6864     if (insnMov2 == nullptr) {
6865         return false;
6866     }
6867     MOperator mopMov2 = insnMov2->GetMachineOpcode();
6868     if (mopMov2 != MOP_xmovrr) {
6869         return false;
6870     }
6871     insnMov1 = insnMov2->GetPreviousMachineInsn();
6872     if (insnMov1 == nullptr) {
6873         return false;
6874     }
6875     MOperator mopMov1 = insnMov1->GetMachineOpcode();
6876     if (mopMov1 != MOP_xmovrr) {
6877         return false;
6878     }
6879     if (static_cast<RegOperand &>(insnMov1->GetOperand(kInsnSecondOpnd)).GetRegisterNumber() !=
6880         static_cast<RegOperand &>(insnMov2->GetOperand(kInsnSecondOpnd)).GetRegisterNumber()) {
6881         return false;
6882     }
6883     auto &mov2Dest = static_cast<RegOperand &>(insnMov2->GetOperand(kInsnFirstOpnd));
6884     auto &mov1Dest = static_cast<RegOperand &>(insnMov1->GetOperand(kInsnFirstOpnd));
6885     if (mov1Dest.IsVirtualRegister() || mov2Dest.IsVirtualRegister() || mov1Dest.GetRegisterNumber() != R0 ||
6886         mov2Dest.GetRegisterNumber() != R1) {
6887         return false;
6888     }
6889     return true;
6890 }
6891 
6892 void RemoveIncRefPattern::Run(BB &bb, Insn &insn)
6893 {
6894     if (!CheckCondition(insn)) {
6895         return;
6896     }
6897     bb.RemoveInsn(insn);
6898     bb.RemoveInsn(*insnMov2);
6899     bb.RemoveInsn(*insnMov1);
6900 }
6901 
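/*
 * Illustrative sketch (hypothetical registers and offsets) of the store form of LdrStrRevPattern,
 * inferred from CheckCondition()/Run() below: two byte stores that write a halfword in
 * byte-swapped order are folded into rev16 + strh.
 *   lsr  w2, w1, #8
 *   strb w2, [x0, #4]
 *   strb w1, [x0, #5]
 * =>
 *   rev16 w2, w1
 *   strh  w2, [x0, #4]
 * The load form (ldrb/ldrb followed by an add with LSL #8) is rewritten symmetrically into
 * ldrh + rev16.
 */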
6902 bool LdrStrRevPattern::IsAdjacentMem(const MemOperand &memOperandLow, const MemOperand &memOperandHigh) const
6903 {
6904     OfstOperand *highOfstOpnd = memOperandHigh.GetOffsetImmediate();
6905     int64 highOfstVal = highOfstOpnd ? highOfstOpnd->GetOffsetValue() : 0;
6906     OfstOperand *lowOfstOpnd = memOperandLow.GetOffsetImmediate();
6907     int64 lowOfstVal = lowOfstOpnd ? lowOfstOpnd->GetOffsetValue() : 0;
6908     if (highOfstVal - lowOfstVal != k8BitSize / kBitsPerByte) {
6909         return false;
6910     }
6911     if (!RegOperand::IsSameReg(*memOperandLow.GetBaseRegister(), *memOperandHigh.GetBaseRegister())) {
6912         return false;
6913     }
6914     return true;
6915 }
6916 
6917 void LdrStrRevPattern::Run(BB &bb, Insn &insn)
6918 {
6919     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
6920     if (!CheckCondition(insn)) {
6921         return;
6922     }
6923     Insn *newInsn = nullptr;
6924     Insn *revInsn = nullptr;
6925     Insn *lastInsn = nullptr;
6926     MOperator newMemMop = insn.GetMachineOpcode() == MOP_wstrb ? MOP_wstrh : MOP_wldrh;
6927     MemOperand *newMemOperand =
6928         aarch64CGFunc->CreateMemOperand(MemOperand::kAddrModeBOi, k8BitSize * 2, *adjacentMemOpnd->GetBaseRegister(),
6929                                         nullptr, adjacentMemOpnd->GetOffsetOperand(), nullptr);
6930     if (isStrInsn) {
6931         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMemMop, lsrInsn->GetOperand(kFirstOpnd), *newMemOperand);
6932         revInsn = &cgFunc->GetInsnBuilder()->BuildInsn(MOP_wrevrr16, lsrInsn->GetOperand(kFirstOpnd),
6933                                                        lsrInsn->GetOperand(kSecondOpnd));
6934     } else {
6935         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMemMop, insn.GetOperand(kFirstOpnd), *newMemOperand);
6936         revInsn = &cgFunc->GetInsnBuilder()->BuildInsn(MOP_wrevrr16, lsrInsn->GetOperand(kFirstOpnd),
6937                                                        insn.GetOperand(kFirstOpnd));
6938     }
6939     if (isStrInsn) {
6940         bb.InsertInsnAfter(insn, *newInsn);
6941         bb.InsertInsnAfter(insn, *revInsn);
6942         lastInsn = revInsn;
6943     } else {
6944         bb.InsertInsnAfter(insn, *revInsn);
6945         bb.InsertInsnAfter(insn, *newInsn);
6946         lastInsn = newInsn;
6947     }
6948     bb.RemoveInsn(insn);
6949     bb.RemoveInsn(*lsrInsn);
6950     bb.RemoveInsn(*adjacentInsn);
6951     SetCurrInsn(lastInsn);
6952     return;
6953 }
6954 
6955 bool LdrStrRevPattern::CheckCondition(Insn &insn)
6956 {
6957     MOperator mop = insn.GetMachineOpcode();
6958     isStrInsn = mop == MOP_wstrb;
6959     curMemOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
6960     if (curMemOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
6961         return false;
6962     }
6963     if (isStrInsn) {
6964         adjacentInsn = insn.GetPreviousMachineInsn();
6965         if (adjacentInsn == nullptr || adjacentInsn->GetMachineOpcode() != mop) {
6966             return false;
6967         }
6968         adjacentMemOpnd = static_cast<MemOperand *>(adjacentInsn->GetMemOpnd());
6969         lsrInsn = adjacentInsn->GetPreviousMachineInsn();
6970         if (lsrInsn == nullptr ||
6971             (lsrInsn->GetMachineOpcode() != MOP_xlsrrri6 && lsrInsn->GetMachineOpcode() != MOP_wlsrrri5)) {
6972             return false;
6973         }
6974         RegOperand &lsrDst = static_cast<RegOperand &>(lsrInsn->GetOperand(kFirstOpnd));
6975         RegOperand &lsrSrc = static_cast<RegOperand &>(lsrInsn->GetOperand(kSecondOpnd));
6976         ImmOperand &lsrImm = static_cast<ImmOperand &>(lsrInsn->GetOperand(kThirdOpnd));
6977 
6978         RegOperand &currSrc = static_cast<RegOperand &>(insn.GetOperand(kFirstOpnd));
6979         RegOperand &adjacentSrc = static_cast<RegOperand &>(adjacentInsn->GetOperand(kFirstOpnd));
6980         if (lsrImm.GetValue() != k8BitSize) {
6981             return false;
6982         }
6983         if (!RegOperand::IsSameReg(lsrDst, adjacentSrc) || !RegOperand::IsSameReg(lsrSrc, currSrc)) {
6984             return false;
6985         }
6986         CHECK_NULL_FATAL(adjacentMemOpnd);
6987         if (!IsAdjacentMem(*adjacentMemOpnd, *curMemOpnd)) {
6988             return false;
6989         }
6990         if (IfOperandIsLiveAfterInsn(lsrDst, insn)) {
6991             return false;
6992         }
6993     } else {
6994         adjacentInsn = insn.GetNextMachineInsn();
6995         if (adjacentInsn == nullptr || adjacentInsn->GetMachineOpcode() != mop) {
6996             return false;
6997         }
6998         adjacentMemOpnd = static_cast<MemOperand *>(adjacentInsn->GetMemOpnd());
6999         lsrInsn = adjacentInsn->GetNextMachineInsn();
7000         if (lsrInsn == nullptr ||
7001             (lsrInsn->GetMachineOpcode() != MOP_waddrrrs && lsrInsn->GetMachineOpcode() != MOP_xaddrrrs)) {
7002             return false;
7003         }
7004         auto &addSrc1 = static_cast<RegOperand &>(lsrInsn->GetOperand(kSecondOpnd));
7005         auto &addSrc2 = static_cast<RegOperand &>(lsrInsn->GetOperand(kThirdOpnd));
7006         auto &shiftOpnd = static_cast<BitShiftOperand &>(lsrInsn->GetOperand(kInsnFourthOpnd));
7007 
7008         RegOperand &currSrc = static_cast<RegOperand &>(insn.GetOperand(kFirstOpnd));
7009         RegOperand &adjacentSrc = static_cast<RegOperand &>(adjacentInsn->GetOperand(kFirstOpnd));
7010         if (!RegOperand::IsSameReg(addSrc1, currSrc) || !RegOperand::IsSameReg(addSrc2, adjacentSrc)) {
7011             return false;
7012         }
7013         if (shiftOpnd.GetShiftOp() != BitShiftOperand::kLSL || shiftOpnd.GetShiftAmount() != k8BitSize) {
7014             return false;
7015         }
7016         if (!RegOperand::IsSameReg(*curMemOpnd->GetBaseRegister(), *adjacentMemOpnd->GetBaseRegister())) {
7017             return false;
7018         }
7019         if (!IsAdjacentMem(*adjacentMemOpnd, *curMemOpnd)) {
7020             return false;
7021         }
7022         if (IfOperandIsLiveAfterInsn(currSrc, *lsrInsn)) {
7023             return false;
7024         }
7025         if (IfOperandIsLiveAfterInsn(adjacentSrc, *lsrInsn)) {
7026             return false;
7027         }
7028     }
7029     return true;
7030 }
7031 
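/*
 * Illustrative sketch (hypothetical registers) of LongIntCompareWithZPattern, inferred from the
 * matching code below: the trailing wcmpri of the sign-of-a-64-bit-value sequence carries no new
 * information, so it is replaced by a copy of the leading xcmpri (the intermediate csinv/csinc are
 * presumably cleaned up by later passes once they become dead).
 *   cmp   x0, #0
 *   csinv w1, wzr, wzr, GE
 *   csinc w1, w1, wzr, LE
 *   cmp   w1, #0
 * =>
 *   cmp   x0, #0
 *   csinv w1, wzr, wzr, GE
 *   csinc w1, w1, wzr, LE
 *   cmp   x0, #0
 */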
7032 bool LongIntCompareWithZPattern::FindLondIntCmpWithZ(Insn &insn)
7033 {
7034     MOperator thisMop = insn.GetMachineOpcode();
7035     optInsn.clear();
7036     /* fourth */
7037     if (thisMop != MOP_wcmpri) {
7038         return false;
7039     }
7040     (void)optInsn.emplace_back(&insn);
7041 
7042     /* third */
7043     Insn *preInsn1 = insn.GetPreviousMachineInsn();
7044     if (preInsn1 == nullptr) {
7045         return false;
7046     }
7047     MOperator preMop1 = preInsn1->GetMachineOpcode();
7048     if (preMop1 != MOP_wcsincrrrc) {
7049         return false;
7050     }
7051     (void)optInsn.emplace_back(preInsn1);
7052 
7053     /* second */
7054     Insn *preInsn2 = preInsn1->GetPreviousMachineInsn();
7055     if (preInsn2 == nullptr) {
7056         return false;
7057     }
7058     MOperator preMop2 = preInsn2->GetMachineOpcode();
7059     if (preMop2 != MOP_wcsinvrrrc) {
7060         return false;
7061     }
7062     (void)optInsn.emplace_back(preInsn2);
7063 
7064     /* first */
7065     Insn *preInsn3 = preInsn2->GetPreviousMachineInsn();
7066     if (preInsn3 == nullptr) {
7067         return false;
7068     }
7069     MOperator preMop3 = preInsn3->GetMachineOpcode();
7070     if (preMop3 != MOP_xcmpri) {
7071         return false;
7072     }
7073     (void)optInsn.emplace_back(preInsn3);
7074     return true;
7075 }
7076 
7077 bool LongIntCompareWithZPattern::IsPatternMatch()
7078 {
7079     constexpr int insnLen = 4;
7080     if (optInsn.size() != insnLen) {
7081         return false;
7082     }
7083     int insnNum = 0;
7084     Insn *insn1 = optInsn[insnNum];
7085     Insn *insn2 = optInsn[++insnNum];
7086     Insn *insn3 = optInsn[++insnNum];
7087     Insn *insn4 = optInsn[++insnNum];
7088     if (IsZeroRegister(insn3->GetOperand(kInsnSecondOpnd)) && IsZeroRegister(insn3->GetOperand(kInsnThirdOpnd)) &&
7089         IsZeroRegister(insn2->GetOperand(kInsnThirdOpnd)) &&
7090         &(insn2->GetOperand(kInsnFirstOpnd)) == &(insn2->GetOperand(kInsnSecondOpnd)) &&
7091         static_cast<CondOperand &>(insn3->GetOperand(kInsnFourthOpnd)).GetCode() == CC_GE &&
7092         static_cast<CondOperand &>(insn2->GetOperand(kInsnFourthOpnd)).GetCode() == CC_LE &&
7093         static_cast<ImmOperand &>(insn1->GetOperand(kInsnThirdOpnd)).GetValue() == 0 &&
7094         static_cast<ImmOperand &>(insn4->GetOperand(kInsnThirdOpnd)).GetValue() == 0) {
7095         return true;
7096     }
7097     return false;
7098 }
7099 
7100 bool LongIntCompareWithZPattern::CheckCondition(Insn &insn)
7101 {
7102     if (FindLondIntCmpWithZ(insn) && IsPatternMatch()) {
7103         return true;
7104     }
7105     return false;
7106 }
7107 
7108 void LongIntCompareWithZPattern::Run(BB &bb, Insn &insn)
7109 {
7110     /* found pattern */
7111     if (CheckCondition(insn)) {
7112         Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(
7113             optInsn[3]->GetMachineOpcode(), optInsn[3]->GetOperand(kInsnFirstOpnd),
7114             optInsn[3]->GetOperand(kInsnSecondOpnd), optInsn[3]->GetOperand(kInsnThirdOpnd));
7115         /* use newInsn (a copy of the leading cmp) to replace optInsn[0], the trailing wcmpri */
7116         bb.ReplaceInsn(*optInsn[0], newInsn);
7117         optInsn.clear();
7118     }
7119 }
7120 
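/*
 * Illustrative sketch (hypothetical registers and symbol) of ComplexMemOperandAArch64::Run below:
 * the ":lo12:" add is folded into the memory operand of the following load/store.
 *   adrp x0, sym
 *   add  x0, x0, :lo12:sym
 *   ldr  w1, [x0, #8]
 * =>
 *   adrp x0, sym             // for C modules the combined offset is also recorded on the adrp
 *   ldr  w1, [x0, :lo12:sym+8]
 */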
7121 void ComplexMemOperandAArch64::Run(BB &bb, Insn &insn)
7122 {
7123     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
7124     Insn *nextInsn = insn.GetNextMachineInsn();
7125     if (nextInsn == nullptr) {
7126         return;
7127     }
7128     MOperator thisMop = insn.GetMachineOpcode();
7129     if (thisMop != MOP_xadrpl12) {
7130         return;
7131     }
7132 
7133     MOperator nextMop = nextInsn->GetMachineOpcode();
7134     if (nextMop > 0 &&
7135         ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldp) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstp))) {
7136         /* Check if base register of nextInsn and the dest operand of insn are identical. */
7137         MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
7138         DEBUG_ASSERT(memOpnd != nullptr, "memOpnd is null in AArch64Peep::ComplexMemOperandAArch64");
7139 
7140         /* Only for AddrMode_B_OI addressing mode. */
7141         if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
7142             return;
7143         }
7144 
7145         /* Only for intact memory addressing. */
7146         if (!memOpnd->IsIntactIndexed()) {
7147             return;
7148         }
7149 
7150         auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
7151 
7152         /* Avoid linking issues when the object is not 16-byte aligned */
7153         if (memOpnd->GetSize() == k128BitSize) {
7154             return;
7155         }
7156 
7157         /* Check if dest operand of insn is identical with base register of nextInsn. */
7158         if (memOpnd->GetBaseRegister() != &regOpnd) {
7159             return;
7160         }
7161 
7162         /* Check if the dest register is used after the ldr insn or is in live-out. */
7163         if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
7164             return;
7165         }
7166 
7167         /* load store pairs cannot have relocation */
7168         if (nextInsn->IsLoadStorePair() && insn.GetOperand(kInsnThirdOpnd).IsStImmediate()) {
7169             return;
7170         }
7171 
7172         auto &stImmOpnd = static_cast<StImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
7173         OfstOperand &offOpnd = aarch64CGFunc->GetOrCreateOfstOpnd(
7174             stImmOpnd.GetOffset() + memOpnd->GetOffsetImmediate()->GetOffsetValue(), k32BitSize);
7175 
7176         /* do not guarantee rodata alignment at Os */
7177         if (CGOptions::OptimizeForSize() && stImmOpnd.GetSymbol()->IsReadOnly()) {
7178             return;
7179         }
7180 
7181         /* avoid relocation */
7182         if ((offOpnd.GetValue() % static_cast<int8>(kBitsPerByte)) != 0) {
7183             return;
7184         }
7185 
7186         auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
7187         MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(
7188             MemOperand::kAddrModeLo12Li, memOpnd->GetSize(), &newBaseOpnd, nullptr, &offOpnd, stImmOpnd.GetSymbol());
7189         if (!aarch64CGFunc->IsOperandImmValid(nextMop, &newMemOpnd, nextInsn->GetMemOpndIdx())) {
7190             return;
7191         }
7192 
7193         if (cgFunc.GetMirModule().IsCModule()) {
7194             Insn *prevInsn = insn.GetPrev();
7195             MOperator prevMop = prevInsn->GetMachineOpcode();
7196             if (prevMop != MOP_xadrp) {
7197                 return;
7198             } else {
7199                 auto &prevStImmOpnd = static_cast<StImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
7200                 prevStImmOpnd.SetOffset(offOpnd.GetValue());
7201             }
7202         }
7203 
7204         nextInsn->SetMemOpnd(&newMemOpnd);
7205         bb.RemoveInsn(insn);
7206         CHECK_FATAL(!CGOptions::IsLazyBinding() || cgFunc.GetCG()->IsLibcore(),
7207                     "this pattern can't be found in this phase");
7208     }
7209 }
7210 
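/*
 * Illustrative sketch (hypothetical registers) of ComplexMemOperandPreAddAArch64::Run below:
 * fold a register-register add into the [base, index] addressing mode of the next load/store.
 *   add x2, x0, x1
 *   ldr w3, [x2]
 * =>
 *   ldr w3, [x0, x1]
 */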
7211 void ComplexMemOperandPreAddAArch64::Run(BB &bb, Insn &insn)
7212 {
7213     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
7214     Insn *nextInsn = insn.GetNextMachineInsn();
7215     if (nextInsn == nullptr) {
7216         return;
7217     }
7218     MOperator thisMop = insn.GetMachineOpcode();
7219     if (thisMop != MOP_xaddrrr && thisMop != MOP_waddrrr) {
7220         return;
7221     }
7222     MOperator nextMop = nextInsn->GetMachineOpcode();
7223     if (nextMop > 0 &&
7224         ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
7225         if (!IsMemOperandOptPattern(insn, *nextInsn)) {
7226             return;
7227         }
7228         MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
7229         auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
7230         if (newBaseOpnd.GetSize() != k64BitSize) {
7231             return;
7232         }
7233         auto &newIndexOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
7234         DEBUG_ASSERT(memOpnd != nullptr, "memOpnd should not be nullptr");
7235         if (newIndexOpnd.GetSize() <= k32BitSize) {
7236             MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
7237                                                                        &newBaseOpnd, &newIndexOpnd, 0, false);
7238             nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
7239         } else {
7240             auto *newOfstOpnd = &aarch64CGFunc->GetOrCreateOfstOpnd(0, k32BitSize);
7241             MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(
7242                 MemOperand::kAddrModeBOrX, memOpnd->GetSize(), &newBaseOpnd, &newIndexOpnd, newOfstOpnd, nullptr);
7243             nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
7244         }
7245         bb.RemoveInsn(insn);
7246     }
7247 }
7248 
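/*
 * Illustrative sketch (hypothetical registers) of ComplexMemOperandLSLAArch64::Run below:
 * fold an add-with-LSL into a scaled [base, index, LSL #n] memory operand, provided the shift
 * amount is compatible with the access size (see CheckShiftValid).
 *   add x2, x0, x1, LSL #2
 *   ldr w3, [x2]
 * =>
 *   ldr w3, [x0, x1, LSL #2]
 */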
7249 bool ComplexMemOperandLSLAArch64::CheckShiftValid(const Insn &insn, const BitShiftOperand &lsl) const
7250 {
7251     /* check if shift amount is valid */
7252     uint32 lslAmount = lsl.GetShiftAmount();
7253     constexpr uint8 twoShiftBits = 2;
7254     constexpr uint8 threeShiftBits = 3;
7255     uint32 memSize = insn.GetMemoryByteSize();
7256     if ((memSize == k4ByteSize && (lsl.GetShiftAmount() != 0 && lslAmount != twoShiftBits)) ||
7257         (memSize == k8ByteSize && (lsl.GetShiftAmount() != 0 && lslAmount != threeShiftBits))) {
7258         return false;
7259     }
7260     if (memSize != (k5BitSize << lslAmount)) {
7261         return false;
7262     }
7263     return true;
7264 }
7265 
7266 void ComplexMemOperandLSLAArch64::Run(BB &bb, Insn &insn)
7267 {
7268     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
7269     Insn *nextInsn = insn.GetNextMachineInsn();
7270     if (nextInsn == nullptr) {
7271         return;
7272     }
7273     MOperator thisMop = insn.GetMachineOpcode();
7274     if (thisMop != MOP_xaddrrrs) {
7275         return;
7276     }
7277     MOperator nextMop = nextInsn->GetMachineOpcode();
7278     if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
7279         /* Check if base register of nextInsn and the dest operand of insn are identical. */
7280         MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
7281         DEBUG_ASSERT(memOpnd != nullptr, "null ptr check");
7282 
7283         /* Only for AddrMode_B_OI addressing mode. */
7284         if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
7285             return;
7286         }
7287 
7288         /* Only when the immediate offset is 0. */
7289         if (memOpnd->GetOffsetImmediate()->GetOffsetValue() != 0) {
7290             return;
7291         }
7292 
7293         /* Only for intact memory addressing. */
7294         if (!memOpnd->IsIntactIndexed()) {
7295             return;
7296         }
7297 
7298         auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
7299 
7300         /* Check if dest operand of insn is identical with base register of nextInsn. */
7301         if (memOpnd->GetBaseRegister() != &regOpnd) {
7302             return;
7303         }
7304 
7305 #ifdef USE_32BIT_REF
7306         if (nextInsn->IsAccessRefField() && nextInsn->GetOperand(kInsnFirstOpnd).GetSize() > k32BitSize) {
7307             return;
7308         }
7309 #endif
7310 
7311         /* Check if the dest register is used after the ldr insn or is in live-out. */
7312         if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
7313             return;
7314         }
7315         auto &lsl = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
7316         if (!CheckShiftValid(*nextInsn, lsl)) {
7317             return;
7318         }
7319         auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
7320         auto &newIndexOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
7321         MemOperand &newMemOpnd =
7322             aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(), &newBaseOpnd,
7323                                               &newIndexOpnd, static_cast<int32>(lsl.GetShiftAmount()), false);
7324         nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
7325         bb.RemoveInsn(insn);
7326     }
7327 }
7328 
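/*
 * Illustrative sketch (hypothetical registers and label) of ComplexMemOperandLabelAArch64::Run
 * below: load the literal directly into the FP register instead of going through a GPR.
 *   ldr  x0, .L_literal
 *   fmov d0, x0
 * =>
 *   ldr  d0, .L_literal
 */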
7329 void ComplexMemOperandLabelAArch64::Run(BB &bb, Insn &insn)
7330 {
7331     Insn *nextInsn = insn.GetNextMachineInsn();
7332     if (nextInsn == nullptr) {
7333         return;
7334     }
7335     MOperator thisMop = insn.GetMachineOpcode();
7336     if (thisMop != MOP_xldli) {
7337         return;
7338     }
7339     MOperator nextMop = nextInsn->GetMachineOpcode();
7340     if (nextMop != MOP_xvmovdr) {
7341         return;
7342     }
7343     auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
7344     if (regOpnd.GetRegisterNumber() !=
7345         static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber()) {
7346         return;
7347     }
7348 
7349     /* Check if the dest register is used after the ldr insn or is in live-out. */
7350     if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
7351         return;
7352     }
7353     if (CGOptions::DoCGSSA()) {
7354         /* same as CombineFmovLdrPattern in ssa */
7355         CHECK_FATAL(false, "check this case in ssa");
7356     }
7357     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_dldli, nextInsn->GetOperand(kInsnFirstOpnd),
7358                                                        insn.GetOperand(kInsnSecondOpnd));
7359     bb.InsertInsnAfter(*nextInsn, newInsn);
7360     bb.RemoveInsn(insn);
7361     bb.RemoveInsn(*nextInsn);
7362 }
7363 
7364 static bool MayThrowBetweenInsn(const Insn &prevCallInsn, const Insn &currCallInsn)
7365 {
7366     for (Insn *insn = prevCallInsn.GetNext(); insn != nullptr && insn != &currCallInsn; insn = insn->GetNext()) {
7367         if (insn->MayThrow()) {
7368             return true;
7369         }
7370     }
7371     return false;
7372 }
7373 
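/*
 * Illustrative sketch (hypothetical virtual registers and offset) of the overall
 * WriteFieldCallPattern transformation, inferred from CheckCondition()/Run() below: when two
 * consecutive MCC_WriteRefField calls store into the same object and nothing in between may
 * throw, the second call is replaced by a plain store of the field value.
 *   ... first matched call kept as-is ...
 *   mov x0, x10              // objDesignateInsn
 *   add x11, x10, #16        // fieldDesignateInsn
 *   mov x1, x11              // fieldParamDefInsn
 *   mov x2, x12              // fieldValueDefInsn
 *   bl  MCC_WriteRefField
 * =>
 *   str x12, [x10, #16]      // marked as a reference-field access
 */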
7374 /*
7375  * mov R0, vreg1 / R0      -> objDesignateInsn
7376  * add vreg2, vreg1, #imm  -> fieldDesignateInsn
7377  * mov R1, vreg2           -> fieldParamDefInsn
7378  * mov R2, vreg3           -> fieldValueDefInsn
7379  */
7380 bool WriteFieldCallPattern::WriteFieldCallOptPatternMatch(const Insn &writeFieldCallInsn, WriteRefFieldParam &param)
7381 {
7382     Insn *fieldValueDefInsn = writeFieldCallInsn.GetPreviousMachineInsn();
7383     if (fieldValueDefInsn == nullptr || fieldValueDefInsn->GetMachineOpcode() != MOP_xmovrr) {
7384         return false;
7385     }
7386     Operand &fieldValueDefInsnDestOpnd = fieldValueDefInsn->GetOperand(kInsnFirstOpnd);
7387     auto &fieldValueDefInsnDestReg = static_cast<RegOperand &>(fieldValueDefInsnDestOpnd);
7388     if (fieldValueDefInsnDestReg.GetRegisterNumber() != R2) {
7389         return false;
7390     }
7391     paramDefInsns.emplace_back(fieldValueDefInsn);
7392     param.fieldValue = &(fieldValueDefInsn->GetOperand(kInsnSecondOpnd));
7393     Insn *fieldParamDefInsn = fieldValueDefInsn->GetPreviousMachineInsn();
7394     if (fieldParamDefInsn == nullptr || fieldParamDefInsn->GetMachineOpcode() != MOP_xmovrr) {
7395         return false;
7396     }
7397     Operand &fieldParamDestOpnd = fieldParamDefInsn->GetOperand(kInsnFirstOpnd);
7398     auto &fieldParamDestReg = static_cast<RegOperand &>(fieldParamDestOpnd);
7399     if (fieldParamDestReg.GetRegisterNumber() != R1) {
7400         return false;
7401     }
7402     paramDefInsns.emplace_back(fieldParamDefInsn);
7403     Insn *fieldDesignateInsn = fieldParamDefInsn->GetPreviousMachineInsn();
7404     if (fieldDesignateInsn == nullptr || fieldDesignateInsn->GetMachineOpcode() != MOP_xaddrri12) {
7405         return false;
7406     }
7407     Operand &fieldParamDefSrcOpnd = fieldParamDefInsn->GetOperand(kInsnSecondOpnd);
7408     Operand &fieldDesignateDestOpnd = fieldDesignateInsn->GetOperand(kInsnFirstOpnd);
7409     if (!RegOperand::IsSameReg(fieldParamDefSrcOpnd, fieldDesignateDestOpnd)) {
7410         return false;
7411     }
7412     Operand &fieldDesignateBaseOpnd = fieldDesignateInsn->GetOperand(kInsnSecondOpnd);
7413     param.fieldBaseOpnd = &(static_cast<RegOperand &>(fieldDesignateBaseOpnd));
7414     auto &immOpnd = static_cast<ImmOperand &>(fieldDesignateInsn->GetOperand(kInsnThirdOpnd));
7415     param.fieldOffset = immOpnd.GetValue();
7416     paramDefInsns.emplace_back(fieldDesignateInsn);
7417     Insn *objDesignateInsn = fieldDesignateInsn->GetPreviousMachineInsn();
7418     if (objDesignateInsn == nullptr || objDesignateInsn->GetMachineOpcode() != MOP_xmovrr) {
7419         return false;
7420     }
7421     Operand &objDesignateDestOpnd = objDesignateInsn->GetOperand(kInsnFirstOpnd);
7422     auto &objDesignateDestReg = static_cast<RegOperand &>(objDesignateDestOpnd);
7423     if (objDesignateDestReg.GetRegisterNumber() != R0) {
7424         return false;
7425     }
7426     Operand &objDesignateSrcOpnd = objDesignateInsn->GetOperand(kInsnSecondOpnd);
7427     if (RegOperand::IsSameReg(objDesignateDestOpnd, objDesignateSrcOpnd) ||
7428         !RegOperand::IsSameReg(objDesignateSrcOpnd, fieldDesignateBaseOpnd)) {
7429         return false;
7430     }
7431     param.objOpnd = &(objDesignateInsn->GetOperand(kInsnSecondOpnd));
7432     paramDefInsns.emplace_back(objDesignateInsn);
7433     return true;
7434 }
7435 
7436 bool WriteFieldCallPattern::IsWriteRefFieldCallInsn(const Insn &insn) const
7437 {
7438     if (!insn.IsCall() || insn.GetMachineOpcode() == MOP_xblr) {
7439         return false;
7440     }
7441     Operand *targetOpnd = insn.GetCallTargetOperand();
7442     DEBUG_ASSERT(targetOpnd != nullptr, "targetOpnd must not be nullptr");
7443     if (!targetOpnd->IsFuncNameOpnd()) {
7444         return false;
7445     }
7446     auto *target = static_cast<FuncNameOperand *>(targetOpnd);
7447     const MIRSymbol *funcSt = target->GetFunctionSymbol();
7448     DEBUG_ASSERT(funcSt->GetSKind() == kStFunc, "the kind of funcSt is unreasonable");
7449     const std::string &funcName = funcSt->GetName();
7450     return funcName == "MCC_WriteRefField" || funcName == "MCC_WriteVolatileField";
7451 }
7452 
7453 bool WriteFieldCallPattern::CheckCondition(Insn &insn)
7454 {
7455     nextInsn = insn.GetNextMachineInsn();
7456     if (nextInsn == nullptr) {
7457         return false;
7458     }
7459     if (!IsWriteRefFieldCallInsn(insn)) {
7460         return false;
7461     }
7462     if (!hasWriteFieldCall) {
7463         if (!WriteFieldCallOptPatternMatch(insn, firstCallParam)) {
7464             return false;
7465         }
7466         prevCallInsn = &insn;
7467         hasWriteFieldCall = true;
7468         return false;
7469     }
7470     if (!WriteFieldCallOptPatternMatch(insn, currentCallParam)) {
7471         return false;
7472     }
7473     if (prevCallInsn == nullptr || MayThrowBetweenInsn(*prevCallInsn, insn)) {
7474         return false;
7475     }
7476     if (firstCallParam.objOpnd == nullptr || currentCallParam.objOpnd == nullptr ||
7477         currentCallParam.fieldBaseOpnd == nullptr) {
7478         return false;
7479     }
7480     if (!RegOperand::IsSameReg(*firstCallParam.objOpnd, *currentCallParam.objOpnd)) {
7481         return false;
7482     }
7483     return true;
7484 }
7485 
7486 void WriteFieldCallPattern::Run(BB &bb, Insn &insn)
7487 {
7488     paramDefInsns.clear();
7489     if (!CheckCondition(insn)) {
7490         return;
7491     }
7492     auto *aarCGFunc = static_cast<AArch64CGFunc *>(cgFunc);
7493     MemOperand &addr =
7494         aarCGFunc->CreateMemOpnd(*currentCallParam.fieldBaseOpnd, currentCallParam.fieldOffset, k64BitSize);
7495     Insn &strInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xstr, *currentCallParam.fieldValue, addr);
7496     strInsn.AppendComment("store reference field");
7497     strInsn.MarkAsAccessRefField(true);
7498     bb.InsertInsnAfter(insn, strInsn);
7499     for (Insn *paramDefInsn : paramDefInsns) {
7500         bb.RemoveInsn(*paramDefInsn);
7501     }
7502     bb.RemoveInsn(insn);
7503     prevCallInsn = &strInsn;
7504     nextInsn = strInsn.GetNextMachineInsn();
7505 }
7506 
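/*
 * RemoveDecRefPattern: a call to MCC_DecRef_NaiveRCFast whose argument is
 * statically zero does nothing useful, so both the argument move and the call
 * can be deleted. Illustrative example (register numbers are arbitrary):
 *   mov x0, xzr              // or: mov x0, #0
 *   bl  MCC_DecRef_NaiveRCFast
 * =>
 *   <both instructions removed>
 */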
7507 bool RemoveDecRefPattern::CheckCondition(Insn &insn)
7508 {
7509     if (insn.GetMachineOpcode() != MOP_xbl) {
7510         return false;
7511     }
7512     auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
7513     if (target.GetName() != "MCC_DecRef_NaiveRCFast") {
7514         return false;
7515     }
7516     prevInsn = insn.GetPreviousMachineInsn();
7517     if (prevInsn == nullptr) {
7518         return false;
7519     }
7520     MOperator mopMov = prevInsn->GetMachineOpcode();
7521     if ((mopMov != MOP_xmovrr && mopMov != MOP_xmovri64) ||
7522         static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R0) {
7523         return false;
7524     }
7525     Operand &srcOpndOfMov = prevInsn->GetOperand(kInsnSecondOpnd);
7526     if (!IsZeroRegister(srcOpndOfMov) &&
7527         !(srcOpndOfMov.IsImmediate() && static_cast<ImmOperand &>(srcOpndOfMov).GetValue() == 0)) {
7528         return false;
7529     }
7530     return true;
7531 }
7532 
7533 void RemoveDecRefPattern::Run(BB &bb, Insn &insn)
7534 {
7535     if (!CheckCondition(insn)) {
7536         return;
7537     }
7538     bb.RemoveInsn(*prevInsn);
7539     bb.RemoveInsn(insn);
7540 }
7541 
7542 /*
7543  * We optimize the following pattern in this function:
7544  * and x1, x1, #imm (#imm is a power of 2, i.e. 2^n)
7545  * cbz/cbnz x1, .label
7546  * =>
7547  * and x1, x1, #imm (#imm is a power of 2, i.e. 2^n)
7548  * tbz/tbnz x1, #n, .label
7549  */
7550 void OneHoleBranchesAArch64::Run(BB &bb, Insn &insn)
7551 {
7552     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
7553     if (&insn != bb.GetLastInsn()) {
7554         return;
7555     }
7556     /* check cbz/cbnz insn */
7557     MOperator thisMop = insn.GetMachineOpcode();
7558     if (thisMop != MOP_wcbz && thisMop != MOP_wcbnz && thisMop != MOP_xcbz && thisMop != MOP_xcbnz) {
7559         return;
7560     }
7561     /* check and insn */
7562     Insn *prevInsn = insn.GetPreviousMachineInsn();
7563     if (prevInsn == nullptr) {
7564         return;
7565     }
7566     MOperator prevMop = prevInsn->GetMachineOpcode();
7567     if (prevMop != MOP_wandrri12 && prevMop != MOP_xandrri13) {
7568         return;
7569     }
7570     /* check the operands of the two insns */
7571     if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
7572         return;
7573     }
7574     auto &imm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
7575     int n = LogValueAtBase2(imm.GetValue());
7576     if (n < 0) {
7577         return;
7578     }
7579 
7580     /* replace insn */
7581     auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
7582     MOperator newOp = MOP_undef;
7583     switch (thisMop) {
7584         case MOP_wcbz:
7585             newOp = MOP_wtbz;
7586             break;
7587         case MOP_wcbnz:
7588             newOp = MOP_wtbnz;
7589             break;
7590         case MOP_xcbz:
7591             newOp = MOP_xtbz;
7592             break;
7593         case MOP_xcbnz:
7594             newOp = MOP_xtbnz;
7595             break;
7596         default:
7597             CHECK_FATAL(false, "can not touch here");
7598             break;
7599     }
7600     ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
7601     (void)bb.InsertInsnAfter(
7602         insn, cgFunc.GetInsnBuilder()->BuildInsn(newOp, prevInsn->GetOperand(kInsnSecondOpnd), oneHoleOpnd, label));
7603     bb.RemoveInsn(insn);
7604 }
7605 
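/*
 * ReplaceIncDecWithIncPattern: when the decrement argument (x1) of
 * MCC_IncDecRef_NaiveRCFast is the zero register, the combined inc/dec call
 * degenerates to a pure increment, so it is retargeted to
 * MCC_IncRef_NaiveRCFast. Illustrative example:
 *   mov x1, xzr
 *   bl  MCC_IncDecRef_NaiveRCFast
 * =>
 *   bl  MCC_IncRef_NaiveRCFast
 */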
7606 bool ReplaceIncDecWithIncPattern::CheckCondition(Insn &insn)
7607 {
7608     if (insn.GetMachineOpcode() != MOP_xbl) {
7609         return false;
7610     }
7611     target = &static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
7612     if (target->GetName() != "MCC_IncDecRef_NaiveRCFast") {
7613         return false;
7614     }
7615     prevInsn = insn.GetPreviousMachineInsn();
7616     if (prevInsn == nullptr) {
7617         return false;
7618     }
7619     MOperator mopMov = prevInsn->GetMachineOpcode();
7620     if (mopMov != MOP_xmovrr) {
7621         return false;
7622     }
7623     if (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R1 ||
7624         !IsZeroRegister(prevInsn->GetOperand(kInsnSecondOpnd))) {
7625         return false;
7626     }
7627     return true;
7628 }
7629 
7630 void ReplaceIncDecWithIncPattern::Run(BB &bb, Insn &insn)
7631 {
7632     if (!CheckCondition(insn)) {
7633         return;
7634     }
7635     std::string funcName = "MCC_IncRef_NaiveRCFast";
7636     GStrIdx strIdx = GlobalTables::GetStrTable().GetStrIdxFromName(funcName);
7637     MIRSymbol *st = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx, true);
7638     if (st == nullptr) {
7639         LogInfo::MapleLogger() << "WARNING: Replace IncDec With Inc fail due to no MCC_IncRef_NaiveRCFast func\n";
7640         return;
7641     }
7642     bb.RemoveInsn(*prevInsn);
7643     target->SetFunctionSymbol(*st);
7644 }
7645 
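/*
 * AndCmpBranchesToTbzAArch64: fold an and/cmp/beq(bne) sequence into a single
 * test-bit branch when the "and" mask is a power of two (2^n) and neither the
 * flag register nor the compared register is live out. Illustrative example
 * (register numbers are arbitrary):
 *   and w1, w0, #4
 *   cmp w1, #0
 *   beq .label
 * =>
 *   tbz w0, #2, .label
 * (with "cmp w1, #4" instead of "#0", beq becomes tbnz and bne becomes tbz)
 */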
7646 void AndCmpBranchesToTbzAArch64::Run(BB &bb, Insn &insn)
7647 {
7648     AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
7649     if (&insn != bb.GetLastInsn()) {
7650         return;
7651     }
7652     MOperator mopB = insn.GetMachineOpcode();
7653     if (mopB != MOP_beq && mopB != MOP_bne) {
7654         return;
7655     }
7656     auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
7657     /* get the instruction before bne/beq; it is expected to be a cmp. */
7658     Insn *prevInsn = insn.GetPreviousMachineInsn();
7659     if (prevInsn == nullptr) {
7660         return;
7661     }
7662     MOperator prevMop = prevInsn->GetMachineOpcode();
7663     if (prevMop != MOP_wcmpri && prevMop != MOP_xcmpri) {
7664         return;
7665     }
7666 
7667     /* get the instruction before "cmp"; it is expected to be an "and". */
7668     Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
7669     if (prevPrevInsn == nullptr) {
7670         return;
7671     }
7672     MOperator mopAnd = prevPrevInsn->GetMachineOpcode();
7673     if (mopAnd != MOP_wandrri12 && mopAnd != MOP_xandrri13) {
7674         return;
7675     }
7676 
7677     /*
7678      * check operands
7679      *
7680      * the register compared by "cmp" must be the destination register of "and".
7681      */
7682     if (&(prevInsn->GetOperand(kInsnSecondOpnd)) != &(prevPrevInsn->GetOperand(kInsnFirstOpnd))) {
7683         return;
7684     }
7685 
7686     uint32 opndIdx = 2;
7687     if (!prevPrevInsn->GetOperand(opndIdx).IsIntImmediate() || !prevInsn->GetOperand(opndIdx).IsIntImmediate()) {
7688         return;
7689     }
7690     auto &immAnd = static_cast<ImmOperand &>(prevPrevInsn->GetOperand(opndIdx));
7691     auto &immCmp = static_cast<ImmOperand &>(prevInsn->GetOperand(opndIdx));
7692     if (immCmp.GetValue() == 0) {
7693         int n = LogValueAtBase2(immAnd.GetValue());
7694         if (n < 0) {
7695             return;
7696         }
7697         /* check whether the flag register and the compared register are live afterwards. */
7698         auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
7699         auto &cmpReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
7700         if (FindRegLiveOut(flagReg, *prevInsn->GetBB()) || FindRegLiveOut(cmpReg, *prevInsn->GetBB())) {
7701             return;
7702         }
7703         MOperator mopNew = MOP_undef;
7704         switch (mopB) {
7705             case MOP_beq:
7706                 if (mopAnd == MOP_wandrri12) {
7707                     mopNew = MOP_wtbz;
7708                 } else if (mopAnd == MOP_xandrri13) {
7709                     mopNew = MOP_xtbz;
7710                 }
7711                 break;
7712             case MOP_bne:
7713                 if (mopAnd == MOP_wandrri12) {
7714                     mopNew = MOP_wtbnz;
7715                 } else if (mopAnd == MOP_xandrri13) {
7716                     mopNew = MOP_xtbnz;
7717                 }
7718                 break;
7719             default:
7720                 CHECK_FATAL(false, "expects beq or bne insn");
7721                 break;
7722         }
7723         ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
7724         (void)bb.InsertInsnAfter(
7725             insn, cgFunc.GetInsnBuilder()->BuildInsn(mopNew, prevPrevInsn->GetOperand(kInsnSecondOpnd), newImm, label));
7726         bb.RemoveInsn(insn);
7727         bb.RemoveInsn(*prevInsn);
7728         bb.RemoveInsn(*prevPrevInsn);
7729     } else {
7730         int n = LogValueAtBase2(immAnd.GetValue());
7731         int m = LogValueAtBase2(immCmp.GetValue());
7732         if (n < 0 || m < 0 || n != m) {
7733             return;
7734         }
7735         /* check whether the flag register and the compared register are live afterwards. */
7736         auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
7737         auto &cmpReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
7738         if (FindRegLiveOut(flagReg, *prevInsn->GetBB()) || FindRegLiveOut(cmpReg, *prevInsn->GetBB())) {
7739             return;
7740         }
7741         MOperator mopNew = MOP_undef;
7742         switch (mopB) {
7743             case MOP_beq:
7744                 if (mopAnd == MOP_wandrri12) {
7745                     mopNew = MOP_wtbnz;
7746                 } else if (mopAnd == MOP_xandrri13) {
7747                     mopNew = MOP_xtbnz;
7748                 }
7749                 break;
7750             case MOP_bne:
7751                 if (mopAnd == MOP_wandrri12) {
7752                     mopNew = MOP_wtbz;
7753                 } else if (mopAnd == MOP_xandrri13) {
7754                     mopNew = MOP_xtbz;
7755                 }
7756                 break;
7757             default:
7758                 CHECK_FATAL(false, "expects beq or bne insn");
7759                 break;
7760         }
7761         ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
7762         (void)bb.InsertInsnAfter(
7763             insn, cgFunc.GetInsnBuilder()->BuildInsn(mopNew, prevPrevInsn->GetOperand(kInsnSecondOpnd), newImm, label));
7764         bb.RemoveInsn(insn);
7765         bb.RemoveInsn(*prevInsn);
7766         bb.RemoveInsn(*prevPrevInsn);
7767     }
7768 }
7769 
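/*
 * RemoveSxtBeforeStrAArch64: a sign-extension is redundant when its result is
 * only consumed by a narrowing store of the same width. Illustrative example
 * (register numbers are arbitrary):
 *   sxth w1, w1
 *   strh w1, [x2]
 * =>
 *   strh w1, [x2]
 */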
7770 void RemoveSxtBeforeStrAArch64::Run(BB &bb, Insn &insn)
7771 {
7772     MOperator mop = insn.GetMachineOpcode();
7773     Insn *prevInsn = insn.GetPreviousMachineInsn();
7774     if (prevInsn == nullptr) {
7775         return;
7776     }
7777     MOperator prevMop = prevInsn->GetMachineOpcode();
7778     if (!(mop == MOP_wstrh && prevMop == MOP_xsxth32) && !(mop == MOP_wstrb && prevMop == MOP_xsxtb32)) {
7779         return;
7780     }
7781     auto &prevOpnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
7782     if (IfOperandIsLiveAfterInsn(prevOpnd0, insn)) {
7783         return;
7784     }
7785     auto &prevOpnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
7786     regno_t prevRegNO0 = prevOpnd0.GetRegisterNumber();
7787     regno_t prevRegNO1 = prevOpnd1.GetRegisterNumber();
7788     regno_t regNO0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
7789     if (prevRegNO0 != prevRegNO1) {
7790         return;
7791     }
7792     if (prevRegNO0 == regNO0) {
7793         bb.RemoveInsn(*prevInsn);
7794         return;
7795     }
7796     insn.SetOperand(0, prevOpnd1);
7797     bb.RemoveInsn(*prevInsn);
7798 }
7799 
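/*
 * UbfxToUxtwPattern: ubfx with lsb #0 and width #32 is simply a 32-bit
 * zero-extension. Illustrative example (register numbers are arbitrary):
 *   ubfx x1, x2, #0, #32
 * =>
 *   uxtw x1, w2
 */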
7800 bool UbfxToUxtwPattern::CheckCondition(Insn &insn)
7801 {
7802     ImmOperand &imm0 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
7803     ImmOperand &imm1 = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
7804     if ((imm0.GetValue() != 0) || (imm1.GetValue() != k32BitSize)) {
7805         return false;
7806     }
7807     return true;
7808 }
7809 
7810 void UbfxToUxtwPattern::Run(BB &bb, Insn &insn)
7811 {
7812     if (!CheckCondition(insn)) {
7813         return;
7814     }
7815     Insn *newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(MOP_xuxtw64, insn.GetOperand(kInsnFirstOpnd),
7816                                                          insn.GetOperand(kInsnSecondOpnd));
7817     bb.ReplaceInsn(insn, *newInsn);
7818     SetCurrInsn(newInsn);
7819     optSuccess = true;
7820     if (CG_PEEP_DUMP) {
7821         std::vector<Insn *> prevs;
7822         prevs.emplace_back(&insn);
7823         DumpAfterPattern(prevs, newInsn, nullptr);
7824     }
7825 }
7826 
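/*
 * NormRevTbzToTbzPattern: a byte-reverse (rev16/rev) followed by a test-bit
 * branch on its result can instead test the corresponding bit of the original
 * value, since rev only permutes bytes. Illustrative example (register
 * numbers are arbitrary):
 *   rev16 w1, w0
 *   tbz   w1, #3, .label
 * =>
 *   tbz   w0, #11, .label
 */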
7827 bool NormRevTbzToTbzPattern::CheckCondition(Insn &insn)
7828 {
7829     auto &revReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
7830     for (Insn *nextInsn = insn.GetNextMachineInsn(); nextInsn != nullptr; nextInsn = nextInsn->GetNextMachineInsn()) {
7831         MOperator useMop = nextInsn->GetMachineOpcode();
7832         auto &useReg = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
7833         if ((useMop == MOP_wtbnz || useMop == MOP_xtbnz || useMop == MOP_wtbz || useMop == MOP_xtbz) &&
7834             useReg.Equals(revReg)) {
7835             if (IfOperandIsLiveAfterInsn(useReg, *nextInsn)) {
7836                 return false;
7837             }
7838             tbzInsn = nextInsn;
7839             return true;
7840         }
7841         uint32 opndSize = nextInsn->GetOperandSize();
7842         for (uint32 i = 0; i < opndSize; i++) {
7843             auto &duOpnd = nextInsn->GetOperand(i);
7844             if (!duOpnd.IsRegister()) {
7845                 continue;
7846             }
7847             if ((static_cast<RegOperand &>(duOpnd)).GetRegisterNumber() != revReg.GetRegisterNumber()) {
7848                 continue;
7849             }
7850             return false;
7851         }
7852     }
7853     return false;
7854 }
7855 
7856 void NormRevTbzToTbzPattern::SetRev16Value(const uint32 &oldValue, uint32 &revValue) const
7857 {
7858     switch (oldValue / k8BitSize) {
7859         case k0BitSize:
7860         case k2BitSize:
7861         case k4BitSize:
7862         case k6BitSize:
7863             revValue = oldValue + k8BitSize;
7864             break;
7865         case k1BitSize:
7866         case k3BitSize:
7867         case k5BitSize:
7868         case k7BitSize:
7869             revValue = oldValue - k8BitSize;
7870             break;
7871         default:
7872             CHECK_FATAL(false, "revValue must be the above value");
7873     }
7874 }
7875 
7876 void NormRevTbzToTbzPattern::SetWrevValue(const uint32 &oldValue, uint32 &revValue) const
7877 {
7878     switch (oldValue / k8BitSize) {
7879         case k0BitSize: {
7880             revValue = oldValue + k24BitSize;
7881             break;
7882         }
7883         case k1BitSize: {
7884             revValue = oldValue + k8BitSize;
7885             break;
7886         }
7887         case k2BitSize: {
7888             revValue = oldValue - k8BitSize;
7889             break;
7890         }
7891         case k4BitSize: {
7892             revValue = oldValue - k24BitSize;
7893             break;
7894         }
7895         default:
7896             CHECK_FATAL(false, "revValue must be the above value");
7897     }
7898 }
7899 
7900 void NormRevTbzToTbzPattern::SetXrevValue(const uint32 &oldValue, uint32 &revValue) const
7901 {
7902     switch (oldValue / k8BitSize) {
7903         case k0BitSize:
7904             revValue = oldValue + k56BitSize;
7905             break;
7906         case k1BitSize:
7907             revValue = oldValue + k40BitSize;
7908             break;
7909         case k2BitSize:
7910             revValue = oldValue + k24BitSize;
7911             break;
7912         case k3BitSize:
7913             revValue = oldValue + k8BitSize;
7914             break;
7915         case k4BitSize:
7916             revValue = oldValue - k8BitSize;
7917             break;
7918         case k5BitSize:
7919             revValue = oldValue - k24BitSize;
7920             break;
7921         case k6BitSize:
7922             revValue = oldValue - k40BitSize;
7923             break;
7924         case k7BitSize:
7925             revValue = oldValue - k56BitSize;
7926             break;
7927         default:
7928             CHECK_FATAL(false, "revValue must be the above value");
7929     }
7930 }
7931 
7932 void NormRevTbzToTbzPattern::Run(BB &bb, Insn &insn)
7933 {
7934     if (!CheckCondition(insn)) {
7935         return;
7936     }
7937     auto &oldImmOpnd1 = static_cast<ImmOperand &>(tbzInsn->GetOperand(kInsnSecondOpnd));
7938     uint32 oldValue = static_cast<uint32>(oldImmOpnd1.GetValue());
7939     uint32 revValue = k0BitSize;
7940     MOperator curMop = insn.GetMachineOpcode();
7941     if (curMop == MOP_wrevrr16) {
7942         SetRev16Value(oldValue, revValue);
7943     } else if (curMop == MOP_wrevrr) {
7944         SetWrevValue(oldValue, revValue);
7945     } else if (curMop == MOP_xrevrr) {
7946         SetXrevValue(oldValue, revValue);
7947     }
7948     auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
7949     ImmOperand &newImmOpnd = aarFunc->CreateImmOperand(revValue, k6BitSize, false);
7950     MOperator useMop = tbzInsn->GetMachineOpcode();
7951     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(useMop, insn.GetOperand(kInsnSecondOpnd), newImmOpnd,
7952                                                         tbzInsn->GetOperand(kInsnThirdOpnd));
7953     if (!VERIFY_INSN(&newInsn)) {
7954         return;
7955     }
7956     bb.ReplaceInsn(*tbzInsn, newInsn);
7957     optSuccess = true;
7958     /* dump pattern info */
7959     if (CG_PEEP_DUMP) {
7960         std::vector<Insn *> prevs;
7961         (void)prevs.emplace_back(&insn);
7962         DumpAfterPattern(prevs, tbzInsn, &newInsn);
7963     }
7964 }
7965 
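/*
 * AddSubMergeLdStPattern: fold an add/sub of an immediate on a base register
 * into an adjacent load/store through that register, using a write-back
 * addressing form so the separate add/sub can be removed. Illustrative
 * examples (register numbers are arbitrary):
 *   add x1, x1, #16 ; ldr x0, [x1]   =>   ldr x0, [x1, #16]!   (pre-index)
 *   ldr x0, [x1] ; add x1, x1, #16   =>   ldr x0, [x1], #16    (post-index)
 */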
7966 Insn *AddSubMergeLdStPattern::FindRegInBB(const Insn &insn, bool isAbove) const
7967 {
7968     regno_t regNO = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
7969     for (Insn *resInsn = isAbove ? insn.GetPreviousMachineInsn() : insn.GetNextMachineInsn(); resInsn != nullptr;
7970          resInsn = isAbove ? resInsn->GetPreviousMachineInsn() : resInsn->GetNextMachineInsn()) {
7971         if (resInsn->GetDesc()->IsCall() || resInsn->GetDesc()->IsInlineAsm() ||
7972             resInsn->GetDesc()->IsSpecialIntrinsic()) {
7973             return nullptr;
7974         }
7975         if (resInsn->ScanReg(regNO)) {
7976             return resInsn;
7977         }
7978     }
7979     return nullptr;
7980 }
7981 
7982 bool AddSubMergeLdStPattern::CheckCondition(Insn &insn)
7983 {
7984     insnDefReg = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
7985     insnUseReg = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
7986     regno_t insnDefRegNO = insnDefReg->GetRegisterNumber();
7987     regno_t insnUseRegNO = insnUseReg->GetRegisterNumber();
7988     if (insnDefRegNO != insnUseRegNO) {
7989         return false;
7990     }
7991     // Do not combine x16 before the cgaggressiveopt phase.
7992     if (insnDefReg->IsPhysicalRegister() && insnDefRegNO == R16) {
7993         return false;
7994     }
7995     nextInsn = FindRegInBB(insn, false);
7996     prevInsn = FindRegInBB(insn, true);
7997     isAddSubFront = CheckIfCanBeMerged(nextInsn, insn);
7998     isLdStFront = CheckIfCanBeMerged(prevInsn, insn);
7999     // If both prev and next can be merged, merge only one; otherwise #imm would be applied twice.
8000     if (isAddSubFront && isLdStFront) {
8001         isLdStFront = false;
8002     }
8003     return isAddSubFront || isLdStFront;
8004 }
8005 
8006 bool AddSubMergeLdStPattern::CheckIfCanBeMerged(const Insn *adjacentInsn, const Insn & /* insn */)
8007 {
8008     if (adjacentInsn == nullptr || adjacentInsn->IsVectorOp() || (!adjacentInsn->AccessMem())) {
8009         return false;
8010     }
8011     Operand &opnd = adjacentInsn->IsLoadStorePair() ? adjacentInsn->GetOperand(kInsnThirdOpnd)
8012                                                     : adjacentInsn->GetOperand(kInsnSecondOpnd);
8013     if (opnd.GetKind() != Operand::kOpdMem) {
8014         return false;
8015     }
8016     MemOperand *memOpnd = &static_cast<MemOperand &>(opnd);
8017     // load/store memopnd offset value must be #0
8018     if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi || AArch64isa::GetMemOpndOffsetValue(memOpnd) != 0) {
8019         return false;
8020     }
8021     RegOperand *memUseReg = memOpnd->GetBaseRegister();
8022     regno_t insnDefRegNO = insnDefReg->GetRegisterNumber();
8023     regno_t memUseRegNO = memUseReg->GetRegisterNumber();
8024     if (insnDefRegNO != memUseRegNO) {
8025         return false;
8026     }
8027     // If the load/store data register is the same as the base register, the write-back form would be an unpredictable transfer.
8028     regno_t ldstDefRegNO0 = static_cast<RegOperand &>(adjacentInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
8029     if (ldstDefRegNO0 == memUseRegNO) {
8030         return false;
8031     }
8032     if (adjacentInsn->IsLoadStorePair()) {
8033         regno_t ldstDefRegNO1 =
8034             static_cast<RegOperand &>(adjacentInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
8035         if (ldstDefRegNO1 == memUseRegNO) {
8036             return false;
8037         }
8038     }
8039     return true;
8040 }
8041 
8042 void AddSubMergeLdStPattern::Run(BB &bb, Insn &insn)
8043 {
8044     if (!CheckCondition(insn)) {
8045         return;
8046     }
8047     insnToBeReplaced = isAddSubFront ? nextInsn : prevInsn;
8048     // isInsnAdd is true for add and false for sub.
8049     isInsnAdd = (insn.GetMachineOpcode() == MOP_xaddrri12);
8050     int64 immVal = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
8051     // The pre/post-index immediate must not be zero; with a #0 offset the assembly would emit a degenerate memopnd such as [x0]!.
8052     if (immVal == static_cast<int64>(k0BitSize)) {
8053         return;
8054     }
8055     Operand &opnd = insnToBeReplaced->IsLoadStorePair() ? insnToBeReplaced->GetOperand(kInsnThirdOpnd)
8056                                                         : insnToBeReplaced->GetOperand(kInsnSecondOpnd);
8057     MemOperand *memOpnd = &static_cast<MemOperand &>(opnd);
8058     ImmOperand &newImmOpnd =
8059         static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand((isInsnAdd ? immVal : (-immVal)), k64BitSize, true);
8060     MemOperand *newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
8061         MemOperand::kAddrModeBOi, memOpnd->GetSize(), *insnUseReg, nullptr, &newImmOpnd, nullptr);
8062     Insn *newInsn = nullptr;
8063     if (insnToBeReplaced->IsLoadStorePair()) {
8064         newInsn = &static_cast<AArch64CGFunc *>(cgFunc)->GetInsnBuilder()->BuildInsn(
8065             insnToBeReplaced->GetMachineOpcode(), insnToBeReplaced->GetOperand(kInsnFirstOpnd),
8066             insnToBeReplaced->GetOperand(kInsnSecondOpnd), *newMemOpnd);
8067     } else {
8068         newInsn = &static_cast<AArch64CGFunc *>(cgFunc)->GetInsnBuilder()->BuildInsn(
8069             insnToBeReplaced->GetMachineOpcode(), insnToBeReplaced->GetOperand(kInsnFirstOpnd), *newMemOpnd);
8070     }
8071     if (!VERIFY_INSN(newInsn)) {
8072         return;
8073     } else {
8074         // Both [RSP, #imm]! and [RSP], #imm must be marked as stack-defining.
8075         if (insnUseReg->GetRegisterNumber() == RSP) {
8076             newInsn->SetStackDef(true);
8077         }
8078         bb.ReplaceInsn(*insnToBeReplaced, *newInsn);
8079         bb.RemoveInsn(insn);
8080     }
8081 }
8082 
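/*
 * UbfxAndMergetPattern: combine a ubfx feeding another ubfx, or feeding an
 * "and" with a low-bit mask (which is an implicit ubfx), into one ubfx.
 * Illustrative example (register numbers are arbitrary):
 *   ubfx x1, x0, #8, #16
 *   and  x2, x1, #0xff       // equivalent to ubfx x2, x1, #0, #8
 * =>
 *   ubfx x2, x0, #8, #8
 */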
8083 void UbfxAndMergetPattern::Run(BB &bb, Insn &insn)
8084 {
8085     if (!CheckCondition(insn)) {
8086         return;
8087     }
8088     int64 newLsb = currLsb + prevLsb;
8089     int64 newWidth = std::min(prevWidth - currLsb, currWidth);
8090     if (newLsb + newWidth > k32BitSize) {
8091         newMop = MOP_xubfxrri6i6;
8092     }
8093     CHECK_FATAL(newLsb >= 0, "must be");
8094     CHECK_FATAL(newLsb + newWidth <= k64BitSize, "must be");
8095     Insn *newInsn = nullptr;
8096     if (newWidth <= 0) {
8097         // two insns have no overlap
8098         ImmOperand &zeroOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(0, k64BitSize, true);
8099         newInsn = &static_cast<AArch64CGFunc *>(cgFunc)->GetInsnBuilder()->BuildInsn(
8100             MOP_xmovri64, insn.GetOperand(kInsnFirstOpnd), zeroOpnd);
8101     } else {
8102         ImmOperand &newLsbImmOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(newLsb, k64BitSize, true);
8103         ImmOperand &newWidthImmOpnd =
8104             static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(newWidth, k64BitSize, true);
8105         newInsn = &static_cast<AArch64CGFunc *>(cgFunc)->GetInsnBuilder()->BuildInsn(
8106             newMop, insn.GetOperand(kInsnFirstOpnd), *prevSrc, newLsbImmOpnd, newWidthImmOpnd);
8107     }
8108     bb.ReplaceInsn(insn, *newInsn);
8109     if (ssaInfo) {
8110         ssaInfo->ReplaceInsn(insn, *newInsn);
8111     }
8112     return;
8113 }
8114 
8115 bool UbfxAndMergetPattern::CheckCondition(Insn &insn)
8116 {
8117     // and   def src imm
8118     // ubfx  def src imm imm
8119     auto &srcReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8120     Insn *prevInsn = ssaInfo->GetDefInsn(srcReg);
8121     // not in ssa form
8122     if (prevInsn == nullptr) {
8123         return false;
8124     }
8125     if (prevInsn->GetMachineOpcode() != MOP_xubfxrri6i6 && prevInsn->GetMachineOpcode() != MOP_wubfxrri5i5) {
8126         return false;
8127     }
8128     auto &prevLsbOperand = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
8129     auto &prevWidthOperand = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnFourthOpnd));
8130     prevSrc = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
8131     prevLsb = prevLsbOperand.GetValue();
8132     prevWidth = prevWidthOperand.GetValue();
8133     // do not propagate a physical register: its live range may cross a call.
8134     if (!prevSrc->IsSSAForm() || prevSrc->IsPhysicalRegister()) {
8135         return false;
8136     }
8137     if (insn.GetMachineOpcode() == MOP_xubfxrri6i6 || insn.GetMachineOpcode() == MOP_wubfxrri5i5) {
8138         auto &currLsbOperand = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8139         auto &currWidthOperand = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
8140         currLsb = currLsbOperand.GetValue();
8141         currWidth = currWidthOperand.GetValue();
8142         newMop = insn.GetMachineOpcode();
8143     } else if (insn.GetMachineOpcode() == MOP_xandrri13 || insn.GetMachineOpcode() == MOP_wandrri12) {
8144         // and R1, R0, #0xFF  ==  ubfx R1, R0, #0, #8
8145         auto &andImm = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8146         int64 andVal = andImm.GetValue();
8147         if (!IsAllOneToMSB(andVal)) {
8148             return false;
8149         }
8150         currLsb = 0;
8151         currWidth = GetMSB(andVal);
8152         newMop = insn.GetMachineOpcode() == MOP_xandrri13 ? MOP_xubfxrri6i6 : MOP_wubfxrri5i5;
8153     }
8154     return true;
8155 }
8156 
8157 bool UbfxAndMergetPattern::IsAllOneToMSB(int64 val) const
8158 {
8159     return ((static_cast<uint64>(val) + 1) & static_cast<uint64>(val)) == 0;
8160 }
8161 
8162 int32 UbfxAndMergetPattern::GetMSB(int64 val) const
8163 {
8164     return static_cast<int32>(k64BitSize - static_cast<uint32>(__builtin_clzll(static_cast<uint64>(val))));
8165 }
8166 
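/*
 * UbfxAndCbzToTbzPattern: a single-bit ubfx followed by cbz/cbnz on its result
 * is just a test of one bit of the source. Illustrative example (register
 * numbers are arbitrary):
 *   ubfx x1, x0, #5, #1
 *   cbnz x1, .label
 * =>
 *   tbnz x0, #5, .label
 */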
8167 void UbfxAndCbzToTbzPattern::Run(BB &bb, Insn &insn)
8168 {
8169     Operand &opnd2 = static_cast<Operand &>(insn.GetOperand(kInsnSecondOpnd));
8170     ImmOperand &imm3 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8171     if (!CheckCondition(insn)) {
8172         return;
8173     }
8174     auto &label = static_cast<LabelOperand &>(useInsn->GetOperand(kInsnSecondOpnd));
8175     MOperator nextMop = useInsn->GetMachineOpcode();
8176     switch (nextMop) {
8177         case MOP_wcbz:
8178         case MOP_xcbz:
8179             newMop = opnd2.GetSize() == k64BitSize ? MOP_xtbz : MOP_wtbz;
8180             break;
8181         case MOP_wcbnz:
8182         case MOP_xcbnz:
8183             newMop = opnd2.GetSize() == k64BitSize ? MOP_xtbnz : MOP_wtbnz;
8184             break;
8185         default:
8186             return;
8187     }
8188     if (newMop == MOP_undef) {
8189         return;
8190     }
8191     Insn *newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd2, imm3, label);
8192     if (!VERIFY_INSN(newInsn)) {
8193         return;
8194     }
8195     BB *useInsnBB = useInsn->GetBB();
8196     useInsnBB->ReplaceInsn(*useInsn, *newInsn);
8197     if (ssaInfo) {
8198         // update ssa info
8199         ssaInfo->ReplaceInsn(*useInsn, *newInsn);
8200     } else {
8201         useInsnBB->RemoveInsn(insn);
8202     }
8203     optSuccess = true;
8204     if (CG_PEEP_DUMP) {
8205         std::vector<Insn *> prevs;
8206         (void)prevs.emplace_back(useInsn);
8207         DumpAfterPattern(prevs, newInsn, nullptr);
8208     }
8209 }
8210 
8211 bool UbfxAndCbzToTbzPattern::CheckCondition(Insn &insn)
8212 {
8213     ImmOperand &imm4 = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
8214     RegOperand &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
8216     if (ssaInfo) {
8217         InsnSet useInsns = GetAllUseInsn(opnd1);
8218         if (useInsns.size() != 1) {
8219             return false;
8220         }
8221         useInsn = *useInsns.begin();
8222     } else {
8223         useInsn = insn.GetNextMachineInsn();
8224     }
8225     if (useInsn == nullptr) {
8226         return false;
8227     }
8228     if (!ssaInfo) {
8229         regno_t regNO1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
8230         regno_t regNO2 = static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
8231         if ((regNO1 != regNO2) ||
8232             IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)), *useInsn)) {
8233             return false;
8234         }
8235     }
8236     if (imm4.GetValue() == 1) {
8237         switch (useInsn->GetMachineOpcode()) {
8238             case MOP_wcbz:
8239             case MOP_xcbz:
8240             case MOP_wcbnz:
8241             case MOP_xcbnz:
8242                 return true;
8243             default:
8244                 break;
8245         }
8246     }
8247     return false;
8248 }
8249 
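/*
 * AddCmpZeroPattern: an add followed by a compare of its result against zero
 * can use the flag-setting form of the add instead, provided the flags are
 * only consumed by an EQ condition. Illustrative example (register numbers
 * are arbitrary):
 *   add x1, x2, x3
 *   cmp x1, #0
 *   beq .label
 * =>
 *   adds x1, x2, x3
 *   beq  .label
 */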
8250 bool AddCmpZeroPattern::CheckAddCmpZeroCheckAdd(const Insn &insn) const
8251 {
8252     MOperator mop = prevInsn->GetMachineOpcode();
8253     switch (mop) {
8254         case MOP_xaddrrr:
8255         case MOP_waddrrr:
8256         case MOP_xaddrrrs:
8257         case MOP_waddrrrs: {
8258             RegOperand opnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
8259             RegOperand opnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8260             if (opnd0.Equals(opnd) && insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize() ==
8261                                           prevInsn->GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize()) {
8262                 return true;
8263             } else {
8264                 return false;
8265             }
8266         }
8267         case MOP_waddrri12:
8268         case MOP_xaddrri12: {
8269             RegOperand opnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
8270             RegOperand opnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8271             if (!(opnd0.Equals(opnd) && insn.GetDesc()->GetOpndDes(kInsnSecondOpnd)->GetSize() ==
8272                                             prevInsn->GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize())) {
8273                 return false;
8274             }
8275             auto &immOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
8276             auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
8277             if (aarch64CGFunc->IsOperandImmValid(prevInsn->GetMachineOpcode(), &immOpnd, kInsnThirdOpnd)) {
8278                 return true;
8279             } else {
8280                 return false;
8281             }
8282         }
8283         default:
8284             break;
8285     }
8286     return false;
8287 }
8288 
8289 bool AddCmpZeroPattern::CheckAddCmpZeroContinue(const Insn &insn, const RegOperand &opnd) const
8290 {
8291     // check whether insn redefines the target register or the status register
8292     if (insn.GetDesc()->IsCall() || insn.GetDesc()->IsSpecialCall()) {
8293         return false;
8294     }
8295     for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
8296         if (insn.GetDesc()->GetOpndDes(i) == &OpndDesc::CCS) {
8297             return false;
8298         }
8299         if (insn.GetOperand(i).IsRegister()) {
8300             RegOperand &opnd0 = static_cast<RegOperand &>(insn.GetOperand(i));
8301             if (insn.GetDesc()->GetOpndDes(i)->IsDef() && opnd0.RegNumEqual(opnd)) {
8302                 return false;
8303             }
8304         }
8305     }
8306     return true;
8307 }
8308 
8309 bool AddCmpZeroPattern::CheckCondition(Insn &insn)
8310 {
8311     auto &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8312     auto &opnd3 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8313     if (!opnd3.IsZero()) {
8314         return false;
8315     }
8316     prevInsn = insn.GetPrev();
8317     while (prevInsn != nullptr) {
8318         if (!prevInsn->IsMachineInstruction()) {
8319             prevInsn = prevInsn->GetPrev();
8320             continue;
8321         }
8322         if (CheckAddCmpZeroCheckAdd(insn)) {
8323             if (CheckAddCmpZeroCheckCond(insn)) {
8324                 return (prevInsn != nullptr);
8325             } else {
8326                 return false;
8327             }
8328         }
8329         if (!CheckAddCmpZeroContinue(*prevInsn, opnd2)) {
8330             return false;
8331         }
8332         prevInsn = prevInsn->GetPrev();
8333     }
8334     return (prevInsn != nullptr);
8335 }
8336 
8337 bool AddCmpZeroPattern::CheckAddCmpZeroCheckCond(const Insn &insn) const
8338 {
8339     Insn *nextInsn = insn.GetNext();
8340     while (nextInsn != nullptr) {
8341         if (!nextInsn->IsMachineInstruction()) {
8342             nextInsn = nextInsn->GetNext();
8343             continue;
8344         }
8345         for (uint32 i = 0; i < nextInsn->GetOperandSize(); ++i) {
8346             if (nextInsn->GetDesc()->GetOpndDes(i) == &OpndDesc::Cond) {
8347                 CondOperand &cond = static_cast<CondOperand &>(nextInsn->GetOperand(i));
8348                 if (cond.GetCode() == CC_EQ) {
8349                     return true;
8350                 } else {
8351                     return false;
8352                 }
8353             }
8354         }
8355         nextInsn = nextInsn->GetNext();
8356     }
8357     return false;
8358 }
8359 
8360 void AddCmpZeroPattern::Run(BB &bb, Insn &insn)
8361 {
8362     MOperator mop = insn.GetMachineOpcode();
8363     if (mop != MOP_wcmpri && mop != MOP_xcmpri) {
8364         return;
8365     }
8366     if (!CheckCondition(insn)) {
8367         return;
8368     }
8369 
8370     bool isAddShift = false;
8371     MOperator newMop = GetMopUpdateAPSR(prevInsn->GetMachineOpcode(), isAddShift);
8372     Insn *newInsn = nullptr;
8373     if (isAddShift) {
8374         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(
8375             newMop, insn.GetOperand(kInsnFirstOpnd), prevInsn->GetOperand(kInsnFirstOpnd),
8376             prevInsn->GetOperand(kInsnSecondOpnd), prevInsn->GetOperand(kInsnThirdOpnd),
8377             prevInsn->GetOperand(kInsnFourthOpnd));
8378     } else {
8379         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(
8380             newMop, insn.GetOperand(kInsnFirstOpnd), prevInsn->GetOperand(kInsnFirstOpnd),
8381             prevInsn->GetOperand(kInsnSecondOpnd), prevInsn->GetOperand(kInsnThirdOpnd));
8382     }
8383     bb.ReplaceInsn(*prevInsn, *newInsn);
8384     bb.RemoveInsn(insn);
8385     optSuccess = true;
8386     if (CG_PEEP_DUMP) {
8387         std::vector<Insn *> prevs;
8388         (void)prevs.emplace_back(prevInsn);
8389         DumpAfterPattern(prevs, newInsn, nullptr);
8390     }
8391 }
8392 
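/*
 * ComplexExtendWordLslPattern: a 32-to-64-bit extension followed by a left
 * shift of at most #32 can be expressed as a single sbfiz/ubfiz. Illustrative
 * example (register numbers are arbitrary):
 *   sxtw x1, w0
 *   lsl  x1, x1, #3
 * =>
 *   sbfiz x1, x0, #3, #32
 */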
8393 bool ComplexExtendWordLslPattern::CheckCondition(Insn &insn)
8394 {
8395     if (insn.GetMachineOpcode() != MOP_xsxtw64 && insn.GetMachineOpcode() != MOP_xuxtw64) {
8396         return false;
8397     }
8398     useInsn = insn.GetNextMachineInsn();
8399     if (useInsn == nullptr) {
8400         return false;
8401     }
8402     MOperator nextMop = useInsn->GetMachineOpcode();
8403     if (nextMop != MOP_xlslrri6) {
8404         return false;
8405     }
8406     return true;
8407 }
8408 
8409 void ComplexExtendWordLslPattern::Run(BB &bb, Insn &insn)
8410 {
8411     if (!CheckCondition(insn)) {
8412         return;
8413     }
8414     MOperator curMop = insn.GetMachineOpcode();
8415     auto &lslImmOpnd = static_cast<ImmOperand &>(useInsn->GetOperand(kInsnThirdOpnd));
8416     DEBUG_ASSERT(lslImmOpnd.GetValue() >= 0, "invalid immOpnd of lsl");
8417     if (lslImmOpnd.GetValue() > k32BitSize) {
8418         return;
8419     }
8420     auto &extDefOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
8421     auto &lslUseOpnd = static_cast<RegOperand &>(useInsn->GetOperand(kInsnSecondOpnd));
8422     regno_t extDefRegNO = extDefOpnd.GetRegisterNumber();
8423     regno_t lslUseRegNO = lslUseOpnd.GetRegisterNumber();
8424     if (extDefRegNO != lslUseRegNO || IfOperandIsLiveAfterInsn(extDefOpnd, *useInsn)) {
8425         return;
8426     }
8427 
8428     MOperator mopNew = (curMop == MOP_xsxtw64 ? MOP_xsbfizrri6i6 : MOP_xubfizrri6i6);
8429     auto &extUseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8430     auto &lslDefOpnd = static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd));
8431     ImmOperand &newImmOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(k32BitSize, k6BitSize, false);
8432     Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(mopNew, lslDefOpnd, extUseOpnd, lslImmOpnd, newImmOpnd);
8433     bb.RemoveInsn(*useInsn);
8434     bb.ReplaceInsn(insn, newInsn);
8435     optSuccess = true;
8436 }
8437 
8438 bool ComplexExtendWordLslAArch64::IsExtendWordLslPattern(const Insn &insn) const
8439 {
8440     Insn *nextInsn = insn.GetNext();
8441     if (nextInsn == nullptr) {
8442         return false;
8443     }
8444     MOperator nextMop = nextInsn->GetMachineOpcode();
8445     if (nextMop != MOP_xlslrri6) {
8446         return false;
8447     }
8448     return true;
8449 }
8450 
8451 void ComplexExtendWordLslAArch64::Run(BB &bb, Insn &insn)
8452 {
8453     if (!IsExtendWordLslPattern(insn)) {
8454         return;
8455     }
8456     MOperator mop = insn.GetMachineOpcode();
8457     Insn *nextInsn = insn.GetNext();
8458     auto &nextOpnd2 = static_cast<ImmOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
8459     if (nextOpnd2.GetValue() > k32BitSize) {
8460         return;
8461     }
8462     auto &opnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
8463     auto &nextOpnd1 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
8464     regno_t regNO0 = opnd0.GetRegisterNumber();
8465     regno_t nextRegNO1 = nextOpnd1.GetRegisterNumber();
8466     if (regNO0 != nextRegNO1 || IfOperandIsLiveAfterInsn(opnd0, *nextInsn)) {
8467         return;
8468     }
8469     auto &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8470     auto &nextOpnd0 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
8471     regno_t regNO1 = opnd1.GetRegisterNumber();
8472     cgFunc.InsertExtendSet(regNO1);
8473     MOperator mopNew = mop == MOP_xsxtw64 ? MOP_xsbfizrri6i6 : MOP_xubfizrri6i6;
8474     auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
8475     RegOperand &reg1 = aarch64CGFunc->GetOrCreateVirtualRegisterOperand(regNO1);
8476     ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(k32BitSize, k6BitSize, false);
8477     Insn &newInsnSbfiz = cgFunc.GetInsnBuilder()->BuildInsn(mopNew, nextOpnd0, reg1, nextOpnd2, newImm);
8478     bb.RemoveInsn(*nextInsn);
8479     bb.ReplaceInsn(insn, newInsnSbfiz);
8480 }
8481 
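/*
 * AddCmpZeroPatternSSA: SSA-form counterpart of AddCmpZeroPattern. The
 * defining add of the compared register is located through ssaInfo, and an
 * equivalent flag-setting adds is inserted after the cmp; this pattern itself
 * does not delete the original add/cmp.
 */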
8482 bool AddCmpZeroPatternSSA::CheckCondition(Insn &insn)
8483 {
8484     MOperator curMop = insn.GetMachineOpcode();
8485     if (curMop != MOP_wcmpri && curMop != MOP_xcmpri) {
8486         return false;
8487     }
8488     auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8489     if (!immOpnd.IsZero()) {
8490         return false;
8491     }
8492 
8493     auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
8494     prevAddInsn = ssaInfo->GetDefInsn(ccReg);
8495     if (prevAddInsn == nullptr) {
8496         return false;
8497     }
8498     MOperator prevAddMop = prevAddInsn->GetMachineOpcode();
8499     if (prevAddMop != MOP_xaddrrr && prevAddMop != MOP_xaddrri12 && prevAddMop != MOP_waddrrr &&
8500         prevAddMop != MOP_waddrri12 && prevAddMop != MOP_xaddrrrs && prevAddMop != MOP_waddrrrs) {
8501         return false;
8502     }
8503     Insn *nextInsn = insn.GetNext();
8504     while (nextInsn != nullptr) {
8505         if (!nextInsn->IsMachineInstruction()) {
8506             return false;
8507         }
8508         for (uint32 i = 0; i < nextInsn->GetOperandSize(); ++i) {
8509             if (nextInsn->GetDesc()->GetOpndDes(i) == &OpndDesc::Cond) {
8510                 CondOperand &cond = static_cast<CondOperand &>(nextInsn->GetOperand(i));
8511                 if (cond.GetCode() == CC_EQ) {
8512                     return true;
8513                 } else {
8514                     return false;
8515                 }
8516             }
8517         }
8518         nextInsn = nextInsn->GetNext();
8519     }
8520     return false;
8521 }
8522 
8523 void AddCmpZeroPatternSSA::Run(BB &bb, Insn &insn)
8524 {
8525     if (!CheckCondition(insn)) {
8526         return;
8527     }
8528     bool isShiftAdd = false;
8529     MOperator prevAddMop = prevAddInsn->GetMachineOpcode();
8530     MOperator newAddMop = GetMopUpdateAPSR(prevAddMop, isShiftAdd);
8531     DEBUG_ASSERT(newAddMop != MOP_undef, "unknown Add code");
8532     /*
8533      * Since a new operand cannot be defined through SSA ReplaceInsn, we must avoid matching this pattern again.
8534      * Because "adds" can only be inserted in this phase, a simple forward scan is sufficient.
8535      */
8536     Insn *nextInsn = insn.GetNext();
8537     while (nextInsn != nullptr) {
8538         if (!nextInsn->IsMachineInstruction()) {
8539             nextInsn = nextInsn->GetNext();
8540             continue;
8541         }
8542         MOperator nextMop = nextInsn->GetMachineOpcode();
8543         if (nextMop == newAddMop) {
8544             return;
8545         }
8546         nextInsn = nextInsn->GetNext();
8547     }
8548 
8549     Insn *newInsn = nullptr;
8550     Operand &rflag = insn.GetOperand(kInsnFirstOpnd);
8551     if (isShiftAdd) {
8552         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(
8553             newAddMop, rflag, prevAddInsn->GetOperand(kInsnFirstOpnd), prevAddInsn->GetOperand(kInsnSecondOpnd),
8554             prevAddInsn->GetOperand(kInsnThirdOpnd), prevAddInsn->GetOperand(kInsnFourthOpnd));
8555     } else {
8556         newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newAddMop, rflag, prevAddInsn->GetOperand(kInsnFirstOpnd),
8557                                                        prevAddInsn->GetOperand(kInsnSecondOpnd),
8558                                                        prevAddInsn->GetOperand(kInsnThirdOpnd));
8559     }
8560     bb.InsertInsnAfter(insn, *newInsn);
8561     /* update ssa info */
8562     auto *a64SSAInfo = static_cast<AArch64CGSSAInfo *>(ssaInfo);
8563     a64SSAInfo->CreateNewInsnSSAInfo(*newInsn);
8564     SetCurrInsn(newInsn);
8565 
8566     /* dump pattern info */
8567     if (CG_PEEP_DUMP) {
8568         std::vector<Insn *> prevs;
8569         prevs.emplace_back(prevAddInsn);
8570         prevs.emplace_back(&insn);
8571         DumpAfterPattern(prevs, newInsn, nullptr);
8572     }
8573 }
8574 
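/*
 * DeleteAndBeforeRevStrPattern: the 0xFFFF mask is redundant when the masked
 * value is only consumed by rev16 + strh, which store just the low 16 bits
 * anyway. Illustrative example (register numbers are arbitrary):
 *   and   w1, w0, #0xffff
 *   rev16 w1, w1
 *   strh  w1, [x2]
 * =>
 *   rev16 w1, w0
 *   strh  w1, [x2]
 */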
8575 bool DeleteAndBeforeRevStrPattern::CheckCondition(Insn &insn)
8576 {
8577     auto &andImmOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
8578     uint64 andImmValue = static_cast<uint64>(andImmOpnd.GetValue());
8579     if (andImmValue != 0xFFFFULL) {  // 0xFFFF: 16-bit mask
8580         return false;
8581     }
8582     auto &insnDefOpnd = insn.GetOperand(kInsnFirstOpnd);
8583     Insn *revInsn = insn.GetNextMachineInsn();
8584     if ((revInsn == nullptr) || (revInsn->GetMachineOpcode() != MOP_wrevrr16) ||
8585         (!RegOperand::IsSameRegNO(insnDefOpnd, revInsn->GetOperand(kInsnSecondOpnd)))) {
8586         return false;
8587     }
8588     Insn *strInsn = revInsn->GetNextMachineInsn();
8589     if ((strInsn == nullptr) || (strInsn->GetMachineOpcode() != MOP_wstrh) ||
8590         (!RegOperand::IsSameRegNO(revInsn->GetOperand(kInsnFirstOpnd), strInsn->GetOperand(kInsnFirstOpnd)))) {
8591         return false;
8592     }
8593     if ((!RegOperand::IsSameRegNO(insnDefOpnd, strInsn->GetOperand(kInsnFirstOpnd))) &&
8594         IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(insnDefOpnd), *strInsn)) {
8595         return false;
8596     }
8597     return true;
8598 }
8599 
8600 void DeleteAndBeforeRevStrPattern::Run(BB &bb, Insn &insn)
8601 {
8602     if (!CheckCondition(insn)) {
8603         return;
8604     }
8605     auto &insnUseOpnd = insn.GetOperand(kInsnSecondOpnd);
8606     Insn *nextInsn = insn.GetNextMachineInsn();
8607     nextInsn->SetOperand(kInsnSecondOpnd, insnUseOpnd);
8608     bb.RemoveInsn(insn);
8609 }
8610 } /* namespace maplebe */
8611