• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H
17 #define MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H
18 
19 #include <vector>
20 #include "peep.h"
21 #include "aarch64_isa.h"
22 #include "optimize_common.h"
23 #include "mir_builder.h"
24 
25 namespace maplebe {
26 class AArch64CGPeepHole : public CGPeepHole {
27 public:
28     /* normal constructor */
AArch64CGPeepHole(CGFunc & f,MemPool * memPool)29     AArch64CGPeepHole(CGFunc &f, MemPool *memPool) : CGPeepHole(f, memPool) {};
30     ~AArch64CGPeepHole() override = default;
31 
32     void Run() override;
33     void DoNormalOptimize(BB &bb, Insn &insn) override;
34 };
35 
/* ======== CGPeepPattern End ======== */
/*
 * Looks for identical mem insns to eliminate.
 * If two back-to-back insns are:
 * 1. str + str
 * 2. str + ldr
 * and the [MEM] operand is of the pattern [base + offset]:
 * 1. If the [MEM] operands are exactly the same, the first
 *    str can be eliminated.
 * 2. If the [MEM] operands are exactly the same and the src opnd
 *    of the str is the same as the dest opnd of the ldr, the
 *    ldr can be eliminated.
 */
class RemoveIdenticalLoadAndStoreAArch64 : public PeepPattern {
public:
    explicit RemoveIdenticalLoadAndStoreAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~RemoveIdenticalLoadAndStoreAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    /* True iff the mem operands of insn1 and insn2 match exactly. */
    bool IsMemOperandsIdentical(const Insn &insn1, const Insn &insn2) const;
};
58 
/* Removes a redundant mov whose src and dest opnds are exactly the same. */
class RemoveMovingtoSameRegPattern : public CGPeepPattern {
public:
    RemoveMovingtoSameRegPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn)
    {
    }
    ~RemoveMovingtoSameRegPattern() override = default;
    void Run(BB &bb, Insn &insn) override;
    bool CheckCondition(Insn &insn) override;
    std::string GetPatternName() override
    {
        return "RemoveMovingtoSameRegPattern";
    }
};
73 
/*
 * Folds a mov of a just-defined register into the defining insn:
 * add x0, x1, x2
 * mov x19, x0
 * ==>
 * add x19, x1, x2
 */
class RedundantMovAArch64 : public PeepPattern {
public:
    explicit RedundantMovAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~RedundantMovAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
86 
/* Removes a redundant mov whose src and dest opnds are exactly the same (PeepPattern variant). */
class RemoveMovingtoSameRegAArch64 : public PeepPattern {
public:
    explicit RemoveMovingtoSameRegAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~RemoveMovingtoSameRegAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
94 
/*
 * Combines {2 str into 1 stp || 2 ldr into 1 ldp || 2 strb into 1 strh || 2 strh into 1 str},
 * when they are back to back and the [MEM] they access is conjoined.
 */
class CombineContiLoadAndStorePattern : public CGPeepPattern {
public:
    CombineContiLoadAndStorePattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn)
        : CGPeepPattern(cgFunc, currBB, currInsn)
    {
        /* aggressive combining is only enabled for C modules */
        doAggressiveCombine = cgFunc.GetMirModule().IsCModule();
    }
    ~CombineContiLoadAndStorePattern() override = default;
    void Run(BB &bb, Insn &insn) override;
    bool CheckCondition(Insn &insn) override;
    std::string GetPatternName() override
    {
        return "CombineContiLoadAndStorePattern";
    }

private:
    /* Collects earlier str/ldr insns that are candidates for combining with insn. */
    std::vector<Insn *> FindPrevStrLdr(Insn &insn, regno_t destRegNO, regno_t memBaseRegNO, int64 baseOfst) const;
    /*
     * avoid the following situation:
     * str x2, [x19, #8]
     * mov x0, x19
     * bl foo (change memory)
     * str x21, [x19, #16]
     */
    bool IsRegNotSameMemUseInInsn(const Insn &checkInsn, const Insn &curInsn, regno_t curBaseRegNO, bool isCurStore,
                                  int64 curBaseOfst, int64 curMemRange) const;
    bool IsValidNormalLoadOrStorePattern(const Insn &insn, const Insn &prevInsn, const MemOperand &memOpnd,
                                         int64 curOfstVal, int64 prevOfstVal);
    bool IsValidStackArgLoadOrStorePattern(const Insn &curInsn, const Insn &prevInsn, const MemOperand &curMemOpnd,
                                           const MemOperand &prevMemOpnd, int64 curOfstVal, int64 prevOfstVal) const;
    /* Builds the replacement insn that accesses the combined mem operand. */
    Insn *GenerateMemPairInsn(MOperator newMop, RegOperand &curDestOpnd, RegOperand &prevDestOpnd,
                              MemOperand &combineMemOpnd, bool isCurDestFirst);
    bool FindUseX16AfterInsn(const Insn &curInsn) const;
    /* Deletes insn but keeps any comment attached to it. */
    void RemoveInsnAndKeepComment(BB &bb, Insn &insn, Insn &prevInsn) const;

    bool doAggressiveCombine = false; /* set from IsCModule() in the ctor */
    bool isPairAfterCombine = true;   /* whether the combined result is a pair (ldp/stp) insn */
};
137 
/*
 * add xt, xn, #imm               add  xt, xn, xm
 * ldr xd, [xt]                   ldr xd, [xt]
 * =====================>
 * ldr xd, [xn, #imm]             ldr xd, [xn, xm]
 *
 * load/store can do extend shift as well
 */
class EnhanceStrLdrAArch64 : public PeepPattern {
public:
    explicit EnhanceStrLdrAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~EnhanceStrLdrAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    /* NOTE(review): helper names suggest one handler per addressing mode
     * (BOI: base+imm-offset, BOrX: base+index-reg, with optional shift/extend) — confirm in the .cpp. */
    ImmOperand *GetInsnAddOrSubNewOffset(Insn &insn, ImmOperand &offset);
    void OptimizeAddrBOI(Insn &insn, MemOperand &memOpnd, Insn &prevInsn);
    void OptimizeAddrBOrX(Insn &insn, MemOperand &memOpnd, Insn &prevInsn);
    void OptimizeAddrBOrXShiftExtend(Insn &insn, MemOperand &memOpnd, Insn &shiftExtendInsn);
    void OptimizeWithAddrrrs(Insn &insn, MemOperand &memOpnd, Insn &addInsn);
    bool CheckOperandIsDeadFromInsn(const RegOperand &regOpnd, Insn &insn);
    void OptimizeAddrBOrXShift(MemOperand &memOpnd, Insn &insn);
};
161 
/* Eliminates the sxt[b|h|w] w0, w0 when w0 satisfies one of the following:
 * i)  mov w0, #imm (#imm is not out of range)
 * ii) ldrs[b|h] w0, [MEM]
 */
class EliminateSpecifcSXTAArch64 : public PeepPattern {
public:
    explicit EliminateSpecifcSXTAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~EliminateSpecifcSXTAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
172 
/* Eliminates the uxt[b|h|w] w0, w0 when w0 satisfies one of the following:
 * i)   mov w0, #imm (#imm is not out of range)
 * ii)  mov w0, R0 (is return value of call and return size is not out of range)
 * iii) w0 is defined and used by special load insn and uxt[] pattern
 */
class EliminateSpecifcUXTAArch64 : public PeepPattern {
public:
    explicit EliminateSpecifcUXTAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~EliminateSpecifcUXTAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
184 
185 /* fmov ireg1 <- freg1   previous insn
186  * fmov ireg2 <- freg1   current insn
187  * use  ireg2            may or may not be present
188  * =>
189  * fmov ireg1 <- freg1   previous insn
190  * mov  ireg2 <- ireg1   current insn
191  * use  ireg1            may or may not be present
192  */
193 class FmovRegPattern : public CGPeepPattern {
194 public:
FmovRegPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)195     FmovRegPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~FmovRegPattern()196     ~FmovRegPattern() override
197     {
198         prevInsn = nullptr;
199         nextInsn = nullptr;
200     }
201     void Run(BB &bb, Insn &insn) override;
202     bool CheckCondition(Insn &insn) override;
GetPatternName()203     std::string GetPatternName() override
204     {
205         return "FmovRegPattern";
206     }
207 
208 private:
209     Insn *prevInsn = nullptr;
210     Insn *nextInsn = nullptr;
211 };
212 
213 /* sbfx ireg1, ireg2, 0, 32
214  * use  ireg1.32
215  * =>
216  * sbfx ireg1, ireg2, 0, 32
217  * use  ireg2.32
218  */
219 class SbfxOptPattern : public CGPeepPattern {
220 public:
SbfxOptPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)221     SbfxOptPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~SbfxOptPattern()222     ~SbfxOptPattern() override
223     {
224         nextInsn = nullptr;
225     }
226     void Run(BB &bb, Insn &insn) override;
227     bool CheckCondition(Insn &insn) override;
GetPatternName()228     std::string GetPatternName() override
229     {
230         return "SbfxOptPattern";
231     }
232 
233 private:
234     Insn *nextInsn = nullptr;
235     bool toRemove = false;
236     std::vector<uint32> cands;
237 };
238 
239 /* cbnz x0, labelA
240  * mov x0, 0
241  * b  return-bb
242  * labelA:
243  * =>
244  * cbz x0, return-bb
245  * labelA:
246  */
247 class CbnzToCbzPattern : public CGPeepPattern {
248 public:
CbnzToCbzPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)249     CbnzToCbzPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~CbnzToCbzPattern()250     ~CbnzToCbzPattern() override
251     {
252         nextBB = nullptr;
253         movInsn = nullptr;
254         brInsn = nullptr;
255     }
256     void Run(BB &bb, Insn &insn) override;
257     bool CheckCondition(Insn &insn) override;
GetPatternName()258     std::string GetPatternName() override
259     {
260         return "CbnzToCbzPattern";
261     }
262 
263 private:
264     BB *nextBB = nullptr;
265     Insn *movInsn = nullptr;
266     Insn *brInsn = nullptr;
267 };
268 
/* i.   cset    w0, EQ
 *      cbnz    w0, .label    ===> beq .label
 *
 * ii.  cset    w0, EQ
 *      cbz    w0, .label     ===> bne .label
 *
 * iii. cset    w0, NE
 *      cbnz    w0, .label    ===> bne .label
 *
 * iv.  cset    w0, NE
 *      cbz    w0, .label     ===> beq .label
 * ... ...
 */
class CsetCbzToBeqOptAArch64 : public PeepPattern {
public:
    explicit CsetCbzToBeqOptAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~CsetCbzToBeqOptAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
    /* Maps a condition code (possibly inverted, for the cbz cases) to the branch MOperator. */
    MOperator SelectMOperator(ConditionCode condCode, bool inverse) const;
    void IntrinsicOptimize(BB &bb, Insn *preInsn, Insn &insn);
};
290 
291 /* When exist load after load or load after store, and [MEM] is
292  * totally same. Then optimize them.
293  */
294 class ContiLDRorSTRToSameMEMPattern : public CGPeepPattern {
295 public:
ContiLDRorSTRToSameMEMPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)296     ContiLDRorSTRToSameMEMPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn)
297     {
298     }
~ContiLDRorSTRToSameMEMPattern()299     ~ContiLDRorSTRToSameMEMPattern() override
300     {
301         prevInsn = nullptr;
302     }
303     void Run(BB &bb, Insn &insn) override;
304     bool HasImplicitSizeUse(const Insn &insn) const;
305     bool CheckCondition(Insn &insn) override;
GetPatternName()306     std::string GetPatternName() override
307     {
308         return "ContiLDRorSTRToSameMEMPattern";
309     }
310 
311 private:
312     Insn *prevInsn = nullptr;
313     bool loadAfterStore = false;
314     bool loadAfterLoad = false;
315 };
316 
/*
 * Optimize the following patterns:
 *  and  w0, w0, #imm  ====> tst  w0, #imm
 *  cmp  w0, #0              beq/bne  .label
 *  beq/bne  .label
 *
 *  and  x0, x0, #imm  ====> tst  x0, #imm
 *  cmp  x0, #0              beq/bne  .label
 *  beq/bne  .label
 */
class AndCmpBranchesToTstAArch64 : public PeepPattern {
public:
    explicit AndCmpBranchesToTstAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~AndCmpBranchesToTstAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
333 
/*
 * Optimize the following patterns:
 *  and  w0, w0, #imm  ====> tst  w0, #imm
 *  cbz/cbnz  .label         beq/bne  .label
 */
class AndCbzBranchesToTstAArch64 : public PeepPattern {
public:
    explicit AndCbzBranchesToTstAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~AndCbzBranchesToTstAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
345 
/*
 * Optimize the following patterns:
 *  and  w0, w0, #1  ====> and  w0, w0, #1
 *  cmp  w0, #1
 *  cset w0, EQ
 *
 *  and  w0, w0, #1  ====> and  w0, w0, #1
 *  cmp  w0, #0
 *  cset w0, NE
 *  ---------------------------------------------------
 *  and  w0, w0, #imm  ====> ubfx  w0, w0, pos, size
 *  cmp  w0, #imm
 *  cset w0, EQ
 *
 *  and  w0, w0, #imm  ====> ubfx  w0, w0, pos, size
 *  cmp  w0, #0
 *  cset w0, NE
 *  conditions:
 *  imm is a positive power of 2
 *
 *  ---------------------------------------------------
 *  and  w0, w0, #1  ====> and  wn, w0, #1
 *  cmp  w0, #1
 *  cset wn, EQ        # wn != w0 && w0 is not live after cset
 *
 *  and  w0, w0, #1  ====> and  wn, w0, #1
 *  cmp  w0, #0
 *  cset wn, NE        # wn != w0 && w0 is not live after cset
 *  ---------------------------------------------------
 *  and  w0, w0, #imm  ====> ubfx  wn, w0, pos, size
 *  cmp  w0, #imm
 *  cset wn, EQ        # wn != w0 && w0 is not live after cset
 *
 *  and  w0, w0, #imm  ====> ubfx  wn, w0, pos, size
 *  cmp  w0, #0
 *  cset wn, NE        # wn != w0 && w0 is not live after cset
 *  conditions:
 *  imm is a positive power of 2 and w0 is not live after cset
 */
class AndCmpBranchesToCsetAArch64 : public PeepPattern {
public:
    explicit AndCmpBranchesToCsetAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~AndCmpBranchesToCsetAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    /* Finds the cmp insn preceding insn (see .cpp for the exact search bounds). */
    Insn *FindPreviousCmp(Insn &insn) const;
};
394 
/*
 * We optimize the following pattern in this function
 * (sign test via bit 31 instead of compare-with-zero):
 * cmp w[0-9]*, wzr  ====> tbz w[0-9]*, #31, .label
 * bge .label
 *
 * cmp wzr, w[0-9]*  ====> tbz w[0-9]*, #31, .label
 * ble .label
 *
 * cmp w[0-9]*,wzr   ====> tbnz w[0-9]*, #31, .label
 * blt .label
 *
 * cmp wzr, w[0-9]*  ====> tbnz w[0-9]*, #31, .label
 * bgt .label
 *
 * cmp w[0-9]*, #0   ====> tbz w[0-9]*, #31, .label
 * bge .label
 *
 * cmp w[0-9]*, #0   ====> tbnz w[0-9]*, #31, .label
 * blt .label
 */
class ZeroCmpBranchesAArch64 : public PeepPattern {
public:
    explicit ZeroCmpBranchesAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~ZeroCmpBranchesAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
421 
/*
 *  cmp  w0, #0
 *  cset w1, NE --> mov w1, w0
 *
 *  cmp  w0, #0
 *  cset w1, EQ --> eor w1, w0, 1
 *
 *  cmp  w0, #1
 *  cset w1, NE --> eor w1, w0, 1
 *
 *  cmp  w0, #1
 *  cset w1, EQ --> mov w1, w0
 *
 *  cmp w0,  #0
 *  cset w0, NE -->null
 *
 *  cmp w0, #1
 *  cset w0, EQ -->null
 *
 *  conditions:
 *    1. the first operand of the cmp instruction must have only one valid bit
 *    2. the second operand of the cmp instruction must be 0 or 1
 *    3. the flag register of the cmp instruction must not be used later
 */
class CmpCsetAArch64 : public PeepPattern {
public:
    explicit CmpCsetAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~CmpCsetAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    /* Validates condition 1: the def point(s) of the checked operand. */
    bool CheckOpndDefPoints(Insn &checkInsn, int opndIdx);
    Insn const *DefInsnOfOperandInBB(const Insn &startInsn, const Insn &checkInsn, int opndIdx) const;
    bool OpndDefByOneValidBit(const Insn &defInsn) const;
    /* Validates condition 3: whether the flag register is read later in bb. */
    bool FlagUsedLaterInCurBB(const BB &bb, Insn &startInsn) const;
};
458 
/*
 *  add     x0, x1, x0
 *  ldr     x2, [x0]
 *  ==>
 *  ldr     x2, [x1, x0]
 */
class ComplexMemOperandAddAArch64 : public PeepPattern {
public:
    explicit ComplexMemOperandAddAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~ComplexMemOperandAddAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    bool IsExpandBaseOpnd(const Insn &insn, const Insn &prevInsn) const;
};
474 
/*
 * Optimize the following patterns (the sign-extend is dead because the
 * store only writes the low bits anyway):
 * sxth  r4, r4         ====> strh r4, [r0, r3]
 * strh  r4, [r0, r3]
 *
 * sxtb  r4, r4         ====> strb r4, [r0, r3]
 * strb  r4, [r0, r3]
 */
class RemoveSxtBeforeStrAArch64 : public PeepPattern {
public:
    explicit RemoveSxtBeforeStrAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~RemoveSxtBeforeStrAArch64() override = default;
    void Run(BB &bb, Insn &insn) override;
};
489 
/*
 * Optimize the following patterns:
 * mov x1, #1
 * csel  x22, xzr, x1, LS   ====> cset x22, HI
 *
 * mov x1, #1
 * csel  x22, x1, xzr, LS   ====> cset x22, LS
 */
class CselZeroOneToCsetOpt : public PeepPattern {
public:
    explicit CselZeroOneToCsetOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~CselZeroOneToCsetOpt() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    Insn *trueMovInsn = nullptr;  /* non-owning: mov defining the csel true operand */
    Insn *falseMovInsn = nullptr; /* non-owning: mov defining the csel false operand */
    /* Finds the insn in bb that fixes opnd's value; tempOp receives the fixed value operand. */
    Insn *FindFixedValue(Operand &opnd, BB &bb, Operand *&tempOp, const Insn &insn) const;

protected:
    CGFunc *cgFunc; /* non-owning */
};
512 
/*
 * Optimize the following patterns:
 * and w0, w0, #0x1
 * cmp w0, #0x0
 * cset w0, eq
 * eor w0, w0, #0x1
 * cbz w0, label
 * ====>
 * tbz w0, 0, label
 */
class AndCmpCsetEorCbzOpt : public PeepPattern {
public:
    explicit AndCmpCsetEorCbzOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~AndCmpCsetEorCbzOpt() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    CGFunc *cgFunc; /* non-owning */
};
532 
/*
 * Optimize the following patterns:
 * add x0, x0, x1
 * ldr w0, [x0]
 * ====>
 * ldr w0, [x0, x1]
 */
class AddLdrOpt : public PeepPattern {
public:
    explicit AddLdrOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~AddLdrOpt() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    CGFunc *cgFunc; /* non-owning */
};
549 
/*
 * Optimize the following patterns (fold the eor into the cset condition):
 * cset x0, eq
 * eor x0, x0, 0x1
 * ====>
 * cset x0, ne
 */
class CsetEorOpt : public PeepPattern {
public:
    explicit CsetEorOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~CsetEorOpt() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    CGFunc *cgFunc; /* non-owning */
};
566 
/* Peephole for a compare-with-zero followed by a branch.
 * NOTE(review): this class had no description; the exact insn pattern it
 * matches must be confirmed in the .cpp implementation. */
class CmpZeroBranch : public PeepPattern {
public:
    explicit CmpZeroBranch(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
    ~CmpZeroBranch() override = default;
    void Run(BB &bb, Insn &insn) override;
};
573 
/*
 * Optimize the following patterns (use the immediate form of cmp directly):
 * mov x1, #0x5
 * cmp x0, x1
 * ====>
 * cmp x0, #0x5
 */
class MoveCmpOpt : public PeepPattern {
public:
    explicit MoveCmpOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~MoveCmpOpt() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    CGFunc *cgFunc; /* non-owning */
};
590 
/*
 * Optimize the following patterns (add of #0 is either dead or a plain move):
 * add x0, x0, #0x0         add x0, x1, #0x0
 * ====>
 * ---                      mov x0, x1
 */
class AddImmZeroToMov : public PeepPattern {
public:
    explicit AddImmZeroToMov(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
    ~AddImmZeroToMov() override = default;
    void Run(BB &bb, Insn &insn) override;

private:
    CGFunc *cgFunc; /* non-owning */
};
606 
/* Registry of peephole optimizations: InitOpts populates one optimizer per
 * enum slot and Run dispatches the matching one for each insn. */
class AArch64PeepHole : public PeepPatternMatch {
public:
    AArch64PeepHole(CGFunc &oneCGFunc, MemPool *memPool) : PeepPatternMatch(oneCGFunc, memPool) {}
    ~AArch64PeepHole() override = default;
    void InitOpts() override;
    void Run(BB &bb, Insn &insn) override;

private:
    /* Indices into the optimizer table; kPeepholeOptsNum is the table size. */
    enum PeepholeOpts : int32 {
        kRemoveIdenticalLoadAndStoreOpt = 0,
        kRemoveMovingtoSameRegOpt,
        kCombineContiLoadAndStoreOpt,
        kEliminateSpecifcSXTOpt,
        kEliminateSpecifcUXTOpt,
        kFmovRegOpt,
        kCbnzToCbzOpt,
        kCsetCbzToBeqOpt,
        kContiLDRorSTRToSameMEMOpt,
        kInlineReadBarriersOpt,
        kReplaceDivToMultiOpt,
        kAndCmpBranchesToCsetOpt,
        kAndCmpBranchesToTstOpt,
        kAndCbzBranchesToTstOpt,
        kZeroCmpBranchesOpt,
        kCselZeroOneToCsetOpt,
        kAndCmpCsetEorCbzOpt,
        kAddLdrOpt,
        kCsetEorOpt,
        kMoveCmpOpt,
        kCmpZeroBranch,
        kPeepholeOptsNum
    };
};
640 
/* Second registry of peephole optimizations (a separate pass with its own
 * pattern set); same InitOpts/Run dispatch scheme as AArch64PeepHole. */
class AArch64PeepHole0 : public PeepPatternMatch {
public:
    AArch64PeepHole0(CGFunc &oneCGFunc, MemPool *memPool) : PeepPatternMatch(oneCGFunc, memPool) {}
    ~AArch64PeepHole0() override = default;
    void InitOpts() override;
    void Run(BB &bb, Insn &insn) override;

private:
    /* Indices into the optimizer table; kPeepholeOptsNum is the table size. */
    enum PeepholeOpts : int32 {
        kRemoveIdenticalLoadAndStoreOpt = 0,
        kCmpCsetOpt,
        kComplexMemOperandOptAdd,
        kDeleteMovAfterCbzOrCbnzOpt,
        kRemoveSxtBeforeStrOpt,
        kRedundantMovAArch64Opt,
        kRemoveMovingtoSameRegOpt,
        kEnhanceStrLdrAArch64Opt,
        kAddImmZeroToMov,
        kPeepholeOptsNum
    };
};
662 } /* namespace maplebe */
663 #endif /* MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H */
664