• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H
#define MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H

#include <string>
#include <vector>
#include "peep.h"
#include "aarch64_isa.h"
#include "optimize_common.h"
#include "mir_builder.h"
24 
namespace maplebe {
26 class AArch64CGPeepHole : public CGPeepHole {
27 public:
28     /* normal constructor */
AArch64CGPeepHole(CGFunc & f,MemPool * memPool)29     AArch64CGPeepHole(CGFunc &f, MemPool *memPool) : CGPeepHole(f, memPool) {};
30     ~AArch64CGPeepHole() override = default;
31 
32     void Run() override;
33     void DoNormalOptimize(BB &bb, Insn &insn) override;
34 };
35 
36 /* ======== CGPeepPattern End ======== */
37 /*
38  * Looking for identical mem insn to eliminate.
39  * If two back-to-back is:
40  * 1. str + str
41  * 2. str + ldr
42  * And the [MEM] is pattern of [base + offset]
43  * 1. The [MEM] operand is exactly same then first
44  *    str can be eliminate.
45  * 2. The [MEM] operand is exactly same and src opnd
46  *    of str is same as the dest opnd of ldr then
47  *    ldr can be eliminate
48  */
49 class RemoveIdenticalLoadAndStoreAArch64 : public PeepPattern {
50 public:
RemoveIdenticalLoadAndStoreAArch64(CGFunc & cgFunc)51     explicit RemoveIdenticalLoadAndStoreAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
52     ~RemoveIdenticalLoadAndStoreAArch64() override = default;
53     void Run(BB &bb, Insn &insn) override;
54 
55 private:
56     bool IsMemOperandsIdentical(const Insn &insn1, const Insn &insn2) const;
57 };
58 
59 /* Remove redundant mov which src and dest opnd is exactly same */
60 class RemoveMovingtoSameRegPattern : public CGPeepPattern {
61 public:
RemoveMovingtoSameRegPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)62     RemoveMovingtoSameRegPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn)
63     {
64     }
65     ~RemoveMovingtoSameRegPattern() override = default;
66     void Run(BB &bb, Insn &insn) override;
67     bool CheckCondition(Insn &insn) override;
GetPatternName()68     std::string GetPatternName() override
69     {
70         return "RemoveMovingtoSameRegPattern";
71     }
72 };
73 
74 /*
75  * add x0, x1, x2
76  * mov x19, x0
77  * ==>
78  * add x19, x1, x2
79  */
80 class RedundantMovAArch64 : public PeepPattern {
81 public:
RedundantMovAArch64(CGFunc & cgFunc)82     explicit RedundantMovAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
83     ~RedundantMovAArch64() override = default;
84     void Run(BB &bb, Insn &insn) override;
85 };
86 
87 /* Remove redundant mov which src and dest opnd is exactly same */
88 class RemoveMovingtoSameRegAArch64 : public PeepPattern {
89 public:
RemoveMovingtoSameRegAArch64(CGFunc & cgFunc)90     explicit RemoveMovingtoSameRegAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
91     ~RemoveMovingtoSameRegAArch64() override = default;
92     void Run(BB &bb, Insn &insn) override;
93 };
94 
95 /*
96  * Combining {2 str into 1 stp || 2 ldr into 1 ldp || 2 strb into 1 strh || 2 strh into 1 str},
97  * when they are back to back and the [MEM] they access is conjoined.
98  */
99 class CombineContiLoadAndStorePattern : public CGPeepPattern {
100 public:
CombineContiLoadAndStorePattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)101     CombineContiLoadAndStorePattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn)
102         : CGPeepPattern(cgFunc, currBB, currInsn)
103     {
104         doAggressiveCombine = cgFunc.GetMirModule().IsCModule();
105     }
106     ~CombineContiLoadAndStorePattern() override = default;
107     void Run(BB &bb, Insn &insn) override;
108     bool CheckCondition(Insn &insn) override;
GetPatternName()109     std::string GetPatternName() override
110     {
111         return "CombineContiLoadAndStorePattern";
112     }
113 
114 private:
115     std::vector<Insn *> FindPrevStrLdr(Insn &insn, regno_t destRegNO, regno_t memBaseRegNO, int64 baseOfst) const;
116     /*
117      * avoid the following situation:
118      * str x2, [x19, #8]
119      * mov x0, x19
120      * bl foo (change memory)
121      * str x21, [x19, #16]
122      */
123     bool IsRegNotSameMemUseInInsn(const Insn &checkInsn, const Insn &curInsn, regno_t curBaseRegNO, bool isCurStore,
124                                   int64 curBaseOfst, int64 curMemRange) const;
125     bool IsValidNormalLoadOrStorePattern(const Insn &insn, const Insn &prevInsn, const MemOperand &memOpnd,
126                                          int64 curOfstVal, int64 prevOfstVal);
127     bool IsValidStackArgLoadOrStorePattern(const Insn &curInsn, const Insn &prevInsn, const MemOperand &curMemOpnd,
128                                            const MemOperand &prevMemOpnd, int64 curOfstVal, int64 prevOfstVal) const;
129     Insn *GenerateMemPairInsn(MOperator newMop, RegOperand &curDestOpnd, RegOperand &prevDestOpnd,
130                               MemOperand &combineMemOpnd, bool isCurDestFirst);
131     bool FindUseX16AfterInsn(const Insn &curInsn) const;
132     void RemoveInsnAndKeepComment(BB &bb, Insn &insn, Insn &prevInsn) const;
133 
134     bool doAggressiveCombine = false;
135     bool isPairAfterCombine = true;
136 };
137 
138 /*
139  * add xt, xn, #imm               add  xt, xn, xm
140  * ldr xd, [xt]                   ldr xd, [xt]
141  * =====================>
142  * ldr xd, [xn, #imm]             ldr xd, [xn, xm]
143  *
144  * load/store can do extend shift as well
145  */
146 class EnhanceStrLdrAArch64 : public PeepPattern {
147 public:
EnhanceStrLdrAArch64(CGFunc & cgFunc)148     explicit EnhanceStrLdrAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
149     ~EnhanceStrLdrAArch64() override = default;
150     void Run(BB &bb, Insn &insn) override;
151 
152 private:
153     ImmOperand *GetInsnAddOrSubNewOffset(Insn &insn, ImmOperand &offset);
154     void OptimizeAddrBOI(Insn &insn, MemOperand &memOpnd, Insn &prevInsn);
155     void OptimizeAddrBOrX(Insn &insn, MemOperand &memOpnd, Insn &prevInsn);
156     void OptimizeAddrBOrXShiftExtend(Insn &insn, MemOperand &memOpnd, Insn &shiftExtendInsn);
157     void OptimizeWithAddrrrs(Insn &insn, MemOperand &memOpnd, Insn &addInsn);
158     bool CheckOperandIsDeadFromInsn(const RegOperand &regOpnd, Insn &insn);
159 };
160 
161 /* Eliminate the sxt[b|h|w] w0, w0;, when w0 is satisify following:
162  * i)  mov w0, #imm (#imm is not out of range)
163  * ii) ldrs[b|h] w0, [MEM]
164  */
165 class EliminateSpecifcSXTAArch64 : public PeepPattern {
166 public:
EliminateSpecifcSXTAArch64(CGFunc & cgFunc)167     explicit EliminateSpecifcSXTAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
168     ~EliminateSpecifcSXTAArch64() override = default;
169     void Run(BB &bb, Insn &insn) override;
170 };
171 
172 /* Eliminate the uxt[b|h|w] w0, w0;when w0 is satisify following:
173  * i)  mov w0, #imm (#imm is not out of range)
174  * ii) mov w0, R0(Is return value of call and return size is not of range)
175  * iii)w0 is defined and used by special load insn and uxt[] pattern
176  */
177 class EliminateSpecifcUXTAArch64 : public PeepPattern {
178 public:
EliminateSpecifcUXTAArch64(CGFunc & cgFunc)179     explicit EliminateSpecifcUXTAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
180     ~EliminateSpecifcUXTAArch64() override = default;
181     void Run(BB &bb, Insn &insn) override;
182 };
183 
184 /* fmov ireg1 <- freg1   previous insn
185  * fmov ireg2 <- freg1   current insn
186  * use  ireg2            may or may not be present
187  * =>
188  * fmov ireg1 <- freg1   previous insn
189  * mov  ireg2 <- ireg1   current insn
190  * use  ireg1            may or may not be present
191  */
192 class FmovRegPattern : public CGPeepPattern {
193 public:
FmovRegPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)194     FmovRegPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~FmovRegPattern()195     ~FmovRegPattern() override
196     {
197         prevInsn = nullptr;
198         nextInsn = nullptr;
199     }
200     void Run(BB &bb, Insn &insn) override;
201     bool CheckCondition(Insn &insn) override;
GetPatternName()202     std::string GetPatternName() override
203     {
204         return "FmovRegPattern";
205     }
206 
207 private:
208     Insn *prevInsn = nullptr;
209     Insn *nextInsn = nullptr;
210 };
211 
212 /* sbfx ireg1, ireg2, 0, 32
213  * use  ireg1.32
214  * =>
215  * sbfx ireg1, ireg2, 0, 32
216  * use  ireg2.32
217  */
218 class SbfxOptPattern : public CGPeepPattern {
219 public:
SbfxOptPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)220     SbfxOptPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~SbfxOptPattern()221     ~SbfxOptPattern() override
222     {
223         nextInsn = nullptr;
224     }
225     void Run(BB &bb, Insn &insn) override;
226     bool CheckCondition(Insn &insn) override;
GetPatternName()227     std::string GetPatternName() override
228     {
229         return "SbfxOptPattern";
230     }
231 
232 private:
233     Insn *nextInsn = nullptr;
234     bool toRemove = false;
235     std::vector<uint32> cands;
236 };
237 
238 /* cbnz x0, labelA
239  * mov x0, 0
240  * b  return-bb
241  * labelA:
242  * =>
243  * cbz x0, return-bb
244  * labelA:
245  */
246 class CbnzToCbzPattern : public CGPeepPattern {
247 public:
CbnzToCbzPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)248     CbnzToCbzPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn) {}
~CbnzToCbzPattern()249     ~CbnzToCbzPattern() override
250     {
251         nextBB = nullptr;
252         movInsn = nullptr;
253         brInsn = nullptr;
254     }
255     void Run(BB &bb, Insn &insn) override;
256     bool CheckCondition(Insn &insn) override;
GetPatternName()257     std::string GetPatternName() override
258     {
259         return "CbnzToCbzPattern";
260     }
261 
262 private:
263     BB *nextBB = nullptr;
264     Insn *movInsn = nullptr;
265     Insn *brInsn = nullptr;
266 };
267 
268 /* i.   cset    w0, EQ
269  *      cbnz    w0, .label    ===> beq .label
270  *
271  * ii.  cset    w0, EQ
272  *      cbz    w0, .label     ===> bne .label
273  *
274  * iii. cset    w0, NE
275  *      cbnz    w0, .label    ===> bne .label
276  *
277  * iiii.cset    w0, NE
278  *      cbz    w0, .label     ===> beq .label
279  * ... ...
280  */
281 class CsetCbzToBeqOptAArch64 : public PeepPattern {
282 public:
CsetCbzToBeqOptAArch64(CGFunc & cgFunc)283     explicit CsetCbzToBeqOptAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
284     ~CsetCbzToBeqOptAArch64() override = default;
285     void Run(BB &bb, Insn &insn) override;
286     MOperator SelectMOperator(ConditionCode condCode, bool inverse) const;
287 };
288 
289 /* When exist load after load or load after store, and [MEM] is
290  * totally same. Then optimize them.
291  */
292 class ContiLDRorSTRToSameMEMPattern : public CGPeepPattern {
293 public:
ContiLDRorSTRToSameMEMPattern(CGFunc & cgFunc,BB & currBB,Insn & currInsn)294     ContiLDRorSTRToSameMEMPattern(CGFunc &cgFunc, BB &currBB, Insn &currInsn) : CGPeepPattern(cgFunc, currBB, currInsn)
295     {
296     }
~ContiLDRorSTRToSameMEMPattern()297     ~ContiLDRorSTRToSameMEMPattern() override
298     {
299         prevInsn = nullptr;
300     }
301     void Run(BB &bb, Insn &insn) override;
302     bool HasImplicitSizeUse(const Insn &insn) const;
303     bool CheckCondition(Insn &insn) override;
GetPatternName()304     std::string GetPatternName() override
305     {
306         return "ContiLDRorSTRToSameMEMPattern";
307     }
308 
309 private:
310     Insn *prevInsn = nullptr;
311     bool loadAfterStore = false;
312     bool loadAfterLoad = false;
313 };
314 
315 /*
316  * Optimize the following patterns:
317  *  and  w0, w0, #imm  ====> tst  w0, #imm
318  *  cmp  w0, #0              beq/bne  .label
319  *  beq/bne  .label
320  *
321  *  and  x0, x0, #imm  ====> tst  x0, #imm
322  *  cmp  x0, #0              beq/bne  .label
323  *  beq/bne  .label
324  */
325 class AndCmpBranchesToTstAArch64 : public PeepPattern {
326 public:
AndCmpBranchesToTstAArch64(CGFunc & cgFunc)327     explicit AndCmpBranchesToTstAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
328     ~AndCmpBranchesToTstAArch64() override = default;
329     void Run(BB &bb, Insn &insn) override;
330 };
331 
332 /*
333  * Optimize the following patterns:
334  *  and  w0, w0, #imm  ====> tst  w0, #imm
335  *  cbz/cbnz  .label         beq/bne  .label
336  */
337 class AndCbzBranchesToTstAArch64 : public PeepPattern {
338 public:
AndCbzBranchesToTstAArch64(CGFunc & cgFunc)339     explicit AndCbzBranchesToTstAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
340     ~AndCbzBranchesToTstAArch64() override = default;
341     void Run(BB &bb, Insn &insn) override;
342 };
343 
344 /*
345  * Optimize the following patterns:
346  *  and  w0, w0, #1  ====> and  w0, w0, #1
347  *  cmp  w0, #1
348  *  cset w0, EQ
349  *
350  *  and  w0, w0, #1  ====> and  w0, w0, #1
351  *  cmp  w0, #0
352  *  cset w0, NE
353  *  ---------------------------------------------------
354  *  and  w0, w0, #imm  ====> ubfx  w0, w0, pos, size
355  *  cmp  w0, #imm
356  *  cset w0, EQ
357  *
358  *  and  w0, w0, #imm  ====> ubfx  w0, w0, pos, size
359  *  cmp  w0, #0
360  *  cset w0, NE
361  *  conditions:
362  *  imm is pos power of 2
363  *
364  *  ---------------------------------------------------
365  *  and  w0, w0, #1  ====> and  wn, w0, #1
366  *  cmp  w0, #1
367  *  cset wn, EQ        # wn != w0 && w0 is not live after cset
368  *
369  *  and  w0, w0, #1  ====> and  wn, w0, #1
370  *  cmp  w0, #0
371  *  cset wn, NE        # wn != w0 && w0 is not live after cset
372  *  ---------------------------------------------------
373  *  and  w0, w0, #imm  ====> ubfx  wn, w0, pos, size
374  *  cmp  w0, #imm
375  *  cset wn, EQ        # wn != w0 && w0 is not live after cset
376  *
377  *  and  w0, w0, #imm  ====> ubfx  wn, w0, pos, size
378  *  cmp  w0, #0
379  *  cset wn, NE        # wn != w0 && w0 is not live after cset
380  *  conditions:
381  *  imm is pos power of 2 and w0 is not live after cset
382  */
383 class AndCmpBranchesToCsetAArch64 : public PeepPattern {
384 public:
AndCmpBranchesToCsetAArch64(CGFunc & cgFunc)385     explicit AndCmpBranchesToCsetAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
386     ~AndCmpBranchesToCsetAArch64() override = default;
387     void Run(BB &bb, Insn &insn) override;
388 
389 private:
390     Insn *FindPreviousCmp(Insn &insn) const;
391 };
392 
393 /*
394  * We optimize the following pattern in this function:
395  * cmp w[0-9]*, wzr  ====> tbz w[0-9]*, #31, .label
396  * bge .label
397  *
398  * cmp wzr, w[0-9]*  ====> tbz w[0-9]*, #31, .label
399  * ble .label
400  *
401  * cmp w[0-9]*,wzr   ====> tbnz w[0-9]*, #31, .label
402  * blt .label
403  *
404  * cmp wzr, w[0-9]*  ====> tbnz w[0-9]*, #31, .label
405  * bgt .label
406  *
407  * cmp w[0-9]*, #0   ====> tbz w[0-9]*, #31, .label
408  * bge .label
409  *
410  * cmp w[0-9]*, #0   ====> tbnz w[0-9]*, #31, .label
411  * blt .label
412  */
413 class ZeroCmpBranchesAArch64 : public PeepPattern {
414 public:
ZeroCmpBranchesAArch64(CGFunc & cgFunc)415     explicit ZeroCmpBranchesAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
416     ~ZeroCmpBranchesAArch64() override = default;
417     void Run(BB &bb, Insn &insn) override;
418 };
419 
420 /*
421  *  cmp  w0, #0
422  *  cset w1, NE --> mov w1, w0
423  *
424  *  cmp  w0, #0
425  *  cset w1, EQ --> eor w1, w0, 1
426  *
427  *  cmp  w0, #1
428  *  cset w1, NE --> eor w1, w0, 1
429  *
430  *  cmp  w0, #1
431  *  cset w1, EQ --> mov w1, w0
432  *
433  *  cmp w0,  #0
434  *  cset w0, NE -->null
435  *
436  *  cmp w0, #1
437  *  cset w0, EQ -->null
438  *
439  *  condition:
440  *    1. the first operand of cmp instruction must has only one valid bit
441  *    2. the second operand of cmp instruction must be 0 or 1
442  *    3. flag register of cmp isntruction must not be used later
443  */
444 class CmpCsetAArch64 : public PeepPattern {
445 public:
CmpCsetAArch64(CGFunc & cgFunc)446     explicit CmpCsetAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
447     ~CmpCsetAArch64() override = default;
448     void Run(BB &bb, Insn &insn) override;
449 
450 private:
451     bool CheckOpndDefPoints(Insn &checkInsn, int opndIdx);
452     const Insn *DefInsnOfOperandInBB(const Insn &startInsn, const Insn &checkInsn, int opndIdx) const;
453     bool OpndDefByOneValidBit(const Insn &defInsn) const;
454     bool FlagUsedLaterInCurBB(const BB &bb, Insn &startInsn) const;
455 };
456 
457 /*
458  *  add     x0, x1, x0
459  *  ldr     x2, [x0]
460  *  ==>
461  *  ldr     x2, [x1, x0]
462  */
463 class ComplexMemOperandAddAArch64 : public PeepPattern {
464 public:
ComplexMemOperandAddAArch64(CGFunc & cgFunc)465     explicit ComplexMemOperandAddAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
466     ~ComplexMemOperandAddAArch64() override = default;
467     void Run(BB &bb, Insn &insn) override;
468 
469 private:
470     bool IsExpandBaseOpnd(const Insn &insn, const Insn &prevInsn) const;
471 };
472 
473 /*
474  * Optimize the following patterns:
475  * sxth  r4, r4         ====> strh r4, [r0, r3]
476  * strh  r4, [r0, r3]
477  *
478  * sxtb  r4, r4         ====> strb r4, [r0, r3]
479  * strb  r4, [r0, r3]
480  */
481 class RemoveSxtBeforeStrAArch64 : public PeepPattern {
482 public:
RemoveSxtBeforeStrAArch64(CGFunc & cgFunc)483     explicit RemoveSxtBeforeStrAArch64(CGFunc &cgFunc) : PeepPattern(cgFunc) {}
484     ~RemoveSxtBeforeStrAArch64() override = default;
485     void Run(BB &bb, Insn &insn) override;
486 };
487 
488 /*
489  * Optimize the following patterns:
490  * mov x1, #1
491  * csel  x22, xzr, x1, LS   ====> cset x22, HI
492  *
493  * mov x1, #1
494  * csel  x22, x1, xzr, LS   ====> cset x22, LS
495  */
496 class CselZeroOneToCsetOpt : public PeepPattern {
497 public:
CselZeroOneToCsetOpt(CGFunc & cgFunc)498     explicit CselZeroOneToCsetOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
499     ~CselZeroOneToCsetOpt() override = default;
500     void Run(BB &bb, Insn &insn) override;
501 
502 private:
503     Insn *trueMovInsn = nullptr;
504     Insn *falseMovInsn = nullptr;
505     Insn *FindFixedValue(Operand &opnd, BB &bb, Operand *&tempOp, const Insn &insn) const;
506 
507 protected:
508     CGFunc *cgFunc;
509 };
510 
511 /*
512  * Optimize the following patterns:
513  * and w0, w0, #0x1
514  * cmp w0, #0x0
515  * cset w0, eq
516  * eor w0, w0, #0x1
517  * cbz w0, label
518  * ====>
519  * tbz w0, 0, label
520  */
521 class AndCmpCsetEorCbzOpt : public PeepPattern {
522 public:
AndCmpCsetEorCbzOpt(CGFunc & cgFunc)523     explicit AndCmpCsetEorCbzOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
524     ~AndCmpCsetEorCbzOpt() override = default;
525     void Run(BB &bb, Insn &insn) override;
526 
527 private:
528     CGFunc *cgFunc;
529 };
530 
531 /*
532  * Optimize the following patterns:
533  * add x0, x0, x1
534  * ldr w0, [x0]
535  * ====>
536  * ldr w0, [x0, x1]
537  */
538 class AddLdrOpt : public PeepPattern {
539 public:
AddLdrOpt(CGFunc & cgFunc)540     explicit AddLdrOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
541     ~AddLdrOpt() override = default;
542     void Run(BB &bb, Insn &insn) override;
543 
544 private:
545     CGFunc *cgFunc;
546 };
547 
548 /*
549  * Optimize the following patterns:
550  * cset x0, eq
551  * eor x0, x0, 0x1
552  * ====>
553  * cset x0, ne
554  */
555 class CsetEorOpt : public PeepPattern {
556 public:
CsetEorOpt(CGFunc & cgFunc)557     explicit CsetEorOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
558     ~CsetEorOpt() override = default;
559     void Run(BB &bb, Insn &insn) override;
560 
561 private:
562     CGFunc *cgFunc;
563 };
564 
565 /*
566  * Optimize the following patterns:
567  * mov x1, #0x5
568  * cmp x0, x1
569  * ====>
570  * cmp x0, #0x5
571  */
572 class MoveCmpOpt : public PeepPattern {
573 public:
MoveCmpOpt(CGFunc & cgFunc)574     explicit MoveCmpOpt(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
575     ~MoveCmpOpt() override = default;
576     void Run(BB &bb, Insn &insn) override;
577 
578 private:
579     CGFunc *cgFunc;
580 };
581 
582 /*
583  * Optimize the following patterns:
584  * add x0, x0, #0x0         add x0, x1, #0x0
585  * ====>
586  * ---                      mov x0, x1
587  */
588 class AddImmZeroToMov : public PeepPattern {
589 public:
AddImmZeroToMov(CGFunc & cgFunc)590     explicit AddImmZeroToMov(CGFunc &cgFunc) : PeepPattern(cgFunc), cgFunc(&cgFunc) {}
591     ~AddImmZeroToMov() override = default;
592     void Run(BB &bb, Insn &insn) override;
593 
594 private:
595     CGFunc *cgFunc;
596 };
597 
598 class AArch64PeepHole : public PeepPatternMatch {
599 public:
AArch64PeepHole(CGFunc & oneCGFunc,MemPool * memPool)600     AArch64PeepHole(CGFunc &oneCGFunc, MemPool *memPool) : PeepPatternMatch(oneCGFunc, memPool) {}
601     ~AArch64PeepHole() override = default;
602     void InitOpts() override;
603     void Run(BB &bb, Insn &insn) override;
604 
605 private:
606     enum PeepholeOpts : int32 {
607         kRemoveIdenticalLoadAndStoreOpt = 0,
608         kRemoveMovingtoSameRegOpt,
609         kCombineContiLoadAndStoreOpt,
610         kEliminateSpecifcSXTOpt,
611         kEliminateSpecifcUXTOpt,
612         kFmovRegOpt,
613         kCbnzToCbzOpt,
614         kCsetCbzToBeqOpt,
615         kContiLDRorSTRToSameMEMOpt,
616         kInlineReadBarriersOpt,
617         kReplaceDivToMultiOpt,
618         kAndCmpBranchesToCsetOpt,
619         kAndCmpBranchesToTstOpt,
620         kAndCbzBranchesToTstOpt,
621         kZeroCmpBranchesOpt,
622         kCselZeroOneToCsetOpt,
623         kAndCmpCsetEorCbzOpt,
624         kAddLdrOpt,
625         kCsetEorOpt,
626         kMoveCmpOpt,
627         kPeepholeOptsNum
628     };
629 };
630 
631 class AArch64PeepHole0 : public PeepPatternMatch {
632 public:
AArch64PeepHole0(CGFunc & oneCGFunc,MemPool * memPool)633     AArch64PeepHole0(CGFunc &oneCGFunc, MemPool *memPool) : PeepPatternMatch(oneCGFunc, memPool) {}
634     ~AArch64PeepHole0() override = default;
635     void InitOpts() override;
636     void Run(BB &bb, Insn &insn) override;
637 
638 private:
639     enum PeepholeOpts : int32 {
640         kRemoveIdenticalLoadAndStoreOpt = 0,
641         kCmpCsetOpt,
642         kComplexMemOperandOptAdd,
643         kDeleteMovAfterCbzOrCbnzOpt,
644         kRemoveSxtBeforeStrOpt,
645         kRedundantMovAArch64Opt,
646         kRemoveMovingtoSameRegOpt,
647         kEnhanceStrLdrAArch64Opt,
648         kAddImmZeroToMov,
649         kPeepholeOptsNum
650     };
651 };
} /* namespace maplebe */
#endif /* MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PEEP_H */
654