1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "aarch64_global.h"
17 #include "aarch64_reaching.h"
18 #include "aarch64_cg.h"
19 #include "aarch64_live.h"
20 
21 namespace maplebe {
22 using namespace maple;
23 #define GLOBAL_DUMP CG_DEBUG_FUNC(cgFunc)
24 
25 constexpr uint32 kExMOpTypeSize = 9;
26 constexpr uint32 kLsMOpTypeSize = 15;
27 
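/* Lookup tables used by ExtendShiftOptPattern: they map an ExMOpType/LsMOpType value to the
 * corresponding extended-register (e.g. add x0, x1, w2, SXTW) or shifted-register
 * (e.g. add x0, x1, x2, LSL #2) machine opcode; index 0 is MOP_undef. The assembly forms above
 * are illustrative examples only.
 */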
28 MOperator exMOpTable[kExMOpTypeSize] = {MOP_undef,    MOP_xxwaddrrre, MOP_wwwaddrrre, MOP_xxwsubrrre, MOP_wwwsubrrre,
29                                         MOP_xwcmnrre, MOP_wwcmnrre,   MOP_xwcmprre,   MOP_wwcmprre};
30 MOperator lsMOpTable[kLsMOpTypeSize] = {MOP_undef,    MOP_xaddrrrs, MOP_waddrrrs, MOP_xsubrrrs, MOP_wsubrrrs,
31                                         MOP_xcmnrrs,  MOP_wcmnrrs,  MOP_xcmprrs,  MOP_wcmprrs,  MOP_xeorrrrs,
32                                         MOP_weorrrrs, MOP_xinegrrs, MOP_winegrrs, MOP_xiorrrrs, MOP_wiorrrrs};
33 
34 /* Optimize ExtendShiftOptPattern:
35  * ==========================================================
36  *           nosuffix  LSL   LSR   ASR      exten   (def)
37  * nosuffix |   F    | LSL | LSR | ASR |    exten  |
38  * LSL      |   F    | LSL |  F  |  F  |    exten  |
39  * LSR      |   F    |  F  | LSR |  F  |     F     |
40  * ASR      |   F    |  F  |  F  | ASR |     F     |
41  * exten    |   F    |  F  |  F  |  F  |exten(self)|
42  * (use)
43  * ===========================================================
44  */
45 constexpr uint32 kExtenAddShift = 5;
46 ExtendShiftOptPattern::SuffixType doOptimize[kExtenAddShift][kExtenAddShift] = {
47     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSL, ExtendShiftOptPattern::kLSR,
48      ExtendShiftOptPattern::kASR, ExtendShiftOptPattern::kExten},
49     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSL, ExtendShiftOptPattern::kNoSuffix,
50      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kExten},
51     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSR,
52      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix},
53     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix,
54      ExtendShiftOptPattern::kASR, ExtendShiftOptPattern::kNoSuffix},
55     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix,
56      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kExten}};
57 
58 static bool IsZeroRegister(const Operand &opnd)
59 {
60     if (!opnd.IsRegister()) {
61         return false;
62     }
63     const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
64     return regOpnd->GetRegisterNumber() == RZR;
65 }
66 
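/* Entry point of the global optimization phase: after register allocation only the copy
 * propagation patterns run; before register allocation the full pattern set runs, and the
 * more expensive patterns are skipped for very large functions (hasSpillBarrier).
 */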
67 void AArch64GlobalOpt::Run()
68 {
69     OptimizeManager optManager(cgFunc);
70     bool hasSpillBarrier = (cgFunc.NumBBs() > kMaxBBNum) || (cgFunc.GetRD()->GetMaxInsnNO() > kMaxInsnNum);
71     if (cgFunc.IsAfterRegAlloc()) {
72         optManager.Optimize<SameRHSPropPattern>();
73         optManager.Optimize<BackPropPattern>();
74         return;
75     }
76     if (!hasSpillBarrier) {
77         optManager.Optimize<ExtenToMovPattern>();
78         optManager.Optimize<SameRHSPropPattern>();
79         optManager.Optimize<BackPropPattern>();
80         optManager.Optimize<ForwardPropPattern>();
81         optManager.Optimize<CselPattern>();
82         optManager.Optimize<CmpCsetPattern>();
83         optManager.Optimize<RedundantUxtPattern>();
84         optManager.Optimize<LocalVarSaveInsnPattern>();
85     }
86     optManager.Optimize<SameDefPattern>();
87     optManager.Optimize<ExtendShiftOptPattern>();
88     optManager.Optimize<AndCbzPattern>();
89 }
90 
91 /* Return true if the operand used in insn is defined as zero by all of its defining insns */
92 bool OptimizePattern::OpndDefByZero(Insn &insn, int32 useIdx) const
93 {
94     DEBUG_ASSERT(insn.GetOperand(useIdx).IsRegister(), "the used Operand must be Register");
95     /* the zero register does not need a definition */
96     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
97         return true;
98     }
99 
100     InsnSet defInsns = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
101     if (defInsns.empty()) {
102         return false;
103     }
104     for (auto &defInsn : defInsns) {
105         if (!InsnDefZero(*defInsn)) {
106             return false;
107         }
108     }
109     return true;
110 }
111 
112 /* Return true if the operand used in insn is defined as one by all of its defining insns */
113 bool OptimizePattern::OpndDefByOne(Insn &insn, int32 useIdx) const
114 {
115     DEBUG_ASSERT(insn.GetOperand(useIdx).IsRegister(), "the used Operand must be Register");
116     /* the zero register does not need a definition */
117     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
118         return false;
119     }
120     InsnSet defInsns = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
121     if (defInsns.empty()) {
122         return false;
123     }
124     for (auto &defInsn : defInsns) {
125         if (!InsnDefOne(*defInsn)) {
126             return false;
127         }
128     }
129     return true;
130 }
131 
132 /* Return true if the operand used in insn is defined as 0 or 1 (one valid bit) by all of its defining insns */
133 bool OptimizePattern::OpndDefByOneOrZero(Insn &insn, int32 useIdx) const
134 {
135     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
136         return true;
137     }
138 
139     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
140     if (defInsnSet.empty()) {
141         return false;
142     }
143 
144     for (auto &defInsn : defInsnSet) {
145         if (!InsnDefOneOrZero(*defInsn)) {
146             return false;
147         }
148     }
149     return true;
150 }
151 
152 /* Return true if the operand defined by insn (currently it must be the first operand) is the constant one */
153 bool OptimizePattern::InsnDefOne(const Insn &insn)
154 {
155     MOperator defMop = insn.GetMachineOpcode();
156     switch (defMop) {
157         case MOP_wmovri32:
158         case MOP_xmovri64: {
159             Operand &srcOpnd = insn.GetOperand(1);
160             DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "expects ImmOperand");
161             ImmOperand &srcConst = static_cast<ImmOperand &>(srcOpnd);
162             int64 srcConstValue = srcConst.GetValue();
163             if (srcConstValue == 1) {
164                 return true;
165             }
166             return false;
167         }
168         default:
169             return false;
170     }
171 }
172 
173 /* Return true if the operand defined by insn (currently it must be the first operand) is the constant zero */
174 bool OptimizePattern::InsnDefZero(const Insn &insn)
175 {
176     MOperator defMop = insn.GetMachineOpcode();
177     switch (defMop) {
178         case MOP_wmovri32:
179         case MOP_xmovri64: {
180             Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
181             DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "expects ImmOperand");
182             ImmOperand &srcConst = static_cast<ImmOperand &>(srcOpnd);
183             int64 srcConstValue = srcConst.GetValue();
184             if (srcConstValue == 0) {
185                 return true;
186             }
187             return false;
188         }
189         case MOP_xmovrr:
190         case MOP_wmovrr:
191             return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
192         default:
193             return false;
194     }
195 }
196 
197 /* Return true if the operand defined by insn (currently it must be the first operand) has only one valid bit */
198 bool OptimizePattern::InsnDefOneOrZero(const Insn &insn)
199 {
200     MOperator defMop = insn.GetMachineOpcode();
201     switch (defMop) {
202         case MOP_wcsetrc:
203         case MOP_xcsetrc:
204             return true;
205         case MOP_wmovri32:
206         case MOP_xmovri64: {
207             Operand &defOpnd = insn.GetOperand(kInsnSecondOpnd);
208             DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
209             auto &defConst = static_cast<ImmOperand &>(defOpnd);
210             int64 defConstValue = defConst.GetValue();
211             if (defConstValue != 0 && defConstValue != 1) {
212                 return false;
213             } else {
214                 return true;
215             }
216         }
217         case MOP_xmovrr:
218         case MOP_wmovrr: {
219             return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
220         }
221         case MOP_wlsrrri5:
222         case MOP_xlsrrri6: {
223             Operand &opnd2 = insn.GetOperand(kInsnThirdOpnd);
224             DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
225             ImmOperand &opndImm = static_cast<ImmOperand &>(opnd2);
226             int64 shiftBits = opndImm.GetValue();
227             if (((defMop == MOP_wlsrrri5) && (shiftBits == k32BitSize - 1)) ||
228                 ((defMop == MOP_xlsrrri6) && (shiftBits == k64BitSize - 1))) {
229                 return true;
230             } else {
231                 return false;
232             }
233         }
234         default:
235             return false;
236     }
237 }
238 
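/* For the inline-asm operand list at the given index, replace every list element whose register
 * number equals regNO with newOpnd, keeping the original order of the list.
 */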
239 void ReplaceAsmListReg(const Insn *insn, uint32 index, uint32 regNO, Operand *newOpnd)
240 {
241     MapleList<RegOperand *> *list = &static_cast<ListOperand &>(insn->GetOperand(index)).GetOperands();
242     int32 size = static_cast<int32>(list->size());
243     for (int i = 0; i < size; ++i) {
244         RegOperand *opnd = static_cast<RegOperand *>(*(list->begin()));
245         list->pop_front();
246         if (opnd->GetRegisterNumber() == regNO) {
247             list->push_back(static_cast<RegOperand *>(newOpnd));
248         } else {
249             list->push_back(opnd);
250         }
251     }
252 }
253 
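/* Replace register regNO with newOpnd in every insn of useInsnSet, including the base and index
 * registers of memory operands; reaching-definition info is refreshed when updateInfo is set.
 */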
254 void OptimizePattern::ReplaceAllUsedOpndWithNewOpnd(const InsnSet &useInsnSet, uint32 regNO, Operand &newOpnd,
255                                                     bool updateInfo) const
256 {
257     for (auto useInsn : useInsnSet) {
258         if (useInsn->GetMachineOpcode() == MOP_asm) {
259             ReplaceAsmListReg(useInsn, kAsmInputListOpnd, regNO, &newOpnd);
260         }
261         const InsnDesc *md = useInsn->GetDesc();
262         uint32 opndNum = useInsn->GetOperandSize();
263         for (uint32 i = 0; i < opndNum; ++i) {
264             Operand &opnd = useInsn->GetOperand(i);
265             auto *regProp = md->opndMD[i];
266             if (!regProp->IsRegUse() && !opnd.IsMemoryAccessOperand()) {
267                 continue;
268             }
269 
270             if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO)) {
271                 useInsn->SetOperand(i, newOpnd);
272                 if (updateInfo) {
273                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
274                 }
275             } else if (opnd.IsMemoryAccessOperand()) {
276                 MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
277                 RegOperand *base = memOpnd.GetBaseRegister();
278                 RegOperand *index = memOpnd.GetIndexRegister();
279                 MemOperand *newMem = nullptr;
280                 if (base != nullptr && (base->GetRegisterNumber() == regNO)) {
281                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
282                     CHECK_FATAL(newMem != nullptr, "null ptr check");
283                     newMem->SetBaseRegister(*static_cast<RegOperand *>(&newOpnd));
284                     useInsn->SetOperand(i, *newMem);
285                     if (updateInfo) {
286                         cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
287                     }
288                 }
289                 if (index != nullptr && (index->GetRegisterNumber() == regNO)) {
290                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
291                     CHECK_FATAL(newMem != nullptr, "null ptr check");
292                     newMem->SetIndexRegister(*static_cast<RegOperand *>(&newOpnd));
293                     if (static_cast<RegOperand &>(newOpnd).GetValidBitsNum() != index->GetValidBitsNum()) {
294                         newMem->UpdateExtend(MemOperand::kSignExtend);
295                     }
296                     useInsn->SetOperand(i, *newMem);
297                     if (updateInfo) {
298                         cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
299                     }
300                 }
301             }
302         }
303     }
304 }
305 
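/* ForwardPropPattern: for a register copy "mov Rd, Rs" between virtual registers, rewrite the
 * uses of Rd that are reached only by this copy so that they read Rs directly, e.g. (illustrative):
 *   mov  w1, w2              mov  w2, w2      // rewritten copy, cleaned up later
 *   add  w3, w1, #4   ==>    add  w3, w2, #4
 */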
306 bool ForwardPropPattern::CheckCondition(Insn &insn)
307 {
308     if (!insn.IsMachineInstruction()) {
309         return false;
310     }
311     if ((insn.GetMachineOpcode() != MOP_xmovrr) && (insn.GetMachineOpcode() != MOP_wmovrr) &&
312         (insn.GetMachineOpcode() != MOP_xmovrr_uxtw)) {
313         return false;
314     }
315     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
316     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
317     if (firstOpnd.GetSize() != secondOpnd.GetSize() && insn.GetMachineOpcode() != MOP_xmovrr_uxtw) {
318         return false;
319     }
320     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
321     RegOperand &secondRegOpnd = static_cast<RegOperand &>(secondOpnd);
322     uint32 firstRegNO = firstRegOpnd.GetRegisterNumber();
323     uint32 secondRegNO = secondRegOpnd.GetRegisterNumber();
324     if (IsZeroRegister(firstRegOpnd) || !firstRegOpnd.IsVirtualRegister() || !secondRegOpnd.IsVirtualRegister()) {
325         return false;
326     }
327     firstRegUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, firstRegNO, true);
328     if (firstRegUseInsnSet.empty()) {
329         return false;
330     }
331     InsnSet secondRegDefInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, secondRegNO, true);
332     if (secondRegDefInsnSet.size() != 1 || RegOperand::IsSameReg(firstOpnd, secondOpnd)) {
333         return false;
334     }
335     bool toDoOpt = true;
336     for (auto useInsn : firstRegUseInsnSet) {
337         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, insn, *useInsn)) {
338             toDoOpt = false;
339             break;
340         }
341         /* part defined */
342         if ((useInsn->GetMachineOpcode() == MOP_xmovkri16) || (useInsn->GetMachineOpcode() == MOP_wmovkri16)) {
343             toDoOpt = false;
344             break;
345         }
346         if (useInsn->GetMachineOpcode() == MOP_asm) {
347             toDoOpt = false;
348             break;
349         }
350         InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
351         if (defInsnSet.size() > 1) {
352             toDoOpt = false;
353             break;
354         } else if (defInsnSet.size() == 1 && *defInsnSet.begin() != &insn) {
355             toDoOpt = false;
356             break;
357         }
358     }
359     return toDoOpt;
360 }
361 
362 void ForwardPropPattern::Optimize(Insn &insn)
363 {
364     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
365     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
366     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
367     uint32 firstRegNO = firstRegOpnd.GetRegisterNumber();
368     for (auto *useInsn : firstRegUseInsnSet) {
369         if (useInsn->GetMachineOpcode() == MOP_asm) {
370             ReplaceAsmListReg(useInsn, kAsmInputListOpnd, firstRegNO, &secondOpnd);
371             cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
372             continue;
373         }
374         const InsnDesc *md = useInsn->GetDesc();
375         uint32 opndNum = useInsn->GetOperandSize();
376         for (uint32 i = 0; i < opndNum; ++i) {
377             Operand &opnd = useInsn->GetOperand(i);
378             const OpndDesc *regProp = md->GetOpndDes(i);
379             if (!regProp->IsRegUse() && !opnd.IsMemoryAccessOperand()) {
380                 continue;
381             }
382 
383             if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == firstRegNO)) {
384                 useInsn->SetOperand(i, secondOpnd);
385                 if (((useInsn->GetMachineOpcode() == MOP_xmovrr) || (useInsn->GetMachineOpcode() == MOP_wmovrr)) &&
386                     (static_cast<RegOperand &>(useInsn->GetOperand(kInsnSecondOpnd)).IsVirtualRegister()) &&
387                     (static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd)).IsVirtualRegister())) {
388                     (void)modifiedBB.insert(useInsn->GetBB());
389                 }
390                 cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
391             } else if (opnd.IsMemoryAccessOperand()) {
392                 MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
393                 RegOperand *base = memOpnd.GetBaseRegister();
394                 RegOperand *index = memOpnd.GetIndexRegister();
395                 MemOperand *newMem = nullptr;
396                 if (base != nullptr && (base->GetRegisterNumber() == firstRegNO)) {
397                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
398                     CHECK_FATAL(newMem != nullptr, "null ptr check");
399                     newMem->SetBaseRegister(static_cast<RegOperand &>(secondOpnd));
400                     useInsn->SetOperand(i, *newMem);
401                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
402                 }
403                 if ((index != nullptr) && (index->GetRegisterNumber() == firstRegNO)) {
404                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
405                     CHECK_FATAL(newMem != nullptr, "null ptr check");
406                     newMem->SetIndexRegister(static_cast<RegOperand &>(secondOpnd));
407                     if (static_cast<RegOperand &>(secondOpnd).GetValidBitsNum() != index->GetValidBitsNum()) {
408                         newMem->UpdateExtend(MemOperand::kSignExtend);
409                     }
410                     useInsn->SetOperand(i, *newMem);
411                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
412                 }
413             }
414         }
415     }
416     insn.SetOperand(0, secondOpnd);
417     cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
418 }
419 
420 void ForwardPropPattern::RemoveMopUxtwToMov(Insn &insn)
421 {
422     if (CGOptions::DoCGSSA()) {
423         CHECK_FATAL(false, "check case in ssa");
424     }
425     auto &secondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
426     auto &destOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
427     uint32 destRegNo = destOpnd.GetRegisterNumber();
428     destOpnd.SetRegisterNumber(secondOpnd.GetRegisterNumber());
429     auto *newOpnd = static_cast<RegOperand *>(destOpnd.Clone(*cgFunc.GetMemoryPool()));
430     cgFunc.InsertExtendSet(secondOpnd.GetRegisterNumber());
431     InsnSet regUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, destRegNo, true);
432     if (regUseInsnSet.size() >= 1) {
433         for (auto useInsn : regUseInsnSet) {
434             uint32 optSize = useInsn->GetOperandSize();
435             for (uint32 i = 0; i < optSize; i++) {
436                 DEBUG_ASSERT(useInsn->GetOperand(i).IsRegister(), "only design for register");
437                 if (destRegNo == static_cast<RegOperand &>(useInsn->GetOperand(i)).GetRegisterNumber()) {
438                     useInsn->SetOperand(i, *newOpnd);
439                 }
440             }
441             cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
442         }
443     }
444     insn.GetBB()->RemoveInsn(insn);
445 }
446 
447 void ForwardPropPattern::Init()
448 {
449     firstRegUseInsnSet.clear();
450 }
451 
452 void ForwardPropPattern::Run()
453 {
454     bool secondTime = false;
455     do {
456         FOR_ALL_BB(bb, &cgFunc) {
457             if (bb->IsUnreachable() || (secondTime && modifiedBB.find(bb) == modifiedBB.end())) {
458                 continue;
459             }
460 
461             if (secondTime) {
462                 modifiedBB.erase(bb);
463             }
464 
465             FOR_BB_INSNS(insn, bb) {
466                 Init();
467                 if (!CheckCondition(*insn)) {
468                     if (insn->GetMachineOpcode() == MOP_xmovrr_uxtw) {
469                         insn->SetMOP(AArch64CG::kMd[MOP_xuxtw64]);
470                     }
471                     continue;
472                 }
473                 if (insn->GetMachineOpcode() == MOP_xmovrr_uxtw) {
474                     RemoveMopUxtwToMov(*insn);
475                     continue;
476                 }
477                 Optimize(*insn);
478             }
479         }
480         secondTime = true;
481     } while (!modifiedBB.empty());
482 }
483 
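/* BackPropPattern: for a copy "mov Rd, Rs", when Rs has a single reaching definition and no
 * conflicting uses or redefinitions, rewrite that definition to produce Rd directly and drop
 * the copy, e.g. (illustrative):
 *   add  w2, w3, #4          add  w1, w3, #4
 *   mov  w1, w2       ==>    (mov removed)
 */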
484 bool BackPropPattern::CheckAndGetOpnd(const Insn &insn)
485 {
486     if (!insn.IsMachineInstruction()) {
487         return false;
488     }
489     if (!cgFunc.IsAfterRegAlloc() && (insn.GetMachineOpcode() != MOP_xmovrr) &&
490         (insn.GetMachineOpcode() != MOP_wmovrr)) {
491         return false;
492     }
493     if (cgFunc.IsAfterRegAlloc() && (insn.GetMachineOpcode() != MOP_xmovrr) &&
494         (insn.GetMachineOpcode() != MOP_wmovrr) && (insn.GetMachineOpcode() != MOP_xvmovs) &&
495         (insn.GetMachineOpcode() != MOP_xvmovd)) {
496         return false;
497     }
498     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
499     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
500     if (RegOperand::IsSameReg(firstOpnd, secondOpnd)) {
501         return false;
502     }
503     if (firstOpnd.GetSize() != secondOpnd.GetSize()) {
504         return false;
505     }
506     firstRegOpnd = &static_cast<RegOperand &>(firstOpnd);
507     secondRegOpnd = &static_cast<RegOperand &>(secondOpnd);
508     if (IsZeroRegister(*firstRegOpnd)) {
509         return false;
510     }
511     if (!cgFunc.IsAfterRegAlloc() && (!secondRegOpnd->IsVirtualRegister() || !firstRegOpnd->IsVirtualRegister())) {
512         return false;
513     }
514     firstRegNO = firstRegOpnd->GetRegisterNumber();
515     secondRegNO = secondRegOpnd->GetRegisterNumber();
516     return true;
517 }
518 
519 bool BackPropPattern::DestOpndHasUseInsns(Insn &insn)
520 {
521     BB &bb = *insn.GetBB();
522     InsnSet useInsnSetOfFirstOpnd;
523     bool findRes =
524         cgFunc.GetRD()->FindRegUseBetweenInsn(firstRegNO, insn.GetNext(), bb.GetLastInsn(), useInsnSetOfFirstOpnd);
525     if ((findRes && useInsnSetOfFirstOpnd.empty()) ||
526         (!findRes && useInsnSetOfFirstOpnd.empty() && !bb.GetLiveOut()->TestBit(firstRegNO))) {
527         return false;
528     }
529     return true;
530 }
531 
532 bool BackPropPattern::DestOpndLiveOutToEHSuccs(Insn &insn) const
533 {
534     BB &bb = *insn.GetBB();
535     for (auto ehSucc : bb.GetEhSuccs()) {
536         if (ehSucc->GetLiveIn()->TestBit(firstRegNO)) {
537             return true;
538         }
539     }
540     return false;
541 }
542 
543 bool BackPropPattern::CheckSrcOpndDefAndUseInsns(Insn &insn)
544 {
545     BB &bb = *insn.GetBB();
546     /* secondOpnd is defined in other BB */
547     std::vector<Insn *> defInsnVec =
548         cgFunc.GetRD()->FindRegDefBetweenInsn(secondRegNO, bb.GetFirstInsn(), insn.GetPrev());
549     if (defInsnVec.size() != 1) {
550         return false;
551     }
552     defInsnForSecondOpnd = defInsnVec.back();
553     /* part defined */
554     if ((defInsnForSecondOpnd->GetMachineOpcode() == MOP_xmovkri16) ||
555         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_wmovkri16) ||
556         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_asm)) {
557         return false;
558     }
559     if (AArch64isa::IsPseudoInstruction(defInsnForSecondOpnd->GetMachineOpcode()) || defInsnForSecondOpnd->IsCall()) {
560         return false;
561     }
562     /* unconcerned regs. */
563     if ((secondRegNO >= RLR && secondRegNO <= RZR) || secondRegNO == RFP) {
564         return false;
565     }
566     if (defInsnForSecondOpnd->IsStore() || defInsnForSecondOpnd->IsLoad()) {
567         auto *memOpnd = static_cast<MemOperand *>(defInsnForSecondOpnd->GetMemOpnd());
568         if (memOpnd != nullptr && !memOpnd->IsIntactIndexed()) {
569             return false;
570         }
571     }
572 
573     bool findFinish = cgFunc.GetRD()->FindRegUseBetweenInsn(secondRegNO, defInsnForSecondOpnd->GetNext(),
574                                                             bb.GetLastInsn(), srcOpndUseInsnSet);
575     if (!findFinish && bb.GetLiveOut()->TestBit(secondRegNO)) {
576         return false;
577     }
578     if (cgFunc.IsAfterRegAlloc() && findFinish && srcOpndUseInsnSet.size() > 1) {
579         /* use later before killed. */
580         return false;
581     }
582     if (cgFunc.IsAfterRegAlloc()) {
583         for (auto *usePoint : srcOpndUseInsnSet) {
584             if (usePoint->IsCall()) {
585                 return false;
586             }
587         }
588     }
589     return true;
590 }
591 
592 bool BackPropPattern::CheckSrcOpndDefAndUseInsnsGlobal(Insn &insn)
593 {
594     /* secondOpnd is defined in other BB */
595     InsnSet defInsnVec = cgFunc.GetRD()->FindDefForRegOpnd(insn, secondRegNO, true);
596     if (defInsnVec.size() != 1) {
597         return false;
598     }
599     defInsnForSecondOpnd = *(defInsnVec.begin());
600 
601     /* ensure that there is no first RegNO def/use between insn and defInsnForSecondOpnd */
602     std::vector<Insn *> defInsnVecFirst;
603 
604     if (insn.GetBB() != defInsnForSecondOpnd->GetBB()) {
605         defInsnVecFirst = cgFunc.GetRD()->FindRegDefBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, &insn);
606     } else {
607         defInsnVecFirst = cgFunc.GetRD()->FindRegDefBetweenInsn(firstRegNO, defInsnForSecondOpnd, insn.GetPrev());
608     }
609     if (!defInsnVecFirst.empty()) {
610         return false;
611     }
612     /* part defined */
613     if ((defInsnForSecondOpnd->GetMachineOpcode() == MOP_xmovkri16) ||
614         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_wmovkri16) ||
615         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_asm)) {
616         return false;
617     }
618 
619     if (defInsnForSecondOpnd->IsStore() || defInsnForSecondOpnd->IsLoad()) {
620         auto *memOpnd = static_cast<MemOperand *>(defInsnForSecondOpnd->GetMemOpnd());
621         if (memOpnd != nullptr && !memOpnd->IsIntactIndexed()) {
622             return false;
623         }
624     }
625 
626     srcOpndUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(*defInsnForSecondOpnd, secondRegNO, true);
627     /*
628      * a useInsn is not expected to have multiple definitions
629      * the replaced opnd is not expected to already have a definition
630      */
631     return CheckReplacedUseInsn(insn);
632 }
633 
634 bool BackPropPattern::CheckPredefineInsn(Insn &insn)
635 {
636     if (insn.GetPrev() == defInsnForSecondOpnd) {
637         return true;
638     }
639     std::vector<Insn *> preDefInsnForFirstOpndVec;
640     /* there is no predefine insn in current bb */
641     if (!cgFunc.GetRD()->RegIsUsedOrDefBetweenInsn(firstRegNO, *defInsnForSecondOpnd, insn)) {
642         return false;
643     }
644     return true;
645 }
646 
647 bool BackPropPattern::CheckReplacedUseInsn(Insn &insn)
648 {
649     for (auto *useInsn : srcOpndUseInsnSet) {
650         if (useInsn->GetMemOpnd() != nullptr) {
651             auto *a64MemOpnd = static_cast<MemOperand *>(useInsn->GetMemOpnd());
652             if (!a64MemOpnd->IsIntactIndexed()) {
653                 if (a64MemOpnd->GetBaseRegister() != nullptr &&
654                     a64MemOpnd->GetBaseRegister()->GetRegisterNumber() == secondRegNO) {
655                     return false;
656                 }
657             }
658         }
659         /* insn has been checked def */
660         if (useInsn == &insn) {
661             if (defInsnForSecondOpnd != useInsn->GetPrev() &&
662                 cgFunc.GetRD()->FindRegUseBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, useInsn, insn.GetBB())) {
663                 return false;
664             }
665             continue;
666         }
667         auto checkOneDefOnly = [](const InsnSet &defSet, const Insn &oneDef, bool checkHasDef = false) -> bool {
668             if (defSet.size() > 1) {
669                 return false;
670             } else if (defSet.size() == 1) {
671                 if (&oneDef != *(defSet.begin())) {
672                     return false;
673                 }
674             } else {
675                 if (checkHasDef) {
676                     CHECK_FATAL(false, "find def insn failed");
677                 }
678             }
679             return true;
680         };
681         /* ensure that the use insns to be replaced are defined by defInsnForSecondOpnd only */
682         if (useInsn->IsMemAccess() &&
683             static_cast<MemOperand *>(useInsn->GetMemOpnd())->GetIndexOpt() != MemOperand::kIntact) {
684             return false;
685         }
686         InsnSet defInsnVecOfSrcOpnd = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, secondRegNO, true);
687         if (!checkOneDefOnly(defInsnVecOfSrcOpnd, *defInsnForSecondOpnd, true)) {
688             return false;
689         }
690 
691         InsnSet defInsnVecOfFirstReg = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
692         if (!checkOneDefOnly(defInsnVecOfFirstReg, insn)) {
693             return false;
694         }
695 
696         if (defInsnForSecondOpnd != useInsn->GetPrev() &&
697             cgFunc.GetRD()->FindRegUseBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, useInsn, insn.GetBB())) {
698             return false;
699         }
700     }
701     return true;
702 }
703 
704 bool BackPropPattern::CheckRedefineInsn(Insn &insn)
705 {
706     for (auto useInsn : srcOpndUseInsnSet) {
707         Insn *startInsn = &insn;
708         Insn *endInsn = useInsn;
709         if (endInsn == startInsn) {
710             if (cgFunc.GetRD()->RegIsUsedIncaller(firstRegNO, insn, *useInsn)) {
711                 return false;
712             } else {
713                 continue;
714             }
715         }
716 
717         if (useInsn->GetBB() == insn.GetBB()) {
718             if (useInsn->GetId() < insn.GetId()) {
719                 startInsn = useInsn;
720                 endInsn = &insn;
721             }
722         }
723         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(firstRegNO, *startInsn, *endInsn, true, true)) {
724             return false;
725         }
726         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, *startInsn, *endInsn, true)) {
727             return false;
728         }
729     }
730     return true;
731 }
732 
733 bool BackPropPattern::CheckCondition(Insn &insn)
734 {
735     if (!CheckAndGetOpnd(insn)) {
736         return false;
737     }
738     /* Unless there is a reason that dest cannot live out of the current BB */
739     if (cgFunc.HasAsm() && !DestOpndHasUseInsns(insn)) {
740         return false;
741     }
742     /* first register must not be live out to eh_succs */
743     if (DestOpndLiveOutToEHSuccs(insn)) {
744         return false;
745     }
746     if (globalProp) {
747         if (!CheckSrcOpndDefAndUseInsnsGlobal(insn)) {
748             return false;
749         }
750     } else {
751         if (!CheckSrcOpndDefAndUseInsns(insn)) {
752             return false;
753         }
754         if (!CheckPredefineInsn(insn)) {
755             return false;
756         }
757         if (!CheckRedefineInsn(insn)) {
758             return false;
759         }
760     }
761     return true;
762 }
763 
764 void BackPropPattern::Optimize(Insn &insn)
765 {
766     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
767     ReplaceAllUsedOpndWithNewOpnd(srcOpndUseInsnSet, secondRegNO, firstOpnd, true);
768     /* replace define insn */
769     const InsnDesc *md = defInsnForSecondOpnd->GetDesc();
770     uint32 opndNum = defInsnForSecondOpnd->GetOperandSize();
771     for (uint32 i = 0; i < opndNum; ++i) {
772         Operand &opnd = defInsnForSecondOpnd->GetOperand(i);
773         if (!md->opndMD[i]->IsRegDef() && !opnd.IsMemoryAccessOperand()) {
774             continue;
775         }
776 
777         if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == secondRegNO)) {
778             /* remove remat info */
779             Operand &defOp = defInsnForSecondOpnd->GetOperand(i);
780             CHECK_FATAL(defOp.IsRegister(), "unexpect def opnd type");
781             auto &defRegOp = static_cast<RegOperand &>(defOp);
782             MIRPreg *preg = static_cast<AArch64CGFunc &>(cgFunc).GetPseudoRegFromVirtualRegNO(
783                 defRegOp.GetRegisterNumber(), CGOptions::DoCGSSA());
784             if (preg != nullptr) {
785                 preg->SetOp(OP_undef);
786             }
787             defInsnForSecondOpnd->SetOperand(i, firstOpnd);
788             cgFunc.GetRD()->UpdateInOut(*defInsnForSecondOpnd->GetBB());
789         } else if (opnd.IsMemoryAccessOperand()) {
790             MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
791             RegOperand *base = memOpnd.GetBaseRegister();
792             if (base != nullptr && memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
793                 (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed()) && base->GetRegisterNumber() == secondRegNO) {
794                 MemOperand *newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
795                 CHECK_FATAL(newMem != nullptr, "null ptr check");
796                 newMem->SetBaseRegister(static_cast<RegOperand &>(firstOpnd));
797                 defInsnForSecondOpnd->SetOperand(i, *newMem);
798                 cgFunc.GetRD()->UpdateInOut(*defInsnForSecondOpnd->GetBB());
799             }
800         }
801     }
802     /* There is a special implication when backward propagation is allowed for the physical register R0.
803      * This is the case where the calling func foo directly returns the result from the callee bar:
804      *
805      * foo:                            foo:
806      *   bl            // bar()          bl          // bar()
807      *   mov vreg, X0  // res = bar()    ....        // X0 may be reused, as RA sees "X0 has not been
808      *   ....          // X0 is not                  //   used" after bl. In fact, X0 is implicitly
809      *   mov X0, vreg  //   redefined    ====>       //   used by foo; we need to tell RA that X0
810      *   ret                             ret         //   is live.
811      *
812      * To keep RA simple, we tell RA not to use X0 by keeping "mov X0, X0". That is:
813      * foo:
814      *   bl            // bar()
815      *   ....          // perform backward prop of X0 and force X0 not to be reused
816      *   mov X0, X0    // this can easily be removed later in the peephole phase
817      *   ret
818      */
819     if (cgFunc.HasCall() && !(cgFunc.GetFunction().IsReturnVoid()) && (firstRegNO == R0) &&
820         (static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber() == R0)) {
821         /* Keep this instruction: mov R0, R0 */
822         cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
823         return;
824     } else {
825         insn.GetBB()->RemoveInsn(insn);
826         cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
827     }
828 }
829 
830 void BackPropPattern::Init()
831 {
832     firstRegOpnd = nullptr;
833     secondRegOpnd = nullptr;
834     firstRegNO = 0;
835     secondRegNO = 0;
836     srcOpndUseInsnSet.clear();
837     defInsnForSecondOpnd = nullptr;
838 }
839 
840 void BackPropPattern::Run()
841 {
842     bool secondTime = false;
843     std::set<BB *, BBIdCmp> modifiedBB;
844     do {
845         FOR_ALL_BB(bb, &cgFunc) {
846             if (bb->IsUnreachable() || (secondTime && modifiedBB.find(bb) == modifiedBB.end())) {
847                 continue;
848             }
849 
850             if (secondTime) {
851                 modifiedBB.erase(bb);
852             }
853 
854             FOR_BB_INSNS_REV(insn, bb) {
855                 Init();
856                 if (!CheckCondition(*insn)) {
857                     continue;
858                 }
859                 (void)modifiedBB.insert(bb);
860                 Optimize(*insn);
861             }
862         }
863         secondTime = true;
864     } while (!modifiedBB.empty());
865 }
866 
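/* CmpCsetPattern: for "cmp Rn, #0/#1" followed by "cset Rd, EQ/NE" where Rn is known to hold
 * only 0 or 1, the pair collapses into a single mov (same polarity) or "eor Rd, Rn, #1"
 * (inverted polarity), e.g. (illustrative):
 *   cmp  w0, #0
 *   cset w1, NE       ==>    mov  w1, w0
 */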
867 bool CmpCsetPattern::CheckCondition(Insn &insn)
868 {
869     nextInsn = insn.GetNextMachineInsn();
870     if (nextInsn == nullptr || !insn.IsMachineInstruction()) {
871         return false;
872     }
873 
874     MOperator firstMop = insn.GetMachineOpcode();
875     MOperator secondMop = nextInsn->GetMachineOpcode();
876     if (!(((firstMop == MOP_wcmpri) || (firstMop == MOP_xcmpri)) &&
877           ((secondMop == MOP_wcsetrc) || (secondMop == MOP_xcsetrc)))) {
878         return false;
879     }
880 
881     /* get cmp_first operand */
882     cmpFirstOpnd = &(insn.GetOperand(kInsnSecondOpnd));
883     /* get cmp second Operand, ImmOperand must be 0 or 1 */
884     cmpSecondOpnd = &(insn.GetOperand(kInsnThirdOpnd));
885     DEBUG_ASSERT(cmpSecondOpnd->IsIntImmediate(), "expects ImmOperand");
886     ImmOperand *cmpConstOpnd = static_cast<ImmOperand *>(cmpSecondOpnd);
887     cmpConstVal = cmpConstOpnd->GetValue();
888     /* get cset first Operand */
889     csetFirstOpnd = &(nextInsn->GetOperand(kInsnFirstOpnd));
890     if (((cmpConstVal != 0) && (cmpConstVal != 1)) || (cmpFirstOpnd->GetSize() != csetFirstOpnd->GetSize()) ||
891         !OpndDefByOneOrZero(insn, 1)) {
892         return false;
893     }
894 
895     InsnSet useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, 0, false);
896     if (useInsnSet.size() > 1) {
897         return false;
898     }
899     return true;
900 }
901 
902 void CmpCsetPattern::Optimize(Insn &insn)
903 {
904     Insn *csetInsn = nextInsn;
905     BB &bb = *insn.GetBB();
906     nextInsn = nextInsn->GetNextMachineInsn();
907     /* get condition Operand */
908     CondOperand &cond = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
909     if (((cmpConstVal == 0) && (cond.GetCode() == CC_NE)) || ((cmpConstVal == 1) && (cond.GetCode() == CC_EQ))) {
910         if (RegOperand::IsSameReg(*cmpFirstOpnd, *csetFirstOpnd)) {
911             bb.RemoveInsn(insn);
912             bb.RemoveInsn(*csetInsn);
913         } else {
914             MOperator mopCode = (cmpFirstOpnd->GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
915             Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, *csetFirstOpnd, *cmpFirstOpnd);
916             newInsn.SetId(insn.GetId());
917             bb.ReplaceInsn(insn, newInsn);
918             bb.RemoveInsn(*csetInsn);
919         }
920     } else if (((cmpConstVal == 1) && (cond.GetCode() == CC_NE)) || ((cmpConstVal == 0) && (cond.GetCode() == CC_EQ))) {
921         MOperator mopCode = (cmpFirstOpnd->GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
922         constexpr int64 eorImm = 1;
923         auto &aarch64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
924         ImmOperand &one = aarch64CGFunc.CreateImmOperand(eorImm, k8BitSize, false);
925         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, *csetFirstOpnd, *cmpFirstOpnd, one);
926         newInsn.SetId(insn.GetId());
927         bb.ReplaceInsn(insn, newInsn);
928         bb.RemoveInsn(*csetInsn);
929     }
930     cgFunc.GetRD()->UpdateInOut(bb, true);
931 }
932 
933 void CmpCsetPattern::Init()
934 {
935     cmpConstVal = 0;
936     cmpFirstOpnd = nullptr;
937     cmpSecondOpnd = nullptr;
938     csetFirstOpnd = nullptr;
939 }
940 
941 void CmpCsetPattern::Run()
942 {
943     FOR_ALL_BB(bb, &cgFunc) {
944         FOR_BB_INSNS(insn, bb) {
945             Init();
946             if (!CheckCondition(*insn)) {
947                 continue;
948             }
949             Optimize(*insn);
950         }
951     }
952 }
953 
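/* CselPattern: a csel whose two inputs are known to be the constants 1 and 0 is equivalent to a
 * cset on the same (or inverted) condition, e.g. (illustrative, with w1 == 1 and w2 == 0):
 *   csel w0, w1, w2, EQ   ==>   cset w0, EQ
 */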
954 bool CselPattern::CheckCondition(Insn &insn)
955 {
956     MOperator mopCode = insn.GetMachineOpcode();
957     if ((mopCode != MOP_xcselrrrc) && (mopCode != MOP_wcselrrrc)) {
958         return false;
959     }
960     return true;
961 }
962 
963 void CselPattern::Optimize(Insn &insn)
964 {
965     BB &bb = *insn.GetBB();
966     Operand &opnd0 = insn.GetOperand(kInsnFirstOpnd);
967     Operand &cond = insn.GetOperand(kInsnFourthOpnd);
968     MOperator newMop = ((opnd0.GetSize()) == k64BitSize ? MOP_xcsetrc : MOP_wcsetrc);
969     Operand &rflag = cgFunc.GetOrCreateRflag();
970     if (OpndDefByOne(insn, kInsnSecondOpnd) && OpndDefByZero(insn, kInsnThirdOpnd)) {
971         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, opnd0, cond, rflag);
972         newInsn.SetId(insn.GetId());
973         bb.ReplaceInsn(insn, newInsn);
974         cgFunc.GetRD()->InitGenUse(bb, false);
975     } else if (OpndDefByZero(insn, kInsnSecondOpnd) && OpndDefByOne(insn, kInsnThirdOpnd)) {
976         auto &originCond = static_cast<CondOperand &>(cond);
977         ConditionCode inverseCondCode = GetReverseBasicCC(originCond.GetCode());
978         if (inverseCondCode == kCcLast) {
979             return;
980         }
981         auto &aarchCGFunc = static_cast<AArch64CGFunc &>(cgFunc);
982         CondOperand &inverseCond = aarchCGFunc.GetCondOperand(inverseCondCode);
983         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, opnd0, inverseCond, rflag);
984         newInsn.SetId(insn.GetId());
985         bb.ReplaceInsn(insn, newInsn);
986         cgFunc.GetRD()->InitGenUse(bb, false);
987     }
988 }
989 
990 void CselPattern::Run()
991 {
992     FOR_ALL_BB(bb, &cgFunc) {
993         FOR_BB_INSNS_SAFE(insn, bb, nextInsn) {
994             if (!CheckCondition(*insn)) {
995                 continue;
996             }
997             Optimize(*insn);
998         }
999     }
1000 }
1001 
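/* RedundantUxtPattern: remove a uxtb/uxth whose source operand is already known to carry at
 * most 8/16 valid bits, propagating the source register to all uses of the destination.
 */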
1002 uint32 RedundantUxtPattern::GetInsnValidBit(const Insn &insn)
1003 {
1004     MOperator mOp = insn.GetMachineOpcode();
1005     uint32 nRet;
1006     switch (mOp) {
1007         case MOP_wcsetrc:
1008         case MOP_xcsetrc:
1009             nRet = 1;
1010             break;
1011         case MOP_wldrb:
1012         case MOP_wldarb:
1013         case MOP_wldxrb:
1014         case MOP_wldaxrb:
1015             nRet = k8BitSize;
1016             break;
1017         case MOP_wldrh:
1018         case MOP_wldarh:
1019         case MOP_wldxrh:
1020         case MOP_wldaxrh:
1021             nRet = k16BitSize;
1022             break;
1023         case MOP_wmovrr:
1024         case MOP_wmovri32:
1025         case MOP_wldrsb:
1026         case MOP_wldrsh:
1027         case MOP_wldli:
1028         case MOP_wldr:
1029         case MOP_wldp:
1030         case MOP_wldar:
1031         case MOP_wmovkri16:
1032         case MOP_wmovzri16:
1033         case MOP_wmovnri16:
1034         case MOP_wldxr:
1035         case MOP_wldaxr:
1036         case MOP_wldaxp:
1037         case MOP_wcsincrrrc:
1038         case MOP_wcselrrrc:
1039         case MOP_wcsinvrrrc:
1040             nRet = k32BitSize;
1041             break;
1042         default:
1043             nRet = k64BitSize;
1044             break;
1045     }
1046     return nRet;
1047 }
1048 
1049 uint32 RedundantUxtPattern::GetMaximumValidBit(Insn &insn, uint8 index, InsnSet &visitedInsn) const
1050 {
1051     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, index);
1052     if (defInsnSet.empty()) {
1053         /* disable opt when there is no def point. */
1054         return k64BitSize;
1055     }
1056 
1057     uint32 validBit = 0;
1058     uint32 nMaxValidBit = 0;
1059     for (auto &defInsn : defInsnSet) {
1060         if (visitedInsn.find(defInsn) != visitedInsn.end()) {
1061             continue;
1062         }
1063 
1064         (void)visitedInsn.insert(defInsn);
1065         MOperator mOp = defInsn->GetMachineOpcode();
1066         if ((mOp == MOP_wmovrr) || (mOp == MOP_xmovrr)) {
1067             validBit = GetMaximumValidBit(*defInsn, 1, visitedInsn);
1068         } else {
1069             validBit = GetInsnValidBit(*defInsn);
1070         }
1071 
1072         nMaxValidBit = nMaxValidBit < validBit ? validBit : nMaxValidBit;
1073     }
1074     return nMaxValidBit;
1075 }
1076 
1077 bool RedundantUxtPattern::CheckCondition(Insn &insn)
1078 {
1079     BB &bb = *insn.GetBB();
1080     InsnSet visitedInsn1;
1081     InsnSet visitedInsn2;
1082     if (!((insn.GetMachineOpcode() == MOP_xuxth32 &&
1083            GetMaximumValidBit(insn, kInsnSecondOpnd, visitedInsn1) <= k16BitSize) ||
1084           (insn.GetMachineOpcode() == MOP_xuxtb32 &&
1085            GetMaximumValidBit(insn, kInsnSecondOpnd, visitedInsn2) <= k8BitSize))) {
1086         return false;
1087     }
1088 
1089     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
1090     secondOpnd = &(insn.GetOperand(kInsnSecondOpnd));
1091     if (RegOperand::IsSameReg(firstOpnd, *secondOpnd)) {
1092         bb.RemoveInsn(insn);
1093         /* update in/out */
1094         cgFunc.GetRD()->UpdateInOut(bb, true);
1095         return false;
1096     }
1097     useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, 0, false);
1098     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
1099     firstRegNO = firstRegOpnd.GetRegisterNumber();
1100     /* e.g. for "uxth R1, V501": R1 is a parameter register, so this can't be optimized. */
1101     if (firstRegOpnd.IsPhysicalRegister()) {
1102         return false;
1103     }
1104 
1105     if (useInsnSet.empty()) {
1106         bb.RemoveInsn(insn);
1107         /* update in/out */
1108         cgFunc.GetRD()->UpdateInOut(bb, true);
1109         return false;
1110     }
1111 
1112     RegOperand *secondRegOpnd = static_cast<RegOperand *>(secondOpnd);
1113     DEBUG_ASSERT(secondRegOpnd != nullptr, "secondRegOpnd should not be nullptr");
1114     uint32 secondRegNO = secondRegOpnd->GetRegisterNumber();
1115     for (auto useInsn : useInsnSet) {
1116         InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
1117         if ((defInsnSet.size() > 1) || !(cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, insn, *useInsn))) {
1118             return false;
1119         }
1120     }
1121     return true;
1122 }
1123 
1124 void RedundantUxtPattern::Optimize(Insn &insn)
1125 {
1126     BB &bb = *insn.GetBB();
1127     ReplaceAllUsedOpndWithNewOpnd(useInsnSet, firstRegNO, *secondOpnd, true);
1128     bb.RemoveInsn(insn);
1129     cgFunc.GetRD()->UpdateInOut(bb, true);
1130 }
1131 
1132 void RedundantUxtPattern::Init()
1133 {
1134     useInsnSet.clear();
1135     secondOpnd = nullptr;
1136 }
1137 
1138 void RedundantUxtPattern::Run()
1139 {
1140     FOR_ALL_BB(bb, &cgFunc) {
1141         if (bb->IsUnreachable()) {
1142             continue;
1143         }
1144         FOR_BB_INSNS_SAFE(insn, bb, nextInsn) {
1145             Init();
1146             if (!CheckCondition(*insn)) {
1147                 continue;
1148             }
1149             Optimize(*insn);
1150         }
1151     }
1152 }
1153 
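/* LocalVarSaveInsnPattern: after a call, the sequence "mov vreg, R0; str vreg, [stack]" with a
 * single, distant use of vreg keeps the returned value live in a register over a long range.
 * The pattern stores R0 directly instead and reloads the stack slot right before the use.
 */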
1154 bool LocalVarSaveInsnPattern::CheckFirstInsn(const Insn &firstInsn)
1155 {
1156     MOperator mOp = firstInsn.GetMachineOpcode();
1157     if (mOp != MOP_xmovrr && mOp != MOP_wmovrr) {
1158         return false;
1159     }
1160     firstInsnSrcOpnd = &(firstInsn.GetOperand(kInsnSecondOpnd));
1161     RegOperand *firstInsnSrcReg = static_cast<RegOperand *>(firstInsnSrcOpnd);
1162     if (firstInsnSrcReg->GetRegisterNumber() != R0) {
1163         return false;
1164     }
1165     firstInsnDestOpnd = &(firstInsn.GetOperand(kInsnFirstOpnd));
1166     RegOperand *firstInsnDestReg = static_cast<RegOperand *>(firstInsnDestOpnd);
1167     if (firstInsnDestReg->IsPhysicalRegister()) {
1168         return false;
1169     }
1170     return true;
1171 }
1172 
1173 bool LocalVarSaveInsnPattern::CheckSecondInsn()
1174 {
1175     MOperator mOp = secondInsn->GetMachineOpcode();
1176     if (mOp != MOP_wstr && mOp != MOP_xstr) {
1177         return false;
1178     }
1179     secondInsnSrcOpnd = &(secondInsn->GetOperand(kInsnFirstOpnd));
1180     if (!RegOperand::IsSameReg(*firstInsnDestOpnd, *secondInsnSrcOpnd)) {
1181         return false;
1182     }
1183     /* check that the memOperand is a stack memOperand, i.e. x0 is stored in the local-ref var region */
1184     secondInsnDestOpnd = &(secondInsn->GetOperand(kInsnSecondOpnd));
1185     MemOperand *secondInsnDestMem = static_cast<MemOperand *>(secondInsnDestOpnd);
1186     RegOperand *baseReg = secondInsnDestMem->GetBaseRegister();
1187     RegOperand *indexReg = secondInsnDestMem->GetIndexRegister();
1188     if ((baseReg == nullptr) || !(cgFunc.IsFrameReg(*baseReg)) || (indexReg != nullptr)) {
1189         return false;
1190     }
1191     return true;
1192 }
1193 
1194 bool LocalVarSaveInsnPattern::CheckAndGetUseInsn(Insn &firstInsn)
1195 {
1196     InsnSet useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(firstInsn, kInsnFirstOpnd, false);
1197     if (useInsnSet.size() != 2) { /* 2 for secondInsn and another useInsn */
1198         return false;
1199     }
1200 
1201     /* useInsnSet includes secondInsn and another useInsn */
1202     for (auto tmpUseInsn : useInsnSet) {
1203         if (tmpUseInsn->GetId() != secondInsn->GetId()) {
1204             useInsn = tmpUseInsn;
1205             break;
1206         }
1207     }
1208     return true;
1209 }
1210 
1211 bool LocalVarSaveInsnPattern::CheckLiveRange(const Insn &firstInsn)
1212 {
1213     uint32 maxInsnNO = cgFunc.GetRD()->GetMaxInsnNO();
1214     uint32 useInsnID = useInsn->GetId();
1215     uint32 defInsnID = firstInsn.GetId();
1216     uint32 distance = useInsnID > defInsnID ? useInsnID - defInsnID : defInsnID - useInsnID;
1217     float liveRangeProportion = static_cast<float>(distance) / maxInsnNO;
1218     /* 0.3 is a balance for real optimization effect */
1219     if (liveRangeProportion < 0.3) {
1220         return false;
1221     }
1222     return true;
1223 }
1224 
1225 bool LocalVarSaveInsnPattern::CheckCondition(Insn &firstInsn)
1226 {
1227     secondInsn = firstInsn.GetNext();
1228     if (secondInsn == nullptr) {
1229         return false;
1230     }
1231     /* check firstInsn is : mov vreg, R0; */
1232     if (!CheckFirstInsn(firstInsn)) {
1233         return false;
1234     }
1235     /* check the secondInsn is : str vreg, stackMem */
1236     if (!CheckSecondInsn()) {
1237         return false;
1238     }
1239     /* find the uses of the vreg */
1240     if (!CheckAndGetUseInsn(firstInsn)) {
1241         return false;
1242     }
1243     /* simulate live range using insn distance */
1244     if (!CheckLiveRange(firstInsn)) {
1245         return false;
1246     }
1247     RegOperand *firstInsnDestReg = static_cast<RegOperand *>(firstInsnDestOpnd);
1248     regno_t firstInsnDestRegNO = firstInsnDestReg->GetRegisterNumber();
1249     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstInsnDestRegNO, true);
1250     if (defInsnSet.size() != 1) {
1251         return false;
1252     }
1253     DEBUG_ASSERT((*(defInsnSet.begin()))->GetId() == firstInsn.GetId(), "useInsn has only one define Insn : firstInsn");
1254     /* check whether the stack mem is changed or not */
1255     MemOperand *secondInsnDestMem = static_cast<MemOperand *>(secondInsnDestOpnd);
1256     int64 memOffset = secondInsnDestMem->GetOffsetImmediate()->GetOffsetValue();
1257     InsnSet memDefInsnSet = cgFunc.GetRD()->FindDefForMemOpnd(*useInsn, memOffset, true);
1258     if (memDefInsnSet.size() != 1) {
1259         return false;
1260     }
1261     if ((*(memDefInsnSet.begin()))->GetId() != secondInsn->GetId()) {
1262         return false;
1263     }
1264     /* check whether there is a call between def and use */
1265     if (!cgFunc.GetRD()->HasCallBetweenDefUse(firstInsn, *useInsn)) {
1266         return false;
1267     }
1268     return true;
1269 }
1270 
1271 void LocalVarSaveInsnPattern::Optimize(Insn &insn)
1272 {
1273     /* insert ldr insn before useInsn */
1274     MOperator ldrOpCode = secondInsnSrcOpnd->GetSize() == k64BitSize ? MOP_xldr : MOP_wldr;
1275     Insn &ldrInsn = cgFunc.GetInsnBuilder()->BuildInsn(ldrOpCode, *secondInsnSrcOpnd, *secondInsnDestOpnd);
1276     ldrInsn.SetId(useInsn->GetId() - 1);
1277     useInsn->GetBB()->InsertInsnBefore(*useInsn, ldrInsn);
1278     cgFunc.GetRD()->UpdateInOut(*useInsn->GetBB(), true);
1279     secondInsn->SetOperand(kInsnFirstOpnd, *firstInsnSrcOpnd);
1280     BB *saveInsnBB = insn.GetBB();
1281     saveInsnBB->RemoveInsn(insn);
1282     cgFunc.GetRD()->UpdateInOut(*saveInsnBB, true);
1283 }
1284 
1285 void LocalVarSaveInsnPattern::Init()
1286 {
1287     firstInsnSrcOpnd = nullptr;
1288     firstInsnDestOpnd = nullptr;
1289     secondInsnSrcOpnd = nullptr;
1290     secondInsnDestOpnd = nullptr;
1291     useInsn = nullptr;
1292     secondInsn = nullptr;
1293 }
1294 
1295 void LocalVarSaveInsnPattern::Run()
1296 {
1297     FOR_ALL_BB(bb, &cgFunc) {
1298         if (bb->IsCleanup()) {
1299             continue;
1300         }
1301         FOR_BB_INSNS(insn, bb) {
1302             if (!insn->IsMachineInstruction()) {
1303                 continue;
1304             }
1305             if (!insn->IsCall()) {
1306                 continue;
1307             }
1308             Insn *firstInsn = insn->GetNextMachineInsn();
1309             if (firstInsn == nullptr) {
1310                 continue;
1311             }
1312             Init();
1313             if (!CheckCondition(*firstInsn)) {
1314                 continue;
1315             }
1316             Optimize(*firstInsn);
1317         }
1318     }
1319 }
1320 
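/* ExtendShiftOptPattern helpers: classify the use insn so that a separate extend or shift that
 * defines one of its source registers can be folded into the extended-/shifted-register form,
 * e.g. (illustrative):
 *   sxtw x1, w2
 *   add  x0, x3, x1   ==>   add x0, x3, w2, SXTW
 */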
1321 void ExtendShiftOptPattern::SetExMOpType(const Insn &use)
1322 {
1323     MOperator op = use.GetMachineOpcode();
1324     switch (op) {
1325         case MOP_xaddrrr:
1326         case MOP_xxwaddrrre:
1327         case MOP_xaddrrrs: {
1328             exMOpType = kExAdd;
1329             break;
1330         }
1331         case MOP_waddrrr:
1332         case MOP_wwwaddrrre:
1333         case MOP_waddrrrs: {
1334             exMOpType = kEwAdd;
1335             break;
1336         }
1337         case MOP_xsubrrr:
1338         case MOP_xxwsubrrre:
1339         case MOP_xsubrrrs: {
1340             exMOpType = kExSub;
1341             break;
1342         }
1343         case MOP_wsubrrr:
1344         case MOP_wwwsubrrre:
1345         case MOP_wsubrrrs: {
1346             exMOpType = kEwSub;
1347             break;
1348         }
1349         case MOP_xcmnrr:
1350         case MOP_xwcmnrre:
1351         case MOP_xcmnrrs: {
1352             exMOpType = kExCmn;
1353             break;
1354         }
1355         case MOP_wcmnrr:
1356         case MOP_wwcmnrre:
1357         case MOP_wcmnrrs: {
1358             exMOpType = kEwCmn;
1359             break;
1360         }
1361         case MOP_xcmprr:
1362         case MOP_xwcmprre:
1363         case MOP_xcmprrs: {
1364             exMOpType = kExCmp;
1365             break;
1366         }
1367         case MOP_wcmprr:
1368         case MOP_wwcmprre:
1369         case MOP_wcmprrs: {
1370             exMOpType = kEwCmp;
1371             break;
1372         }
1373         default: {
1374             exMOpType = kExUndef;
1375         }
1376     }
1377 }
1378 
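/*
 * Same idea for the shifted-register forms: map the use insn's opcode to an
 * lsMOpType index into lsMOpTable. For neg the shifted source sits in the
 * second operand, so replaceIdx is switched to kInsnSecondOpnd.
 */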
1379 void ExtendShiftOptPattern::SetLsMOpType(const Insn &use)
1380 {
1381     MOperator op = use.GetMachineOpcode();
1382     switch (op) {
1383         case MOP_xaddrrr:
1384         case MOP_xaddrrrs: {
1385             lsMOpType = kLxAdd;
1386             break;
1387         }
1388         case MOP_waddrrr:
1389         case MOP_waddrrrs: {
1390             lsMOpType = kLwAdd;
1391             break;
1392         }
1393         case MOP_xsubrrr:
1394         case MOP_xsubrrrs: {
1395             lsMOpType = kLxSub;
1396             break;
1397         }
1398         case MOP_wsubrrr:
1399         case MOP_wsubrrrs: {
1400             lsMOpType = kLwSub;
1401             break;
1402         }
1403         case MOP_xcmnrr:
1404         case MOP_xcmnrrs: {
1405             lsMOpType = kLxCmn;
1406             break;
1407         }
1408         case MOP_wcmnrr:
1409         case MOP_wcmnrrs: {
1410             lsMOpType = kLwCmn;
1411             break;
1412         }
1413         case MOP_xcmprr:
1414         case MOP_xcmprrs: {
1415             lsMOpType = kLxCmp;
1416             break;
1417         }
1418         case MOP_wcmprr:
1419         case MOP_wcmprrs: {
1420             lsMOpType = kLwCmp;
1421             break;
1422         }
1423         case MOP_xeorrrr:
1424         case MOP_xeorrrrs: {
1425             lsMOpType = kLxEor;
1426             break;
1427         }
1428         case MOP_weorrrr:
1429         case MOP_weorrrrs: {
1430             lsMOpType = kLwEor;
1431             break;
1432         }
1433         case MOP_xinegrr:
1434         case MOP_xinegrrs: {
1435             lsMOpType = kLxNeg;
1436             replaceIdx = kInsnSecondOpnd;
1437             break;
1438         }
1439         case MOP_winegrr:
1440         case MOP_winegrrs: {
1441             lsMOpType = kLwNeg;
1442             replaceIdx = kInsnSecondOpnd;
1443             break;
1444         }
1445         case MOP_xiorrrr:
1446         case MOP_xiorrrrs: {
1447             lsMOpType = kLxIor;
1448             break;
1449         }
1450         case MOP_wiorrrr:
1451         case MOP_wiorrrrs: {
1452             lsMOpType = kLwIor;
1453             break;
1454         }
1455         default: {
1456             lsMOpType = kLsUndef;
1457         }
1458     }
1459 }
1460 
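/*
 * Classify the defining insn: sign/zero extensions set extendOp, immediate
 * lsl/lsr/asr set shiftOp; any other opcode leaves both undefined, so the
 * pattern is rejected in CheckCondition.
 */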
1461 void ExtendShiftOptPattern::SelectExtendOrShift(const Insn &def)
1462 {
1463     MOperator op = def.GetMachineOpcode();
1464     switch (op) {
1465         case MOP_xsxtb32:
1466         case MOP_xsxtb64:
1467             extendOp = ExtendShiftOperand::kSXTB;
1468             break;
1469         case MOP_xsxth32:
1470         case MOP_xsxth64:
1471             extendOp = ExtendShiftOperand::kSXTH;
1472             break;
1473         case MOP_xsxtw64:
1474             extendOp = ExtendShiftOperand::kSXTW;
1475             break;
1476         case MOP_xuxtb32:
1477             extendOp = ExtendShiftOperand::kUXTB;
1478             break;
1479         case MOP_xuxth32:
1480             extendOp = ExtendShiftOperand::kUXTH;
1481             break;
1482         case MOP_xuxtw64:
1483             extendOp = ExtendShiftOperand::kUXTW;
1484             break;
1485         case MOP_wlslrri5:
1486         case MOP_xlslrri6:
1487             shiftOp = BitShiftOperand::kLSL;
1488             break;
1489         case MOP_xlsrrri6:
1490         case MOP_wlsrrri5:
1491             shiftOp = BitShiftOperand::kLSR;
1492             break;
1493         case MOP_xasrrri6:
1494         case MOP_wasrrri5:
1495             shiftOp = BitShiftOperand::kASR;
1496             break;
1497         default: {
1498             extendOp = ExtendShiftOperand::kUndef;
1499             shiftOp = BitShiftOperand::kUndef;
1500         }
1501     }
1502 }
1503 
1504 /* the def insn must already have been matched by SelectExtendOrShift */
1505 bool ExtendShiftOptPattern::CheckDefUseInfo(Insn &use, uint32 size)
1506 {
1507     auto &regOperand = static_cast<RegOperand &>(defInsn->GetOperand(kInsnFirstOpnd));
1508     Operand &defSrcOpnd = defInsn->GetOperand(kInsnSecondOpnd);
1509     CHECK_FATAL(defSrcOpnd.IsRegister(), "defSrcOpnd must be register!");
1510     auto &regDefSrc = static_cast<RegOperand &>(defSrcOpnd);
1511     if (regDefSrc.IsPhysicalRegister()) {
1512         return false;
1513     }
1514     /*
1515      * there is an implicit conversion (cvt)
1516      *
1517      * avoid cases like the following:
1518      *   lsr  x2, x2, #8
1519      *   ubfx w2, x2, #0, #32                lsr  x2, x2, #8
1520      *   eor  w0, w0, w2           ===>      eor  w0, w0, x2     ==\=>  eor w0, w0, w2, LSR #8
1521      *
1522      * the truncation makes a folded right shift produce a wrong value;
1523      * a left shift is not affected
1524      */
1525     auto &useDefOpnd = static_cast<RegOperand &>(use.GetOperand(kInsnFirstOpnd));
1526     if ((shiftOp != BitShiftOperand::kUndef || extendOp != ExtendShiftOperand::kUndef) &&
1527         (regDefSrc.GetSize() > regOperand.GetSize() || useDefOpnd.GetSize() != size)) {
1528         return false;
1529     }
1530     if ((shiftOp == BitShiftOperand::kLSR || shiftOp == BitShiftOperand::kASR) && (defSrcOpnd.GetSize() > size)) {
1531         return false;
1532     }
1533     regno_t defSrcRegNo = regDefSrc.GetRegisterNumber();
1534     /* check regDefSrc */
1535     InsnSet defSrcSet = cgFunc.GetRD()->FindDefForRegOpnd(use, defSrcRegNo, true);
1536     /* The first defSrcInsn must be closest to useInsn */
1537     if (defSrcSet.empty()) {
1538         return false;
1539     }
1540     Insn *defSrcInsn = *defSrcSet.begin();
1541     const InsnDesc *md = defSrcInsn->GetDesc();
1542     if ((size != regOperand.GetSize()) && md->IsMove()) {
1543         return false;
1544     }
1545     if (defInsn->GetBB() == use.GetBB()) {
1546         /* check replace reg def between defInsn and currInsn */
1547         Insn *tmpInsn = defInsn->GetNext();
1548         while (tmpInsn != &use) {
1549             if (tmpInsn == defSrcInsn || tmpInsn == nullptr) {
1550                 return false;
1551             }
1552             tmpInsn = tmpInsn->GetNext();
1553         }
1554     } else { /* def use not in same BB */
1555         if (defSrcInsn->GetBB() != defInsn->GetBB()) {
1556             return false;
1557         }
1558         if (defSrcInsn->GetId() > defInsn->GetId()) {
1559             return false;
1560         }
1561     }
1562     /* case:
1563      * lsl w0, w0, #5
1564      * eor w0, w2, w0
1565      * --->
1566      * eor w0, w2, w0, lsl 5
1567      */
1568     if (defSrcInsn == defInsn) {
1569         InsnSet replaceRegUseSet = cgFunc.GetRD()->FindUseForRegOpnd(*defInsn, defSrcRegNo, true);
1570         if (replaceRegUseSet.size() != k1BitSize) {
1571             return false;
1572         }
1573         removeDefInsn = true;
1574     }
1575     return true;
1576 }
1577 
1578 /* Check whether ExtendShiftOptPattern optimization can be performed. */
1579 ExtendShiftOptPattern::SuffixType ExtendShiftOptPattern::CheckOpType(const Operand &lastOpnd) const
1580 {
1581     /* Assign values to useType and defType. */
1582     uint32 useType = ExtendShiftOptPattern::kNoSuffix;
1583     uint32 defType = shiftOp;
1584     if (extendOp != ExtendShiftOperand::kUndef) {
1585         defType = ExtendShiftOptPattern::kExten;
1586     }
1587     if (lastOpnd.IsOpdShift()) {
1588         BitShiftOperand lastShiftOpnd = static_cast<const BitShiftOperand &>(lastOpnd);
1589         useType = lastShiftOpnd.GetShiftOp();
1590     } else if (lastOpnd.IsOpdExtend()) {
1591         ExtendShiftOperand lastExtendOpnd = static_cast<const ExtendShiftOperand &>(lastOpnd);
1592         useType = ExtendShiftOptPattern::kExten;
1593         /* both the use suffix and the def are extensions; fold only if they use the same extend op */
1594         if (useType == defType && extendOp != lastExtendOpnd.GetExtendOp()) {
1595             return ExtendShiftOptPattern::kNoSuffix;
1596         }
1597     }
1598     return doOptimize[useType][defType];
1599 }
1600 
1601 /* new Insn extenType:
1602  * =====================
1603  * (useMop)   (defMop) (newmop)
1604  * | nosuffix |  all  | all|
1605  * | exten    |  ex   | ex |
1606  * |  ls      |  ex   | ls |
1607  * |  asr     |  !asr | F  |
1608  * |  !asr    |  asr  | F  |
1609  * (useMop)   (defMop)
1610  * =====================
1611  */
1612 void ExtendShiftOptPattern::ReplaceUseInsn(Insn &use, const Insn &def, uint32 amount)
1613 {
1614     AArch64CGFunc &a64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
1615     uint32 lastIdx = use.GetOperandSize() - k1BitSize;
1616     Operand &lastOpnd = use.GetOperand(lastIdx);
1617     ExtendShiftOptPattern::SuffixType optType = CheckOpType(lastOpnd);
1618     Operand *shiftOpnd = nullptr;
1619     if (optType == ExtendShiftOptPattern::kNoSuffix) {
1620         return;
1621     } else if (optType == ExtendShiftOptPattern::kExten) {
1622         replaceOp = exMOpTable[exMOpType];
1623         if (amount > k4BitSize) {
1624             return;
1625         }
1626         shiftOpnd = &a64CGFunc.CreateExtendShiftOperand(extendOp, amount, static_cast<int32>(k64BitSize));
1627     } else {
1628         replaceOp = lsMOpTable[lsMOpType];
1629         if (amount >= k32BitSize) {
1630             return;
1631         }
1632         shiftOpnd = &a64CGFunc.CreateBitShiftOperand(shiftOp, amount, static_cast<int32>(k64BitSize));
1633     }
1634     if (replaceOp == MOP_undef) {
1635         return;
1636     }
1637 
1638     Insn *replaceUseInsn = nullptr;
1639     Operand &firstOpnd = use.GetOperand(kInsnFirstOpnd);
1640     Operand *secondOpnd = &use.GetOperand(kInsnSecondOpnd);
1641     if (replaceIdx == kInsnSecondOpnd) { /* replace neg insn */
1642         secondOpnd = &def.GetOperand(kInsnSecondOpnd);
1643         replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, *shiftOpnd);
1644     } else {
1645         Operand &thirdOpnd = def.GetOperand(kInsnSecondOpnd);
1646         replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, thirdOpnd, *shiftOpnd);
1647     }
1648     use.GetBB()->ReplaceInsn(use, *replaceUseInsn);
1649     if (GLOBAL_DUMP) {
1650         LogInfo::MapleLogger() << ">>>>>>> In ExtendShiftOptPattern : <<<<<<<\n";
1651         LogInfo::MapleLogger() << "=======ReplaceInsn :\n";
1652         use.Dump();
1653         LogInfo::MapleLogger() << "=======NewInsn :\n";
1654         replaceUseInsn->Dump();
1655     }
1656     if (removeDefInsn) {
1657         if (GLOBAL_DUMP) {
1658             LogInfo::MapleLogger() << ">>>>>>> In ExtendShiftOptPattern : <<<<<<<\n";
1659             LogInfo::MapleLogger() << "=======RemoveDefInsn :\n";
1660             defInsn->Dump();
1661         }
1662         defInsn->GetBB()->RemoveInsn(*defInsn);
1663     }
1664     cgFunc.GetRD()->InitGenUse(*defInsn->GetBB(), false);
1665     cgFunc.GetRD()->UpdateInOut(*use.GetBB(), true);
1666     newInsn = replaceUseInsn;
1667     optSuccess = true;
1668 }
1669 
1670 /*
1671  * pattern1:
1672  * UXTB/UXTW X0, W1              <---- def x0
1673  * ....                          <---- (X0 not used)
1674  * AND/SUB/EOR X0, X1, X0        <---- use x0
1675  * ======>
1676  * AND/SUB/EOR X0, X1, W1 UXTB/UXTW
1677  *
1678  * pattern2:
1679  * LSL/LSR X0, X1, #8
1680  * ....(X0 not used)
1681  * AND/SUB/EOR X0, X1, X0
1682  * ======>
1683  * AND/SUB/EOR X0, X1, X1 LSL/LSR #8
1684  */
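/*
 * The new shift amount is the def insn's immediate plus any amount already on
 * the use insn's suffix, e.g.
 *   lsl w1, w2, #5
 *   eor w0, w3, w1, LSL #3    ===>    eor w0, w3, w2, LSL #8
 */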
1685 void ExtendShiftOptPattern::Optimize(Insn &insn)
1686 {
1687     uint32 amount = 0;
1688     uint32 offset = 0;
1689     uint32 lastIdx = insn.GetOperandSize() - k1BitSize;
1690     Operand &lastOpnd = insn.GetOperand(lastIdx);
1691     if (lastOpnd.IsOpdShift()) {
1692         BitShiftOperand &lastShiftOpnd = static_cast<BitShiftOperand &>(lastOpnd);
1693         amount = lastShiftOpnd.GetShiftAmount();
1694     } else if (lastOpnd.IsOpdExtend()) {
1695         ExtendShiftOperand &lastExtendOpnd = static_cast<ExtendShiftOperand &>(lastOpnd);
1696         amount = lastExtendOpnd.GetShiftAmount();
1697     }
1698     if (shiftOp != BitShiftOperand::kUndef) {
1699         ImmOperand &immOpnd = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1700         offset = static_cast<uint32>(immOpnd.GetValue());
1701     }
1702     amount += offset;
1703 
1704     ReplaceUseInsn(insn, *defInsn, amount);
1705 }
1706 
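/*
 * Apply the pattern repeatedly: after a successful rewrite the new insn may
 * itself have a foldable extend/shift def, so it is checked again.
 */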
1707 void ExtendShiftOptPattern::DoExtendShiftOpt(Insn &insn)
1708 {
1709     Init();
1710     if (!CheckCondition(insn)) {
1711         return;
1712     }
1713     Optimize(insn);
1714     if (optSuccess) {
1715         DoExtendShiftOpt(*newInsn);
1716     }
1717 }
1718 
1719 /* check and set:
1720  * exMOpType, lsMOpType, extendOp, shiftOp, defInsn
1721  */
1722 bool ExtendShiftOptPattern::CheckCondition(Insn &insn)
1723 {
1724     SetLsMOpType(insn);
1725     SetExMOpType(insn);
1726     if ((exMOpType == kExUndef) && (lsMOpType == kLsUndef)) {
1727         return false;
1728     }
1729     RegOperand &regOperand = static_cast<RegOperand &>(insn.GetOperand(replaceIdx));
1730     if (regOperand.IsPhysicalRegister()) {
1731         return false;
1732     }
1733     regno_t regNo = regOperand.GetRegisterNumber();
1734     InsnSet regDefInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, regNo, true);
1735     if (regDefInsnSet.size() != k1BitSize) {
1736         return false;
1737     }
1738     defInsn = *regDefInsnSet.begin();
1739     CHECK_FATAL((defInsn != nullptr), "defInsn is null!");
1740 
1741     SelectExtendOrShift(*defInsn);
1742     /* defInsn must be shift or extend */
1743     if ((extendOp == ExtendShiftOperand::kUndef) && (shiftOp == BitShiftOperand::kUndef)) {
1744         return false;
1745     }
1746     return CheckDefUseInfo(insn, regOperand.GetSize());
1747 }
1748 
1749 void ExtendShiftOptPattern::Init()
1750 {
1751     replaceOp = MOP_undef;
1752     extendOp = ExtendShiftOperand::kUndef;
1753     shiftOp = BitShiftOperand::kUndef;
1754     defInsn = nullptr;
1755     replaceIdx = kInsnThirdOpnd;
1756     newInsn = nullptr;
1757     optSuccess = false;
1758     removeDefInsn = false;
1759     exMOpType = kExUndef;
1760     lsMOpType = kLsUndef;
1761 }
1762 
1763 void ExtendShiftOptPattern::Run()
1764 {
1765     if (!cgFunc.GetMirModule().IsCModule()) {
1766         return;
1767     }
1768     FOR_ALL_BB_REV(bb, &cgFunc) {
1769         FOR_BB_INSNS_REV(insn, bb) {
1770             if (!insn->IsMachineInstruction()) {
1771                 continue;
1772             }
1773             DoExtendShiftOpt(*insn);
1774         }
1775     }
1776 }
1777 
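/*
 * ExtenToMovPattern: rewrite uxtw/uxtb/uxth to a plain mov when the source
 * register's upper bits are already known to be zero, making the extension
 * redundant.
 */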
1778 void ExtenToMovPattern::Run()
1779 {
1780     if (!cgFunc.GetMirModule().IsCModule()) {
1781         return;
1782     }
1783     FOR_ALL_BB(bb, &cgFunc) {
1784         FOR_BB_INSNS(insn, bb) {
1785             if (!insn->IsMachineInstruction()) {
1786                 continue;
1787             }
1788             if (!CheckCondition(*insn)) {
1789                 continue;
1790             }
1791             Optimize(*insn);
1792         }
1793     }
1794 }
1795 
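/*
 * On AArch64 a write to a 32-bit (W) destination zero-extends into bits
 * [63:32], so a def whose destination operand is 32 bits wide already performs
 * the uxtw implicitly.
 */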
1796 /* Check for Implicit uxtw */
1797 bool ExtenToMovPattern::CheckHideUxtw(const Insn &insn, regno_t regno) const
1798 {
1799     const InsnDesc *md = &AArch64CG::kMd[insn.GetMachineOpcode()];
1800     if (md->IsMove()) {
1801         return false;
1802     }
1803     uint32 optSize = insn.GetOperandSize();
1804     for (uint32 i = 0; i < optSize; ++i) {
1805         if (regno == static_cast<RegOperand &>(insn.GetOperand(i)).GetRegisterNumber()) {
1806             auto *curOpndDescription = md->GetOpndDes(i);
1807             if (curOpndDescription->IsDef() && curOpndDescription->GetSize() == k32BitSize) {
1808                 return true;
1809             }
1810             break;
1811         }
1812     }
1813     return false;
1814 }
1815 
1816 bool ExtenToMovPattern::CheckUxtw(Insn &insn)
1817 {
1818     if (insn.GetOperand(kInsnFirstOpnd).GetSize() == k64BitSize &&
1819         insn.GetOperand(kInsnSecondOpnd).GetSize() == k32BitSize) {
1820         DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "is not Register");
1821         regno_t regno = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
1822         InsnSet preDef = cgFunc.GetRD()->FindDefForRegOpnd(insn, kInsnSecondOpnd, false);
1823         if (preDef.empty()) {
1824             return false;
1825         }
1826         for (auto defInsn : preDef) {
1827             if (!CheckHideUxtw(*defInsn, regno)) {
1828                 return false;
1829             }
1830         }
1831         replaceMop = MOP_xmovrr_uxtw;
1832         return true;
1833     }
1834     return false;
1835 }
1836 
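/*
 * Recursively check every def of srcRegNo: the value must provably fit in the
 * low validNum bits, e.g. ldrb (8 bits), ldrh (16 bits), an and whose sources
 * already fit, or an orr/eor whose immediate sets no higher bits.
 */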
1837 bool ExtenToMovPattern::CheckSrcReg(Insn &insn, regno_t srcRegNo, uint32 validNum)
1838 {
1839     InsnSet srcDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, srcRegNo, true);
1840     for (auto defInsn : srcDefSet) {
1841         CHECK_FATAL((defInsn != nullptr), "defInsn is null!");
1842         MOperator mOp = defInsn->GetMachineOpcode();
1843         switch (mOp) {
1844             case MOP_wiorrri12:
1845             case MOP_weorrri12: {
1846                 /* check the immediate value when mop is ORR/EOR */
1847                 ImmOperand &imm = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1848                 auto bitNum = static_cast<uint32>(imm.GetValue());
1849                 if ((bitNum >> validNum) != 0) {
1850                     return false;
1851                 }
1852                 break;
1853             }
1854             case MOP_wandrri12: {
1855                 /* check defSrcReg */
1856                 RegOperand &defSrcRegOpnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1857                 regno_t defSrcRegNo = defSrcRegOpnd.GetRegisterNumber();
1858                 if (!CheckSrcReg(*defInsn, defSrcRegNo, validNum)) {
1859                     return false;
1860                 }
1861                 break;
1862             }
1863             case MOP_wandrrr: {
1864                 /* check defSrcReg */
1865                 RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1866                 RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1867                 regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
1868                 regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
1869                 if (!CheckSrcReg(*defInsn, defSrcRegNo1, validNum) && !CheckSrcReg(*defInsn, defSrcRegNo2, validNum)) {
1870                     return false;
1871                 }
1872                 break;
1873             }
1874             case MOP_wiorrrr:
1875             case MOP_weorrrr: {
1876                 /* check defSrcReg */
1877                 RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1878                 RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1879                 regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
1880                 regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
1881                 if (!CheckSrcReg(*defInsn, defSrcRegNo1, validNum) || !CheckSrcReg(*defInsn, defSrcRegNo2, validNum)) {
1882                     return false;
1883                 }
1884                 break;
1885             }
1886             case MOP_wldrb: {
1887                 if (validNum != k8BitSize) {
1888                     return false;
1889                 }
1890                 break;
1891             }
1892             case MOP_wldrh: {
1893                 if (validNum != k16BitSize) {
1894                     return false;
1895                 }
1896                 break;
1897             }
1898             default:
1899                 return false;
1900         }
1901     }
1902     return true;
1903 }
1904 
1905 bool ExtenToMovPattern::BitNotAffected(Insn &insn, uint32 validNum)
1906 {
1907     RegOperand &firstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1908     RegOperand &secondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1909     regno_t desRegNo = firstOpnd.GetRegisterNumber();
1910     regno_t srcRegNo = secondOpnd.GetRegisterNumber();
1911     InsnSet desDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, desRegNo, true);
1912     /* desReg is not redefined */
1913     if (!desDefSet.empty()) {
1914         return false;
1915     }
1916     if (!CheckSrcReg(insn, srcRegNo, validNum)) {
1917         return false;
1918     }
1919     replaceMop = MOP_wmovrr;
1920     return true;
1921 }
1922 
1923 bool ExtenToMovPattern::CheckCondition(Insn &insn)
1924 {
1925     MOperator mOp = insn.GetMachineOpcode();
1926     switch (mOp) {
1927         case MOP_xuxtw64:
1928             return CheckUxtw(insn);
1929         case MOP_xuxtb32:
1930             return BitNotAffected(insn, k8BitSize);
1931         case MOP_xuxth32:
1932             return BitNotAffected(insn, k16BitSize);
1933         default:
1934             return false;
1935     }
1936 }
1937 
1938 /* only the replacement mop needs to be reset */
1939 void ExtenToMovPattern::Init()
1940 {
1941     replaceMop = MOP_undef;
1942 }
1943 
1944 void ExtenToMovPattern::Optimize(Insn &insn)
1945 {
1946     insn.SetMOP(AArch64CG::kMd[replaceMop]);
1947 }
1948 
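/*
 * SameDefPattern: remove a compare that exactly duplicates the compare found
 * as its reaching def (same opcode and source operands), provided no source
 * register is redefined in between, the block has a single predecessor and
 * contains no call.
 */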
1949 void SameDefPattern::Run()
1950 {
1951     FOR_ALL_BB_REV(bb, &cgFunc) {
1952         FOR_BB_INSNS_REV(insn, bb) {
1953             if (!CheckCondition(*insn) || !bb->GetEhPreds().empty()) {
1954                 continue;
1955             }
1956             Optimize(*insn);
1957         }
1958     }
1959 }
1960 
1961 void SameDefPattern::Init()
1962 {
1963     currInsn = nullptr;
1964     sameInsn = nullptr;
1965 }
1966 
1967 bool SameDefPattern::CheckCondition(Insn &insn)
1968 {
1969     MOperator mOp = insn.GetMachineOpcode();
1970     if (insn.GetBB()->GetPreds().size() > k1BitSize) {
1971         return false;
1972     }
1973     if (insn.GetBB()->HasCall()) {
1974         return false;
1975     }
1976     return (mOp == MOP_wcmprr) || (mOp == MOP_xcmprr) || (mOp == MOP_xwcmprre) || (mOp == MOP_xcmprrs);
1977 }
1978 
1979 void SameDefPattern::Optimize(Insn &insn)
1980 {
1981     InsnSet sameDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, 0, false);
1982     if (sameDefSet.size() != k1BitSize) {
1983         return;
1984     }
1985     Insn *sameDefInsn = *sameDefSet.begin();
1986     if (sameDefInsn == nullptr) {
1987         return;
1988     }
1989     currInsn = &insn;
1990     sameInsn = sameDefInsn;
1991     if (!IsSameDef()) {
1992         return;
1993     }
1994     if (GLOBAL_DUMP) {
1995         LogInfo::MapleLogger() << ">>>>>>> In SameDefPattern : <<<<<<<\n";
1996         LogInfo::MapleLogger() << "=======remove insn: \n";
1997         insn.Dump();
1998         LogInfo::MapleLogger() << "=======sameDef insn: \n";
1999         sameDefInsn->Dump();
2000     }
2001     insn.GetBB()->RemoveInsn(insn);
2002 }
2003 
2004 bool SameDefPattern::IsSameDef()
2005 {
2006     if (!CheckCondition(*sameInsn)) {
2007         return false;
2008     }
2009     if (currInsn == sameInsn) {
2010         return false;
2011     }
2012     if (currInsn->GetMachineOpcode() != sameInsn->GetMachineOpcode()) {
2013         return false;
2014     }
2015     for (uint32 i = k1BitSize; i < currInsn->GetOperandSize(); ++i) {
2016         Operand &opnd0 = currInsn->GetOperand(i);
2017         Operand &opnd1 = sameInsn->GetOperand(i);
2018         if (!IsSameOperand(opnd0, opnd1)) {
2019             return false;
2020         }
2021     }
2022     return true;
2023 }
2024 
2025 bool SameDefPattern::IsSameOperand(Operand &opnd0, Operand &opnd1)
2026 {
2027     if (opnd0.IsRegister()) {
2028         CHECK_FATAL(opnd1.IsRegister(), "must be RegOperand!");
2029         RegOperand &regOpnd0 = static_cast<RegOperand &>(opnd0);
2030         RegOperand &regOpnd1 = static_cast<RegOperand &>(opnd1);
2031         if (!RegOperand::IsSameReg(regOpnd0, regOpnd1)) {
2032             return false;
2033         }
2034         regno_t regNo = regOpnd0.GetRegisterNumber();
2035         /* src reg not redefined between sameInsn and currInsn */
2036         if (SrcRegIsRedefined(regNo)) {
2037             return false;
2038         }
2039     } else if (opnd0.IsOpdShift()) {
2040         CHECK_FATAL(opnd1.IsOpdShift(), "must be ShiftOperand!");
2041         BitShiftOperand &shiftOpnd0 = static_cast<BitShiftOperand &>(opnd0);
2042         BitShiftOperand &shiftOpnd1 = static_cast<BitShiftOperand &>(opnd1);
2043         if (shiftOpnd0.GetShiftAmount() != shiftOpnd1.GetShiftAmount()) {
2044             return false;
2045         }
2046     } else if (opnd0.IsOpdExtend()) {
2047         CHECK_FATAL(opnd1.IsOpdExtend(), "must be ExtendOperand!");
2048         ExtendShiftOperand &extendOpnd0 = static_cast<ExtendShiftOperand &>(opnd0);
2049         ExtendShiftOperand &extendOpnd1 = static_cast<ExtendShiftOperand &>(opnd1);
2050         if (extendOpnd0.GetShiftAmount() != extendOpnd1.GetShiftAmount()) {
2051             return false;
2052         }
2053     } else {
2054         return false;
2055     }
2056     return true;
2057 }
2058 
2059 bool SameDefPattern::SrcRegIsRedefined(regno_t regNo)
2060 {
2061     AArch64ReachingDefinition *a64RD = static_cast<AArch64ReachingDefinition *>(cgFunc.GetRD());
2062     if (currInsn->GetBB() == sameInsn->GetBB()) {
2063         FOR_BB_INSNS(insn, currInsn->GetBB()) {
2064             if (insn->GetMachineOpcode() == MOP_xbl) {
2065                 return true;
2066             }
2067         }
2068         if (!a64RD->FindRegDefBetweenInsn(regNo, sameInsn, currInsn).empty()) {
2069             return true;
2070         }
2071     } else if (a64RD->HasRegDefBetweenInsnGlobal(regNo, *sameInsn, *currInsn)) {
2072         return true;
2073     }
2074     return false;
2075 }
2076 
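/*
 * AndCbzPattern:
 *   and  wX, wY, #imm          (imm has exactly one bit set)
 *   cbz/cbnz wX, label
 * ===>
 *   tbz/tbnz wY, #log2(imm), label
 * provided wX has no other use and wY is not redefined between the two insns.
 */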
2077 void AndCbzPattern::Init()
2078 {
2079     prevInsn = nullptr;
2080 }
2081 
2082 bool AndCbzPattern::IsAdjacentArea(Insn &prev, Insn &curr) const
2083 {
2084     if (prev.GetBB() == curr.GetBB()) {
2085         return true;
2086     }
2087     for (auto *succ : prev.GetBB()->GetSuccs()) {
2088         if (succ == curr.GetBB()) {
2089             return true;
2090         }
2091     }
2092     return false;
2093 }
2094 
2095 bool AndCbzPattern::CheckCondition(Insn &insn)
2096 {
2097     auto *aarch64RD = static_cast<AArch64ReachingDefinition *>(cgFunc.GetRD());
2098     MOperator mOp = insn.GetMachineOpcode();
2099     if ((mOp != MOP_wcbz) && (mOp != MOP_xcbz) && (mOp != MOP_wcbnz) && (mOp != MOP_xcbnz)) {
2100         return false;
2101     }
2102     regno_t regNo = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2103     InsnSet defSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, regNo, true);
2104     if (defSet.size() != k1BitSize) {
2105         return false;
2106     }
2107     prevInsn = *defSet.begin();
2108     if (prevInsn->GetMachineOpcode() != MOP_wandrri12 && prevInsn->GetMachineOpcode() != MOP_xandrri13) {
2109         return false;
2110     }
2111     if (!IsAdjacentArea(*prevInsn, insn)) {
2112         return false;
2113     }
2114     regno_t propRegNo = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
2115     if (prevInsn->GetBB() == insn.GetBB() && !(aarch64RD->FindRegDefBetweenInsn(propRegNo, prevInsn, &insn).empty())) {
2116         return false;
2117     }
2118     if (prevInsn->GetBB() != insn.GetBB() && aarch64RD->HasRegDefBetweenInsnGlobal(propRegNo, *prevInsn, insn)) {
2119         return false;
2120     }
2121     if (!(cgFunc.GetRD()->FindUseForRegOpnd(insn, regNo, true).empty())) {
2122         return false;
2123     }
2124     return true;
2125 }
2126 
2127 int64 AndCbzPattern::CalculateLogValue(int64 val) const
2128 {
2129     return (__builtin_popcountll(static_cast<uint64>(val)) == 1) ? (__builtin_ffsll(val) - 1) : -1;
2130 }
2131 
2132 void AndCbzPattern::Optimize(Insn &insn)
2133 {
2134     BB *bb = insn.GetBB();
2135     auto &aarchFunc = static_cast<AArch64CGFunc &>(cgFunc);
2136     auto &andImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
2137     int64 tbzVal = CalculateLogValue(andImm.GetValue());
2138     if (tbzVal < 0) {
2139         return;
2140     }
2141     MOperator mOp = insn.GetMachineOpcode();
2142     MOperator newMop = MOP_undef;
2143     switch (mOp) {
2144         case MOP_wcbz:
2145             newMop = MOP_wtbz;
2146             break;
2147         case MOP_wcbnz:
2148             newMop = MOP_wtbnz;
2149             break;
2150         case MOP_xcbz:
2151             newMop = MOP_xtbz;
2152             break;
2153         case MOP_xcbnz:
2154             newMop = MOP_xtbnz;
2155             break;
2156         default:
2157             CHECK_FATAL(false, "must be cbz/cbnz");
2158             break;
2159     }
2160     auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
2161     ImmOperand &tbzImm = aarchFunc.CreateImmOperand(tbzVal, k8BitSize, false);
2162     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnSecondOpnd), tbzImm, label);
2163     newInsn.SetId(insn.GetId());
2164     bb->ReplaceInsn(insn, newInsn);
2165     if (GLOBAL_DUMP) {
2166         LogInfo::MapleLogger() << ">>>>>>> In AndCbzPattern : <<<<<<<\n";
2167         LogInfo::MapleLogger() << "=======PrevInsn :\n";
        prevInsn->Dump();
2168         LogInfo::MapleLogger() << "=======ReplaceInsn :\n";
2169         insn.Dump();
2170         LogInfo::MapleLogger() << "=======NewInsn :\n";
2171         newInsn.Dump();
2172     }
2173     cgFunc.GetRD()->UpdateInOut(*bb, true);
2174 }
2175 
2176 void AndCbzPattern::Run()
2177 {
2178     Init();
2179     FOR_ALL_BB_REV(bb, &cgFunc) {
2180         FOR_BB_INSNS_REV(insn, bb) {
2181             if (!insn->IsMachineInstruction() || !CheckCondition(*insn)) {
2182                 continue;
2183             }
2184             Optimize(*insn);
2185         }
2186     }
2187 }
2188 
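/*
 * SameRHSPropPattern: if an earlier insn in the same BB has the same opcode
 * and the same right-hand-side operands, e.g.
 *   add w1, w2, #4
 *   ...
 *   add w3, w2, #4
 * the later insn is replaced with "mov w3, w1", provided none of the involved
 * registers is redefined in between.
 */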
2189 void SameRHSPropPattern::Init()
2190 {
2191     prevInsn = nullptr;
2192     candidates = {MOP_waddrri12, MOP_xaddrri12, MOP_wsubrri12, MOP_xsubrri12,
2193                   MOP_wmovri32,  MOP_xmovri64,  MOP_wmovrr,    MOP_xmovrr};
2194 }
2195 
2196 bool SameRHSPropPattern::IsSameOperand(Operand *opnd1, Operand *opnd2) const
2197 {
2198     if (opnd1 == nullptr && opnd2 == nullptr) {
2199         return true;
2200     } else if (opnd1 == nullptr || opnd2 == nullptr) {
2201         return false;
2202     }
2203     if (opnd1->IsRegister() && opnd2->IsRegister()) {
2204         return RegOperand::IsSameReg(*opnd1, *opnd2);
2205     } else if (opnd1->IsImmediate() && opnd2->IsImmediate()) {
2206         auto *immOpnd1 = static_cast<ImmOperand *>(opnd1);
2207         auto *immOpnd2 = static_cast<ImmOperand *>(opnd2);
2208         return (immOpnd1->GetSize() == immOpnd2->GetSize()) && (immOpnd1->GetValue() == immOpnd2->GetValue());
2209     }
2210     return false;
2211 }
2212 
2213 bool SameRHSPropPattern::FindSameRHSInsnInBB(Insn &insn)
2214 {
2215     uint32 opndNum = insn.GetOperandSize();
2216     Operand *curRegOpnd = nullptr;
2217     Operand *curImmOpnd = nullptr;
2218     for (uint32 i = 0; i < opndNum; ++i) {
2219         if (insn.OpndIsDef(i)) {
2220             continue;
2221         }
2222         Operand &opnd = insn.GetOperand(i);
2223         if (opnd.IsRegister()) {
2224             curRegOpnd = &opnd;
2225         } else if (opnd.IsImmediate()) {
2226             auto &immOpnd = static_cast<ImmOperand &>(opnd);
2227             if (immOpnd.GetVary() == kUnAdjustVary) {
2228                 return false;
2229             }
2230             curImmOpnd = &opnd;
2231         }
2232     }
2233     if (curRegOpnd == nullptr && curImmOpnd != nullptr && static_cast<ImmOperand *>(curImmOpnd)->IsZero()) {
2234         return false;
2235     }
2236     BB *bb = insn.GetBB();
2237     for (auto *cursor = insn.GetPrev(); cursor != nullptr && cursor != bb->GetFirstInsn(); cursor = cursor->GetPrev()) {
2238         if (!cursor->IsMachineInstruction()) {
2239             continue;
2240         }
2241         if (cursor->IsCall() && !cgFunc.IsAfterRegAlloc()) {
2242             return false;
2243         }
2244         if (cursor->GetMachineOpcode() != insn.GetMachineOpcode()) {
2245             continue;
2246         }
2247         uint32 candOpndNum = cursor->GetOperandSize();
2248         Operand *candRegOpnd = nullptr;
2249         Operand *candImmOpnd = nullptr;
2250         for (uint32 i = 0; i < candOpndNum; ++i) {
2251             Operand &opnd = cursor->GetOperand(i);
2252             if (cursor->OpndIsDef(i)) {
2253                 continue;
2254             }
2255             if (opnd.IsRegister()) {
2256                 candRegOpnd = &opnd;
2257             } else if (opnd.IsImmediate()) {
2258                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
2259                 if (immOpnd.GetVary() == kUnAdjustVary) {
2260                     return false;
2261                 }
2262                 candImmOpnd = &opnd;
2263             }
2264         }
2265         if (IsSameOperand(curRegOpnd, candRegOpnd) && IsSameOperand(curImmOpnd, candImmOpnd)) {
2266             prevInsn = cursor;
2267             return true;
2268         }
2269     }
2270     return false;
2271 }
2272 
2273 bool SameRHSPropPattern::CheckCondition(Insn &insn)
2274 {
2275     if (!insn.IsMachineInstruction()) {
2276         return false;
2277     }
2278     MOperator mOp = insn.GetMachineOpcode();
2279     if (std::find(candidates.begin(), candidates.end(), mOp) == candidates.end()) {
2280         return false;
2281     }
2282     if (!FindSameRHSInsnInBB(insn)) {
2283         return false;
2284     }
2285     CHECK_FATAL(prevInsn->GetOperand(kInsnFirstOpnd).IsRegister(), "prevInsn first operand must be register");
2286     if (prevInsn->GetOperand(kInsnSecondOpnd).IsRegister() &&
2287         RegOperand::IsSameReg(prevInsn->GetOperand(kInsnFirstOpnd), prevInsn->GetOperand(kInsnSecondOpnd))) {
2288         return false;
2289     }
2290     uint32 opndNum = prevInsn->GetOperandSize();
2291     for (uint32 i = 0; i < opndNum; ++i) {
2292         Operand &opnd = prevInsn->GetOperand(i);
2293         if (!opnd.IsRegister()) {
2294             continue;
2295         }
2296         regno_t regNO = static_cast<RegOperand &>(opnd).GetRegisterNumber();
2297         if (!(cgFunc.GetRD()->FindRegDefBetweenInsn(regNO, prevInsn->GetNext(), insn.GetPrev()).empty())) {
2298             return false;
2299         }
2300     }
2301     return true;
2302 }
2303 
2304 void SameRHSPropPattern::Optimize(Insn &insn)
2305 {
2306     BB *bb = insn.GetBB();
2307     Operand &destOpnd = insn.GetOperand(kInsnFirstOpnd);
2308     uint32 bitSize = static_cast<RegOperand &>(destOpnd).GetSize();
2309     MOperator mOp = (bitSize == k64BitSize ? MOP_xmovrr : MOP_wmovrr);
2310     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mOp, destOpnd, prevInsn->GetOperand(kInsnFirstOpnd));
2311     newInsn.SetId(insn.GetId());
2312     bb->ReplaceInsn(insn, newInsn);
2313     if (GLOBAL_DUMP) {
2314         LogInfo::MapleLogger() << ">>>>>>> In SameRHSPropPattern : <<<<<<<\n";
2315         LogInfo::MapleLogger() << "=======PrevInsn :\n";
        prevInsn->Dump();
2316         LogInfo::MapleLogger() << "======= ReplaceInsn :\n";
2317         insn.Dump();
2318         LogInfo::MapleLogger() << "======= NewInsn :\n";
2319         newInsn.Dump();
2320     }
2321     cgFunc.GetRD()->UpdateInOut(*bb, true);
2322 }
2323 
2324 void SameRHSPropPattern::Run()
2325 {
2326     Init();
2327     FOR_ALL_BB_REV(bb, &cgFunc) {
2328         FOR_BB_INSNS_REV(insn, bb) {
2329             if (!CheckCondition(*insn)) {
2330                 continue;
2331             }
2332             Optimize(*insn);
2333         }
2334     }
2335 }
2336 } /* namespace maplebe */
2337