1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "aarch64_global.h"
17 #include "aarch64_reaching.h"
18 #include "aarch64_cg.h"
19 #include "aarch64_live.h"
20 
21 namespace maplebe {
22 using namespace maple;
23 #define GLOBAL_DUMP CG_DEBUG_FUNC(cgFunc)
24 
25 constexpr uint32 kExMOpTypeSize = 9;
26 constexpr uint32 kLsMOpTypeSize = 15;
27 
28 MOperator exMOpTable[kExMOpTypeSize] = {MOP_undef,    MOP_xxwaddrrre, MOP_wwwaddrrre, MOP_xxwsubrrre, MOP_wwwsubrrre,
29                                         MOP_xwcmnrre, MOP_wwcmnrre,   MOP_xwcmprre,   MOP_wwcmprre};
30 MOperator lsMOpTable[kLsMOpTypeSize] = {MOP_undef,    MOP_xaddrrrs, MOP_waddrrrs, MOP_xsubrrrs, MOP_wsubrrrs,
31                                         MOP_xcmnrrs,  MOP_wcmnrrs,  MOP_xcmprrs,  MOP_wcmprrs,  MOP_xeorrrrs,
32                                         MOP_weorrrrs, MOP_xinegrrs, MOP_winegrrs, MOP_xiorrrrs, MOP_wiorrrrs};
33 
34 /* Optimize ExtendShiftOptPattern:
35  * ==========================================================
36  *           nosuffix  LSL   LSR   ASR      exten   (def)
37  * nosuffix |   F    | LSL | LSR | ASR |    exten  |
38  * LSL      |   F    | LSL |  F  |  F  |    exten  |
39  * LSR      |   F    |  F  | LSR |  F  |     F     |
40  * ASR      |   F    |  F  |  F  | ASR |     F     |
41  * exten    |   F    |  F  |  F  |  F  |exten(self)|
42  * (use)
43  * ===========================================================
44  */
45 constexpr uint32 kExtenAddShift = 5;
46 ExtendShiftOptPattern::SuffixType doOptimize[kExtenAddShift][kExtenAddShift] = {
47     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSL, ExtendShiftOptPattern::kLSR,
48      ExtendShiftOptPattern::kASR, ExtendShiftOptPattern::kExten},
49     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSL, ExtendShiftOptPattern::kNoSuffix,
50      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kExten},
51     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kLSR,
52      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix},
53     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix,
54      ExtendShiftOptPattern::kASR, ExtendShiftOptPattern::kNoSuffix},
55     {ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kNoSuffix,
56      ExtendShiftOptPattern::kNoSuffix, ExtendShiftOptPattern::kExten}};
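/* An illustrative sketch (hypothetical registers, not taken from a real dump) of the folding that the
 * tables above drive, assuming the usual AArch64 shifted-register operand forms:
 *     lsl  x1, x2, #3          // def insn: shift
 *     add  x0, x3, x1          // use insn: no suffix yet
 * may be folded, per doOptimize[kNoSuffix][kLSL] == kLSL, into
 *     add  x0, x3, x2, LSL #3
 * while an ASR def feeding a use that already carries an LSL suffix keeps both insns unchanged
 * (table entry F, i.e. kNoSuffix). */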
57 
58 static bool IsZeroRegister(const Operand &opnd)
59 {
60     if (!opnd.IsRegister()) {
61         return false;
62     }
63     const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
64     return regOpnd->GetRegisterNumber() == RZR;
65 }
66 
67 void AArch64GlobalOpt::Run()
68 {
69     OptimizeManager optManager(cgFunc, loopInfo);
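    /* Treat very large functions (too many BBs or too many insns for reaching-definition queries to stay
     * cheap) as having a "spill barrier": only the always-run patterns at the bottom are applied to them. */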
70     bool hasSpillBarrier = (cgFunc.NumBBs() > kMaxBBNum) || (cgFunc.GetRD()->GetMaxInsnNO() > kMaxInsnNum);
71     if (cgFunc.IsAfterRegAlloc()) {
72         optManager.Optimize<SameRHSPropPattern>();
73         optManager.Optimize<BackPropPattern>();
74         return;
75     }
76     if (!hasSpillBarrier) {
77         optManager.Optimize<ExtenToMovPattern>();
78         optManager.Optimize<SameRHSPropPattern>();
79         optManager.Optimize<BackPropPattern>();
80         optManager.Optimize<ForwardPropPattern>();
81         optManager.Optimize<CselPattern>();
82         optManager.Optimize<CmpCsetPattern>();
83         optManager.Optimize<RedundantUxtPattern>();
84         optManager.Optimize<LocalVarSaveInsnPattern>();
85     }
86     optManager.Optimize<SameDefPattern>();
87     optManager.Optimize<ExtendShiftOptPattern>();
88     optManager.Optimize<AndCbzPattern>();
89 }
90 
91 /* if the operand used in insn is defined as zero by every defining insn, return true */
92 bool OptimizePattern::OpndDefByZero(Insn &insn, int32 useIdx) const
93 {
94     DEBUG_ASSERT(insn.GetOperand(useIdx).IsRegister(), "the used Operand must be Register");
95     /* the zero register does not need a definition */
96     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
97         return true;
98     }
99 
100     InsnSet defInsns = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
101     if (defInsns.empty()) {
102         return false;
103     }
104     for (auto &defInsn : defInsns) {
105         if (!InsnDefZero(*defInsn)) {
106             return false;
107         }
108     }
109     return true;
110 }
111 
112 /* if the operand used in insn is defined as one by every defining insn, return true */
113 bool OptimizePattern::OpndDefByOne(Insn &insn, int32 useIdx) const
114 {
115     DEBUG_ASSERT(insn.GetOperand(useIdx).IsRegister(), "the used Operand must be Register");
116     /* the zero register is always zero, so it is never defined as one */
117     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
118         return false;
119     }
120     InsnSet defInsns = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
121     if (defInsns.empty()) {
122         return false;
123     }
124     for (auto &defInsn : defInsns) {
125         if (!InsnDefOne(*defInsn)) {
126             return false;
127         }
128     }
129     return true;
130 }
131 
132 /* if the operand used in insn has only one valid bit (i.e. is 0 or 1) in every defining insn, return true */
133 bool OptimizePattern::OpndDefByOneOrZero(Insn &insn, int32 useIdx) const
134 {
135     if (IsZeroRegister(insn.GetOperand(static_cast<uint32>(useIdx)))) {
136         return true;
137     }
138 
139     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, useIdx);
140     if (defInsnSet.empty()) {
141         return false;
142     }
143 
144     for (auto &defInsn : defInsnSet) {
145         if (!InsnDefOneOrZero(*defInsn)) {
146             return false;
147         }
148     }
149     return true;
150 }
151 
152 /* if the operand defined by insn (currently it must be the first operand) is the constant one, return true */
153 bool OptimizePattern::InsnDefOne(const Insn &insn)
154 {
155     MOperator defMop = insn.GetMachineOpcode();
156     switch (defMop) {
157         case MOP_wmovri32:
158         case MOP_xmovri64: {
159             Operand &srcOpnd = insn.GetOperand(1);
160             DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "expects ImmOperand");
161             ImmOperand &srcConst = static_cast<ImmOperand &>(srcOpnd);
162             int64 srcConstValue = srcConst.GetValue();
163             if (srcConstValue == 1) {
164                 return true;
165             }
166             return false;
167         }
168         default:
169             return false;
170     }
171 }
172 
173 /* if the operand defined by insn (currently it must be the first operand) is the constant zero, return true */
174 bool OptimizePattern::InsnDefZero(const Insn &insn)
175 {
176     MOperator defMop = insn.GetMachineOpcode();
177     switch (defMop) {
178         case MOP_wmovri32:
179         case MOP_xmovri64: {
180             Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
181             DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "expects ImmOperand");
182             ImmOperand &srcConst = static_cast<ImmOperand &>(srcOpnd);
183             int64 srcConstValue = srcConst.GetValue();
184             if (srcConstValue == 0) {
185                 return true;
186             }
187             return false;
188         }
189         case MOP_xmovrr:
190         case MOP_wmovrr:
191             return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
192         default:
193             return false;
194     }
195 }
196 
197 /* if the operand defined by insn (currently it must be the first operand) has only one valid bit, return true */
198 bool OptimizePattern::InsnDefOneOrZero(const Insn &insn)
199 {
200     MOperator defMop = insn.GetMachineOpcode();
201     switch (defMop) {
202         case MOP_wcsetrc:
203         case MOP_xcsetrc:
204             return true;
205         case MOP_wmovri32:
206         case MOP_xmovri64: {
207             Operand &defOpnd = insn.GetOperand(kInsnSecondOpnd);
208             DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
209             auto &defConst = static_cast<ImmOperand &>(defOpnd);
210             int64 defConstValue = defConst.GetValue();
211             if (defConstValue != 0 && defConstValue != 1) {
212                 return false;
213             } else {
214                 return true;
215             }
216         }
217         case MOP_xmovrr:
218         case MOP_wmovrr: {
219             return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
220         }
221         case MOP_wlsrrri5:
222         case MOP_xlsrrri6: {
223             Operand &opnd2 = insn.GetOperand(kInsnThirdOpnd);
224             DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
225             ImmOperand &opndImm = static_cast<ImmOperand &>(opnd2);
226             int64 shiftBits = opndImm.GetValue();
227             if (((defMop == MOP_wlsrrri5) && (shiftBits == k32BitSize - 1)) ||
228                 ((defMop == MOP_xlsrrri6) && (shiftBits == k64BitSize - 1))) {
229                 return true;
230             } else {
231                 return false;
232             }
233         }
234         default:
235             return false;
236     }
237 }
238 
239 void ReplaceAsmListReg(const Insn *insn, uint32 index, uint32 regNO, Operand *newOpnd)
240 {
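    /* Rotate through the asm operand list exactly once: pop each register from the front and push back
     * either the replacement (when its number matches regNO) or the original register, preserving order. */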
241     MapleList<RegOperand *> *list = &static_cast<ListOperand &>(insn->GetOperand(index)).GetOperands();
242     int32 size = static_cast<int32>(list->size());
243     for (int i = 0; i < size; ++i) {
244         RegOperand *opnd = static_cast<RegOperand *>(*(list->begin()));
245         list->pop_front();
246         if (opnd->GetRegisterNumber() == regNO) {
247             list->push_back(static_cast<RegOperand *>(newOpnd));
248         } else {
249             list->push_back(opnd);
250         }
251     }
252 }
253 
254 void OptimizePattern::ReplaceAllUsedOpndWithNewOpnd(const InsnSet &useInsnSet, uint32 regNO, Operand &newOpnd,
255                                                     bool updateInfo) const
256 {
257     for (auto useInsn : useInsnSet) {
258         if (useInsn->GetMachineOpcode() == MOP_asm) {
259             ReplaceAsmListReg(useInsn, kAsmInputListOpnd, regNO, &newOpnd);
260         }
261         const InsnDesc *md = useInsn->GetDesc();
262         uint32 opndNum = useInsn->GetOperandSize();
263         for (uint32 i = 0; i < opndNum; ++i) {
264             Operand &opnd = useInsn->GetOperand(i);
265             auto *regProp = md->opndMD[i];
266             if (!regProp->IsRegUse() && !opnd.IsMemoryAccessOperand()) {
267                 continue;
268             }
269 
270             if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO)) {
271                 useInsn->SetOperand(i, newOpnd);
272                 if (updateInfo) {
273                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
274                 }
275             } else if (opnd.IsMemoryAccessOperand()) {
276                 MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
277                 RegOperand *base = memOpnd.GetBaseRegister();
278                 RegOperand *index = memOpnd.GetIndexRegister();
279                 MemOperand *newMem = nullptr;
280                 if (base != nullptr && (base->GetRegisterNumber() == regNO)) {
281                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
282                     CHECK_FATAL(newMem != nullptr, "null ptr check");
283                     newMem->SetBaseRegister(*static_cast<RegOperand *>(&newOpnd));
284                     useInsn->SetOperand(i, *newMem);
285                     if (updateInfo) {
286                         cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
287                     }
288                 }
289                 if (index != nullptr && (index->GetRegisterNumber() == regNO)) {
290                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
291                     CHECK_FATAL(newMem != nullptr, "null ptr check");
292                     newMem->SetIndexRegister(*static_cast<RegOperand *>(&newOpnd));
293                     if (static_cast<RegOperand &>(newOpnd).GetValidBitsNum() != index->GetValidBitsNum()) {
294                         newMem->UpdateExtend(MemOperand::kSignExtend);
295                     }
296                     useInsn->SetOperand(i, *newMem);
297                     if (updateInfo) {
298                         cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
299                     }
300                 }
301             }
302         }
303     }
304 }
305 
306 bool ForwardPropPattern::CheckCondition(Insn &insn)
307 {
308     if (!insn.IsMachineInstruction()) {
309         return false;
310     }
311     if ((insn.GetMachineOpcode() != MOP_xmovrr) && (insn.GetMachineOpcode() != MOP_wmovrr) &&
312         (insn.GetMachineOpcode() != MOP_xmovrr_uxtw)) {
313         return false;
314     }
315     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
316     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
317     if (firstOpnd.GetSize() != secondOpnd.GetSize() && insn.GetMachineOpcode() != MOP_xmovrr_uxtw) {
318         return false;
319     }
320     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
321     RegOperand &secondRegOpnd = static_cast<RegOperand &>(secondOpnd);
322     uint32 firstRegNO = firstRegOpnd.GetRegisterNumber();
323     uint32 secondRegNO = secondRegOpnd.GetRegisterNumber();
324     if (IsZeroRegister(firstRegOpnd) || !firstRegOpnd.IsVirtualRegister() || !secondRegOpnd.IsVirtualRegister()) {
325         return false;
326     }
327     firstRegUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, firstRegNO, true);
328     if (firstRegUseInsnSet.empty()) {
329         return false;
330     }
331     InsnSet secondRegDefInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, secondRegNO, true);
332     if (secondRegDefInsnSet.size() != 1 || RegOperand::IsSameReg(firstOpnd, secondOpnd)) {
333         return false;
334     }
335     bool toDoOpt = true;
336     for (auto useInsn : firstRegUseInsnSet) {
337         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, insn, *useInsn)) {
338             toDoOpt = false;
339             break;
340         }
341         /* part defined */
342         if ((useInsn->GetMachineOpcode() == MOP_xmovkri16) || (useInsn->GetMachineOpcode() == MOP_wmovkri16)) {
343             toDoOpt = false;
344             break;
345         }
346         if (useInsn->GetMachineOpcode() == MOP_asm) {
347             toDoOpt = false;
348             break;
349         }
350         InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
351         if (defInsnSet.size() > 1) {
352             toDoOpt = false;
353             break;
354         } else if (defInsnSet.size() == 1 && *defInsnSet.begin() != &insn) {
355             toDoOpt = false;
356             break;
357         }
358     }
359     return toDoOpt;
360 }
361 
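/* An illustrative sketch of the forward copy propagation (hypothetical virtual registers):
 *     mov  w100, w101          // insn: copy
 *     add  w102, w100, w103    // use of w100
 *     str  w100, [x104]        // another use of w100
 * provided w101 stays live and unredefined up to every use, all uses of w100 are rewritten to w101 and
 * the copy itself is rewritten in place (its dest replaced by the src), turning it into a no-op:
 *     mov  w101, w101
 *     add  w102, w101, w103
 *     str  w101, [x104]
 */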
362 void ForwardPropPattern::Optimize(Insn &insn)
363 {
364     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
365     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
366     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
367     uint32 firstRegNO = firstRegOpnd.GetRegisterNumber();
368     for (auto *useInsn : firstRegUseInsnSet) {
369         if (useInsn->GetMachineOpcode() == MOP_asm) {
370             ReplaceAsmListReg(useInsn, kAsmInputListOpnd, firstRegNO, &secondOpnd);
371             cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
372             continue;
373         }
374         const InsnDesc *md = useInsn->GetDesc();
375         uint32 opndNum = useInsn->GetOperandSize();
376         for (uint32 i = 0; i < opndNum; ++i) {
377             Operand &opnd = useInsn->GetOperand(i);
378             const OpndDesc *regProp = md->GetOpndDes(i);
379             if (!regProp->IsRegUse() && !opnd.IsMemoryAccessOperand()) {
380                 continue;
381             }
382 
383             if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == firstRegNO)) {
384                 useInsn->SetOperand(i, secondOpnd);
385                 if (((useInsn->GetMachineOpcode() == MOP_xmovrr) || (useInsn->GetMachineOpcode() == MOP_wmovrr)) &&
386                     (static_cast<RegOperand &>(useInsn->GetOperand(kInsnSecondOpnd)).IsVirtualRegister()) &&
387                     (static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd)).IsVirtualRegister())) {
388                     (void)modifiedBB.insert(useInsn->GetBB());
389                 }
390                 cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
391             } else if (opnd.IsMemoryAccessOperand()) {
392                 MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
393                 RegOperand *base = memOpnd.GetBaseRegister();
394                 RegOperand *index = memOpnd.GetIndexRegister();
395                 MemOperand *newMem = nullptr;
396                 if (base != nullptr && (base->GetRegisterNumber() == firstRegNO)) {
397                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
398                     CHECK_FATAL(newMem != nullptr, "null ptr check");
399                     newMem->SetBaseRegister(static_cast<RegOperand &>(secondOpnd));
400                     useInsn->SetOperand(i, *newMem);
401                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
402                 }
403                 if ((index != nullptr) && (index->GetRegisterNumber() == firstRegNO)) {
404                     newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
405                     CHECK_FATAL(newMem != nullptr, "null ptr check");
406                     newMem->SetIndexRegister(static_cast<RegOperand &>(secondOpnd));
407                     if (static_cast<RegOperand &>(secondOpnd).GetValidBitsNum() != index->GetValidBitsNum()) {
408                         newMem->UpdateExtend(MemOperand::kSignExtend);
409                     }
410                     useInsn->SetOperand(i, *newMem);
411                     cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
412                 }
413             }
414         }
415     }
416     insn.SetOperand(0, secondOpnd);
417     cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
418 }
419 
420 void ForwardPropPattern::RemoveMopUxtwToMov(Insn &insn)
421 {
422     if (CGOptions::DoCGSSA()) {
423         CHECK_FATAL(false, "check case in ssa");
424     }
425     auto &secondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
426     auto &destOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
427     uint32 destRegNo = destOpnd.GetRegisterNumber();
428     destOpnd.SetRegisterNumber(secondOpnd.GetRegisterNumber());
429     auto *newOpnd = static_cast<RegOperand *>(destOpnd.Clone(*cgFunc.GetMemoryPool()));
430     cgFunc.InsertExtendSet(secondOpnd.GetRegisterNumber());
431     InsnSet regUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, destRegNo, true);
432     if (regUseInsnSet.size() >= 1) {
433         for (auto useInsn : regUseInsnSet) {
434             uint32 optSize = useInsn->GetOperandSize();
435             for (uint32 i = 0; i < optSize; i++) {
436                 DEBUG_ASSERT(useInsn->GetOperand(i).IsRegister(), "only design for register");
437                 if (destRegNo == static_cast<RegOperand &>(useInsn->GetOperand(i)).GetRegisterNumber()) {
438                     useInsn->SetOperand(i, *newOpnd);
439                 }
440             }
441             cgFunc.GetRD()->InitGenUse(*useInsn->GetBB(), false);
442         }
443     }
444     insn.GetBB()->RemoveInsn(insn);
445 }
446 
447 void ForwardPropPattern::Init()
448 {
449     firstRegUseInsnSet.clear();
450 }
451 
452 void ForwardPropPattern::Run()
453 {
454     bool secondTime = false;
455     do {
456         FOR_ALL_BB(bb, &cgFunc)
457         {
458             if (bb->IsUnreachable() || (secondTime && modifiedBB.find(bb) == modifiedBB.end())) {
459                 continue;
460             }
461 
462             if (secondTime) {
463                 modifiedBB.erase(bb);
464             }
465 
466             FOR_BB_INSNS(insn, bb)
467             {
468                 Init();
469                 if (!CheckCondition(*insn)) {
470                     if (insn->GetMachineOpcode() == MOP_xmovrr_uxtw) {
471                         insn->SetMOP(AArch64CG::kMd[MOP_xuxtw64]);
472                     }
473                     continue;
474                 }
475                 if (insn->GetMachineOpcode() == MOP_xmovrr_uxtw) {
476                     RemoveMopUxtwToMov(*insn);
477                     continue;
478                 }
479                 Optimize(*insn);
480             }
481         }
482         secondTime = true;
483     } while (!modifiedBB.empty());
484 }
485 
486 bool BackPropPattern::CheckAndGetOpnd(const Insn &insn)
487 {
488     if (!insn.IsMachineInstruction()) {
489         return false;
490     }
491     if (!cgFunc.IsAfterRegAlloc() && (insn.GetMachineOpcode() != MOP_xmovrr) &&
492         (insn.GetMachineOpcode() != MOP_wmovrr)) {
493         return false;
494     }
495     if (cgFunc.IsAfterRegAlloc() && (insn.GetMachineOpcode() != MOP_xmovrr) &&
496         (insn.GetMachineOpcode() != MOP_wmovrr) && (insn.GetMachineOpcode() != MOP_xvmovs) &&
497         (insn.GetMachineOpcode() != MOP_xvmovd)) {
498         return false;
499     }
500     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
501     Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
502     if (RegOperand::IsSameReg(firstOpnd, secondOpnd)) {
503         return false;
504     }
505     if (firstOpnd.GetSize() != secondOpnd.GetSize()) {
506         return false;
507     }
508     firstRegOpnd = &static_cast<RegOperand &>(firstOpnd);
509     secondRegOpnd = &static_cast<RegOperand &>(secondOpnd);
510     if (IsZeroRegister(*firstRegOpnd)) {
511         return false;
512     }
513     if (!cgFunc.IsAfterRegAlloc() && (!secondRegOpnd->IsVirtualRegister() || !firstRegOpnd->IsVirtualRegister())) {
514         return false;
515     }
516     firstRegNO = firstRegOpnd->GetRegisterNumber();
517     secondRegNO = secondRegOpnd->GetRegisterNumber();
518     return true;
519 }
520 
521 bool BackPropPattern::DestOpndHasUseInsns(Insn &insn)
522 {
523     BB &bb = *insn.GetBB();
524     InsnSet useInsnSetOfFirstOpnd;
525     bool findRes =
526         cgFunc.GetRD()->FindRegUseBetweenInsn(firstRegNO, insn.GetNext(), bb.GetLastInsn(), useInsnSetOfFirstOpnd);
527     if ((findRes && useInsnSetOfFirstOpnd.empty()) ||
528         (!findRes && useInsnSetOfFirstOpnd.empty() && !bb.GetLiveOut()->TestBit(firstRegNO))) {
529         return false;
530     }
531     return true;
532 }
533 
534 bool BackPropPattern::CheckSrcOpndDefAndUseInsns(Insn &insn)
535 {
536     BB &bb = *insn.GetBB();
537     /* secondOpnd is defined in other BB */
538     std::vector<Insn *> defInsnVec =
539         cgFunc.GetRD()->FindRegDefBetweenInsn(secondRegNO, bb.GetFirstInsn(), insn.GetPrev());
540     if (defInsnVec.size() != 1) {
541         return false;
542     }
543     defInsnForSecondOpnd = defInsnVec.back();
544     /* part defined */
545     if ((defInsnForSecondOpnd->GetMachineOpcode() == MOP_xmovkri16) ||
546         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_wmovkri16) ||
547         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_asm)) {
548         return false;
549     }
550     if (AArch64isa::IsPseudoInstruction(defInsnForSecondOpnd->GetMachineOpcode()) || defInsnForSecondOpnd->IsCall()) {
551         return false;
552     }
553     /* unconcerned regs. */
554     if ((secondRegNO >= RLR && secondRegNO <= RZR) || secondRegNO == RFP) {
555         return false;
556     }
557     if (defInsnForSecondOpnd->IsStore() || defInsnForSecondOpnd->IsLoad()) {
558         auto *memOpnd = static_cast<MemOperand *>(defInsnForSecondOpnd->GetMemOpnd());
559         if (memOpnd != nullptr && !memOpnd->IsIntactIndexed()) {
560             return false;
561         }
562     }
563 
564     bool findFinish = cgFunc.GetRD()->FindRegUseBetweenInsn(secondRegNO, defInsnForSecondOpnd->GetNext(),
565                                                             bb.GetLastInsn(), srcOpndUseInsnSet);
566     if (!findFinish && bb.GetLiveOut()->TestBit(secondRegNO)) {
567         return false;
568     }
569     if (cgFunc.IsAfterRegAlloc() && findFinish && srcOpndUseInsnSet.size() > 1) {
570         /* use later before killed. */
571         return false;
572     }
573     if (cgFunc.IsAfterRegAlloc()) {
574         for (auto *usePoint : srcOpndUseInsnSet) {
575             if (usePoint->IsCall()) {
576                 return false;
577             }
578         }
579     }
580     return true;
581 }
582 
583 bool BackPropPattern::CheckSrcOpndDefAndUseInsnsGlobal(Insn &insn)
584 {
585     /* secondOpnd is defined in other BB */
586     InsnSet defInsnVec = cgFunc.GetRD()->FindDefForRegOpnd(insn, secondRegNO, true);
587     if (defInsnVec.size() != 1) {
588         return false;
589     }
590     defInsnForSecondOpnd = *(defInsnVec.begin());
591 
592     /* ensure that there is no def/use of the first RegNO between insn and defInsnForSecondOpnd */
593     std::vector<Insn *> defInsnVecFirst;
594 
595     if (insn.GetBB() != defInsnForSecondOpnd->GetBB()) {
596         defInsnVecFirst = cgFunc.GetRD()->FindRegDefBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, &insn);
597     } else {
598         defInsnVecFirst = cgFunc.GetRD()->FindRegDefBetweenInsn(firstRegNO, defInsnForSecondOpnd, insn.GetPrev());
599     }
600     if (!defInsnVecFirst.empty()) {
601         return false;
602     }
603     /* part defined */
604     if ((defInsnForSecondOpnd->GetMachineOpcode() == MOP_xmovkri16) ||
605         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_wmovkri16) ||
606         (defInsnForSecondOpnd->GetMachineOpcode() == MOP_asm)) {
607         return false;
608     }
609 
610     if (defInsnForSecondOpnd->IsStore() || defInsnForSecondOpnd->IsLoad()) {
611         auto *memOpnd = static_cast<MemOperand *>(defInsnForSecondOpnd->GetMemOpnd());
612         if (memOpnd != nullptr && !memOpnd->IsIntactIndexed()) {
613             return false;
614         }
615     }
616 
617     srcOpndUseInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(*defInsnForSecondOpnd, secondRegNO, true);
618     /*
619      * useInsn is not expected to have multiple definitions
620      * the replaced opnd is not expected to already have a definition
621      */
622     return CheckReplacedUseInsn(insn);
623 }
624 
625 bool BackPropPattern::CheckPredefineInsn(Insn &insn)
626 {
627     if (insn.GetPrev() == defInsnForSecondOpnd) {
628         return true;
629     }
630     std::vector<Insn *> preDefInsnForFirstOpndVec;
631     /* there is no predefine insn in current bb */
632     if (!cgFunc.GetRD()->RegIsUsedOrDefBetweenInsn(firstRegNO, *defInsnForSecondOpnd, insn)) {
633         return false;
634     }
635     return true;
636 }
637 
638 bool BackPropPattern::CheckReplacedUseInsn(Insn &insn)
639 {
640     for (auto *useInsn : srcOpndUseInsnSet) {
641         if (useInsn->GetMemOpnd() != nullptr) {
642             auto *a64MemOpnd = static_cast<MemOperand *>(useInsn->GetMemOpnd());
643             if (!a64MemOpnd->IsIntactIndexed()) {
644                 if (a64MemOpnd->GetBaseRegister() != nullptr &&
645                     a64MemOpnd->GetBaseRegister()->GetRegisterNumber() == secondRegNO) {
646                     return false;
647                 }
648             }
649         }
650         /* insn has been checked def */
651         if (useInsn == &insn) {
652             if (defInsnForSecondOpnd != useInsn->GetPrev() &&
653                 cgFunc.GetRD()->FindRegUseBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, useInsn, insn.GetBB())) {
654                 return false;
655             }
656             continue;
657         }
658         auto checkOneDefOnly = [](const InsnSet &defSet, const Insn &oneDef, bool checkHasDef = false) -> bool {
659             if (defSet.size() > 1) {
660                 return false;
661             } else if (defSet.size() == 1) {
662                 if (&oneDef != *(defSet.begin())) {
663                     return false;
664                 }
665             } else {
666                 if (checkHasDef) {
667                     CHECK_FATAL(false, "find def insn failed");
668                 }
669             }
670             return true;
671         };
672         /* ensure that the use insns to be replaced are defined by defInsnForSecondOpnd only */
673         if (useInsn->IsMemAccess() && static_cast<MemOperand *>(useInsn->GetMemOpnd()) != nullptr) {
674             if (static_cast<MemOperand *>(useInsn->GetMemOpnd())->GetIndexOpt() != MemOperand::kIntact) {
675                 return false;
676             }
677         }
678         InsnSet defInsnVecOfSrcOpnd = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, secondRegNO, true);
679         if (!checkOneDefOnly(defInsnVecOfSrcOpnd, *defInsnForSecondOpnd, true)) {
680             return false;
681         }
682 
683         InsnSet defInsnVecOfFirstReg = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
684         if (!checkOneDefOnly(defInsnVecOfFirstReg, insn)) {
685             return false;
686         }
687 
688         if (defInsnForSecondOpnd != useInsn->GetPrev() &&
689             cgFunc.GetRD()->FindRegUseBetweenInsnGlobal(firstRegNO, defInsnForSecondOpnd, useInsn, insn.GetBB())) {
690             return false;
691         }
692     }
693     return true;
694 }
695 
696 bool BackPropPattern::CheckRedefineInsn(Insn &insn)
697 {
698     for (auto useInsn : srcOpndUseInsnSet) {
699         Insn *startInsn = &insn;
700         Insn *endInsn = useInsn;
701         if (endInsn == startInsn) {
702             if (cgFunc.GetRD()->RegIsUsedIncaller(firstRegNO, insn, *useInsn)) {
703                 return false;
704             } else {
705                 continue;
706             }
707         }
708 
709         if (useInsn->GetBB() == insn.GetBB()) {
710             if (useInsn->GetId() < insn.GetId()) {
711                 startInsn = useInsn;
712                 endInsn = &insn;
713             }
714         }
715         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(firstRegNO, *startInsn, *endInsn, true, true)) {
716             return false;
717         }
718         if (!cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, *startInsn, *endInsn, true)) {
719             return false;
720         }
721     }
722     return true;
723 }
724 
725 bool BackPropPattern::CheckCondition(Insn &insn)
726 {
727     if (!CheckAndGetOpnd(insn)) {
728         return false;
729     }
730     /* Unless there is a reason that dest can not live out the current BB */
731     if (cgFunc.HasAsm() && !DestOpndHasUseInsns(insn)) {
732         return false;
733     }
734     if (globalProp) {
735         if (!CheckSrcOpndDefAndUseInsnsGlobal(insn)) {
736             return false;
737         }
738     } else {
739         if (!CheckSrcOpndDefAndUseInsns(insn)) {
740             return false;
741         }
742         if (!CheckPredefineInsn(insn)) {
743             return false;
744         }
745         if (!CheckRedefineInsn(insn)) {
746             return false;
747         }
748     }
749     return true;
750 }
751 
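/* An illustrative sketch of the backward copy propagation (hypothetical virtual registers):
 *     add  w101, w102, w103    // defInsnForSecondOpnd: defines the copy source
 *     str  w101, [x104]        // use of w101
 *     mov  w100, w101          // insn: copy w101 into w100
 * when the conditions above hold, the def and the intermediate uses are rewritten to w100 and the copy
 * is removed (except for the R0 return-value case explained in the comment further down):
 *     add  w100, w102, w103
 *     str  w100, [x104]
 */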
752 void BackPropPattern::Optimize(Insn &insn)
753 {
754     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
755     ReplaceAllUsedOpndWithNewOpnd(srcOpndUseInsnSet, secondRegNO, firstOpnd, true);
756     /* replace define insn */
757     const InsnDesc *md = defInsnForSecondOpnd->GetDesc();
758     uint32 opndNum = defInsnForSecondOpnd->GetOperandSize();
759     for (uint32 i = 0; i < opndNum; ++i) {
760         Operand &opnd = defInsnForSecondOpnd->GetOperand(i);
761         if (!md->opndMD[i]->IsRegDef() && !opnd.IsMemoryAccessOperand()) {
762             continue;
763         }
764 
765         if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == secondRegNO)) {
766             /* remove remat info */
767             Operand &defOp = defInsnForSecondOpnd->GetOperand(i);
768             CHECK_FATAL(defOp.IsRegister(), "unexpect def opnd type");
769             auto &defRegOp = static_cast<RegOperand &>(defOp);
770             MIRPreg *preg = static_cast<AArch64CGFunc &>(cgFunc).GetPseudoRegFromVirtualRegNO(
771                 defRegOp.GetRegisterNumber(), CGOptions::DoCGSSA());
772             if (preg != nullptr) {
773                 preg->SetOp(OP_undef);
774             }
775             defInsnForSecondOpnd->SetOperand(i, firstOpnd);
776             cgFunc.GetRD()->UpdateInOut(*defInsnForSecondOpnd->GetBB());
777         } else if (opnd.IsMemoryAccessOperand()) {
778             MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
779             RegOperand *base = memOpnd.GetBaseRegister();
780             if (base != nullptr && memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
781                 (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed()) && base->GetRegisterNumber() == secondRegNO) {
782                 MemOperand *newMem = static_cast<MemOperand *>(opnd.Clone(*cgFunc.GetMemoryPool()));
783                 CHECK_FATAL(newMem != nullptr, "null ptr check");
784                 newMem->SetBaseRegister(static_cast<RegOperand &>(firstOpnd));
785                 defInsnForSecondOpnd->SetOperand(i, *newMem);
786                 cgFunc.GetRD()->UpdateInOut(*defInsnForSecondOpnd->GetBB());
787             }
788         }
789     }
790     /* There is a special implication when backward propagation is allowed for the physical register R0,
791      * in the case where the calling func foo directly returns the result of the callee bar:
792      * foo:
793      * bl            // bar()
794      * mov vreg, X0  // res = bar(); X0 is not redefined after the bl
795      * ....
796      * mov X0, vreg
797      * ret
798      * A naive bkprop would remove the movs, and RA might then reuse X0 because it sees "X0 has not
799      * been used". In fact, X0 is implicitly used by foo, so we need to tell RA that X0 is live.
800      * To keep RA simple, we tell RA not to reuse X0 by keeping "mov X0, X0". That is:
801      * foo:
802      * bl            // bar()
803      * ....          // Perform backward prop of X0 and force X0 not to be reused
804      * mov X0, X0    // This can easily be removed later in the peephole phase
805      * ret
806      */
807     if (cgFunc.HasCall() && !(cgFunc.GetFunction().IsReturnVoid()) && (firstRegNO == R0) &&
808         (static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber() == R0)) {
809         /* Keep this instruction: mov R0, R0 */
810         cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
811         return;
812     } else {
813         insn.GetBB()->RemoveInsn(insn);
814         cgFunc.GetRD()->UpdateInOut(*insn.GetBB(), true);
815     }
816 }
817 
818 void BackPropPattern::Init()
819 {
820     firstRegOpnd = nullptr;
821     secondRegOpnd = nullptr;
822     firstRegNO = 0;
823     secondRegNO = 0;
824     srcOpndUseInsnSet.clear();
825     defInsnForSecondOpnd = nullptr;
826 }
827 
828 void BackPropPattern::Run()
829 {
830     bool secondTime = false;
831     std::set<BB *, BBIdCmp> modifiedBB;
832     do {
833         FOR_ALL_BB(bb, &cgFunc)
834         {
835             if (bb->IsUnreachable() || (secondTime && modifiedBB.find(bb) == modifiedBB.end())) {
836                 continue;
837             }
838 
839             if (secondTime) {
840                 modifiedBB.erase(bb);
841             }
842 
843             FOR_BB_INSNS_REV(insn, bb)
844             {
845                 Init();
846                 if (!CheckCondition(*insn)) {
847                     continue;
848                 }
849                 (void)modifiedBB.insert(bb);
850                 Optimize(*insn);
851             }
852         }
853         secondTime = true;
854     } while (!modifiedBB.empty());
855 }
856 
857 bool CmpCsetPattern::CheckCondition(Insn &insn)
858 {
859     nextInsn = insn.GetNextMachineInsn();
860     if (nextInsn == nullptr || !insn.IsMachineInstruction()) {
861         return false;
862     }
863 
864     MOperator firstMop = insn.GetMachineOpcode();
865     MOperator secondMop = nextInsn->GetMachineOpcode();
866     if (!(((firstMop == MOP_wcmpri) || (firstMop == MOP_xcmpri)) &&
867           ((secondMop == MOP_wcsetrc) || (secondMop == MOP_xcsetrc)))) {
868         return false;
869     }
870 
871     /* get cmp_first operand */
872     cmpFirstOpnd = &(insn.GetOperand(kInsnSecondOpnd));
873     /* get cmp second Operand, ImmOperand must be 0 or 1 */
874     cmpSecondOpnd = &(insn.GetOperand(kInsnThirdOpnd));
875     DEBUG_ASSERT(cmpSecondOpnd->IsIntImmediate(), "expects ImmOperand");
876     ImmOperand *cmpConstOpnd = static_cast<ImmOperand *>(cmpSecondOpnd);
877     cmpConstVal = cmpConstOpnd->GetValue();
878     /* get cset first Operand */
879     csetFirstOpnd = &(nextInsn->GetOperand(kInsnFirstOpnd));
880     if (((cmpConstVal != 0) && (cmpConstVal != 1)) || (cmpFirstOpnd->GetSize() != csetFirstOpnd->GetSize()) ||
881         !OpndDefByOneOrZero(insn, 1)) {
882         return false;
883     }
884 
885     InsnSet useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, 0, false);
886     if (useInsnSet.size() > 1) {
887         return false;
888     }
889     return true;
890 }
891 
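/* An illustrative sketch of the cmp + cset folding (hypothetical registers; w101 must be proven 0/1):
 *     cmp   w101, #0
 *     cset  w100, NE
 * becomes "mov w100, w101" (or both insns are simply removed when w100 and w101 are the same register),
 * whereas with the EQ condition the pair becomes "eor w100, w101, #1". */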
892 void CmpCsetPattern::Optimize(Insn &insn)
893 {
894     Insn *csetInsn = nextInsn;
895     BB &bb = *insn.GetBB();
896     nextInsn = nextInsn->GetNextMachineInsn();
897     /* get condition Operand */
898     CondOperand &cond = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
899     if (((cmpConstVal == 0) && (cond.GetCode() == CC_NE)) || ((cmpConstVal == 1) && (cond.GetCode() == CC_EQ))) {
900         if (RegOperand::IsSameReg(*cmpFirstOpnd, *csetFirstOpnd)) {
901             bb.RemoveInsn(insn);
902             bb.RemoveInsn(*csetInsn);
903         } else {
904             MOperator mopCode = (cmpFirstOpnd->GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
905             Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, *csetFirstOpnd, *cmpFirstOpnd);
906             newInsn.SetId(insn.GetId());
907             bb.ReplaceInsn(insn, newInsn);
908             bb.RemoveInsn(*csetInsn);
909         }
910     } else if (((cmpConstVal == 1) && (cond.GetCode() == CC_NE)) || ((cmpConstVal == 0) && (cond.GetCode() == CC_EQ))) {
911         MOperator mopCode = (cmpFirstOpnd->GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
912         constexpr int64 eorImm = 1;
913         auto &aarch64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
914         ImmOperand &one = aarch64CGFunc.CreateImmOperand(eorImm, k8BitSize, false);
915         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, *csetFirstOpnd, *cmpFirstOpnd, one);
916         newInsn.SetId(insn.GetId());
917         bb.ReplaceInsn(insn, newInsn);
918         bb.RemoveInsn(*csetInsn);
919     }
920     cgFunc.GetRD()->UpdateInOut(bb, true);
921 }
922 
923 void CmpCsetPattern::Init()
924 {
925     cmpConstVal = 0;
926     cmpFirstOpnd = nullptr;
927     cmpSecondOpnd = nullptr;
928     csetFirstOpnd = nullptr;
929 }
930 
931 void CmpCsetPattern::Run()
932 {
933     FOR_ALL_BB(bb, &cgFunc)
934     {
935         FOR_BB_INSNS(insn, bb)
936         {
937             Init();
938             if (!CheckCondition(*insn)) {
939                 continue;
940             }
941             Optimize(*insn);
942         }
943     }
944 }
945 
946 bool CselPattern::CheckCondition(Insn &insn)
947 {
948     MOperator mopCode = insn.GetMachineOpcode();
949     if ((mopCode != MOP_xcselrrrc) && (mopCode != MOP_wcselrrrc)) {
950         return false;
951     }
952     return true;
953 }
954 
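/* An illustrative sketch of the csel folding (hypothetical registers): when the second operand is
 * defined as constant 1 and the third as constant 0,
 *     csel  w100, w101, w102, EQ
 * is replaced by
 *     cset  w100, EQ
 * and with the constants swapped the condition is inverted instead. */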
955 void CselPattern::Optimize(Insn &insn)
956 {
957     BB &bb = *insn.GetBB();
958     Operand &opnd0 = insn.GetOperand(kInsnFirstOpnd);
959     Operand &cond = insn.GetOperand(kInsnFourthOpnd);
960     MOperator newMop = ((opnd0.GetSize()) == k64BitSize ? MOP_xcsetrc : MOP_wcsetrc);
961     Operand &rflag = cgFunc.GetOrCreateRflag();
962     if (OpndDefByOne(insn, kInsnSecondOpnd) && OpndDefByZero(insn, kInsnThirdOpnd)) {
963         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, opnd0, cond, rflag);
964         newInsn.SetId(insn.GetId());
965         bb.ReplaceInsn(insn, newInsn);
966         cgFunc.GetRD()->InitGenUse(bb, false);
967     } else if (OpndDefByZero(insn, kInsnSecondOpnd) && OpndDefByOne(insn, kInsnThirdOpnd)) {
968         auto &originCond = static_cast<CondOperand &>(cond);
969         ConditionCode inverseCondCode = GetReverseBasicCC(originCond.GetCode());
970         if (inverseCondCode == kCcLast) {
971             return;
972         }
973         auto &aarchCGFunc = static_cast<AArch64CGFunc &>(cgFunc);
974         CondOperand &inverseCond = aarchCGFunc.GetCondOperand(inverseCondCode);
975         Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, opnd0, inverseCond, rflag);
976         newInsn.SetId(insn.GetId());
977         bb.ReplaceInsn(insn, newInsn);
978         cgFunc.GetRD()->InitGenUse(bb, false);
979     }
980 }
981 
982 void CselPattern::Run()
983 {
984     FOR_ALL_BB(bb, &cgFunc)
985     {
986         FOR_BB_INSNS_SAFE(insn, bb, nextInsn)
987         {
988             if (!CheckCondition(*insn)) {
989                 continue;
990             }
991             Optimize(*insn);
992         }
993     }
994 }
995 
996 uint32 RedundantUxtPattern::GetInsnValidBit(const Insn &insn)
997 {
998     MOperator mOp = insn.GetMachineOpcode();
999     uint32 nRet;
1000     switch (mOp) {
1001         case MOP_wcsetrc:
1002         case MOP_xcsetrc:
1003             nRet = 1;
1004             break;
1005         case MOP_wldrb:
1006         case MOP_wldarb:
1007         case MOP_wldxrb:
1008         case MOP_wldaxrb:
1009             nRet = k8BitSize;
1010             break;
1011         case MOP_wldrh:
1012         case MOP_wldarh:
1013         case MOP_wldxrh:
1014         case MOP_wldaxrh:
1015             nRet = k16BitSize;
1016             break;
1017         case MOP_wmovrr:
1018         case MOP_wmovri32:
1019         case MOP_wldrsb:
1020         case MOP_wldrsh:
1021         case MOP_wldli:
1022         case MOP_wldr:
1023         case MOP_wldp:
1024         case MOP_wldar:
1025         case MOP_wmovkri16:
1026         case MOP_wmovzri16:
1027         case MOP_wmovnri16:
1028         case MOP_wldxr:
1029         case MOP_wldaxr:
1030         case MOP_wldaxp:
1031         case MOP_wcsincrrrc:
1032         case MOP_wcselrrrc:
1033         case MOP_wcsinvrrrc:
1034             nRet = k32BitSize;
1035             break;
1036         default:
1037             nRet = k64BitSize;
1038             break;
1039     }
1040     return nRet;
1041 }
1042 
1043 uint32 RedundantUxtPattern::GetMaximumValidBit(Insn &insn, uint8 index, InsnSet &visitedInsn) const
1044 {
1045     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, index);
1046     if (defInsnSet.empty()) {
1047         /* disable opt when there is no def point. */
1048         return k64BitSize;
1049     }
1050 
1051     uint32 validBit = 0;
1052     uint32 nMaxValidBit = 0;
1053     for (auto &defInsn : defInsnSet) {
1054         if (visitedInsn.find(defInsn) != visitedInsn.end()) {
1055             continue;
1056         }
1057 
1058         (void)visitedInsn.insert(defInsn);
1059         MOperator mOp = defInsn->GetMachineOpcode();
1060         if ((mOp == MOP_wmovrr) || (mOp == MOP_xmovrr)) {
1061             validBit = GetMaximumValidBit(*defInsn, 1, visitedInsn);
1062         } else {
1063             validBit = GetInsnValidBit(*defInsn);
1064         }
1065 
1066         nMaxValidBit = nMaxValidBit < validBit ? validBit : nMaxValidBit;
1067     }
1068     return nMaxValidBit;
1069 }
1070 
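/* An illustrative sketch of the redundant zero-extension removal (hypothetical registers): ldrb already
 * produces at most 8 valid bits, so in
 *     ldrb  w101, [x102]
 *     uxtb  w100, w101
 *     add   w103, w100, w104
 * the uxtb can be dropped and the use rewritten to w101:
 *     ldrb  w101, [x102]
 *     add   w103, w101, w104
 */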
1071 bool RedundantUxtPattern::CheckCondition(Insn &insn)
1072 {
1073     BB &bb = *insn.GetBB();
1074     InsnSet visitedInsn1;
1075     InsnSet visitedInsn2;
1076     if (!((insn.GetMachineOpcode() == MOP_xuxth32 &&
1077            GetMaximumValidBit(insn, kInsnSecondOpnd, visitedInsn1) <= k16BitSize) ||
1078           (insn.GetMachineOpcode() == MOP_xuxtb32 &&
1079            GetMaximumValidBit(insn, kInsnSecondOpnd, visitedInsn2) <= k8BitSize))) {
1080         return false;
1081     }
1082 
1083     Operand &firstOpnd = insn.GetOperand(kInsnFirstOpnd);
1084     secondOpnd = &(insn.GetOperand(kInsnSecondOpnd));
1085     if (RegOperand::IsSameReg(firstOpnd, *secondOpnd)) {
1086         bb.RemoveInsn(insn);
1087         /* update in/out */
1088         cgFunc.GetRD()->UpdateInOut(bb, true);
1089         return false;
1090     }
1091     useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(insn, 0, false);
1092     RegOperand &firstRegOpnd = static_cast<RegOperand &>(firstOpnd);
1093     firstRegNO = firstRegOpnd.GetRegisterNumber();
1094     /* e.g. for "uxth R1, V501": R1 is a parameter (physical) register, so this cannot be optimized. */
1095     if (firstRegOpnd.IsPhysicalRegister()) {
1096         return false;
1097     }
1098 
1099     if (useInsnSet.empty()) {
1100         bb.RemoveInsn(insn);
1101         /* update in/out */
1102         cgFunc.GetRD()->UpdateInOut(bb, true);
1103         return false;
1104     }
1105 
1106     RegOperand *secondRegOpnd = static_cast<RegOperand *>(secondOpnd);
1107     DEBUG_ASSERT(secondRegOpnd != nullptr, "secondRegOpnd should not be nullptr");
1108     uint32 secondRegNO = secondRegOpnd->GetRegisterNumber();
1109     for (auto useInsn : useInsnSet) {
1110         InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstRegNO, true);
1111         if ((defInsnSet.size() > 1) || !(cgFunc.GetRD()->RegIsLiveBetweenInsn(secondRegNO, insn, *useInsn))) {
1112             return false;
1113         }
1114     }
1115     return true;
1116 }
1117 
1118 void RedundantUxtPattern::Optimize(Insn &insn)
1119 {
1120     BB &bb = *insn.GetBB();
1121     ReplaceAllUsedOpndWithNewOpnd(useInsnSet, firstRegNO, *secondOpnd, true);
1122     bb.RemoveInsn(insn);
1123     cgFunc.GetRD()->UpdateInOut(bb, true);
1124 }
1125 
1126 void RedundantUxtPattern::Init()
1127 {
1128     useInsnSet.clear();
1129     secondOpnd = nullptr;
1130 }
1131 
1132 void RedundantUxtPattern::Run()
1133 {
1134     FOR_ALL_BB(bb, &cgFunc)
1135     {
1136         if (bb->IsUnreachable()) {
1137             continue;
1138         }
1139         FOR_BB_INSNS_SAFE(insn, bb, nextInsn)
1140         {
1141             Init();
1142             if (!CheckCondition(*insn)) {
1143                 continue;
1144             }
1145             Optimize(*insn);
1146         }
1147     }
1148 }
1149 
1150 bool LocalVarSaveInsnPattern::CheckFirstInsn(const Insn &firstInsn)
1151 {
1152     MOperator mOp = firstInsn.GetMachineOpcode();
1153     if (mOp != MOP_xmovrr && mOp != MOP_wmovrr) {
1154         return false;
1155     }
1156     firstInsnSrcOpnd = &(firstInsn.GetOperand(kInsnSecondOpnd));
1157     RegOperand *firstInsnSrcReg = static_cast<RegOperand *>(firstInsnSrcOpnd);
1158     if (firstInsnSrcReg->GetRegisterNumber() != R0) {
1159         return false;
1160     }
1161     firstInsnDestOpnd = &(firstInsn.GetOperand(kInsnFirstOpnd));
1162     RegOperand *firstInsnDestReg = static_cast<RegOperand *>(firstInsnDestOpnd);
1163     if (firstInsnDestReg->IsPhysicalRegister()) {
1164         return false;
1165     }
1166     return true;
1167 }
1168 
1169 bool LocalVarSaveInsnPattern::CheckSecondInsn()
1170 {
1171     MOperator mOp = secondInsn->GetMachineOpcode();
1172     if (mOp != MOP_wstr && mOp != MOP_xstr) {
1173         return false;
1174     }
1175     secondInsnSrcOpnd = &(secondInsn->GetOperand(kInsnFirstOpnd));
1176     if (!RegOperand::IsSameReg(*firstInsnDestOpnd, *secondInsnSrcOpnd)) {
1177         return false;
1178     }
1179     /* check memOperand is stack memOperand, and x0 is stored in localref var region */
1180     secondInsnDestOpnd = &(secondInsn->GetOperand(kInsnSecondOpnd));
1181     MemOperand *secondInsnDestMem = static_cast<MemOperand *>(secondInsnDestOpnd);
1182     RegOperand *baseReg = secondInsnDestMem->GetBaseRegister();
1183     RegOperand *indexReg = secondInsnDestMem->GetIndexRegister();
1184     if ((baseReg == nullptr) || !(cgFunc.IsFrameReg(*baseReg)) || (indexReg != nullptr)) {
1185         return false;
1186     }
1187     return true;
1188 }
1189 
1190 bool LocalVarSaveInsnPattern::CheckAndGetUseInsn(Insn &firstInsn)
1191 {
1192     InsnSet useInsnSet = cgFunc.GetRD()->FindUseForRegOpnd(firstInsn, kInsnFirstOpnd, false);
1193     if (useInsnSet.size() != 2) { /* 2 for secondInsn and another useInsn */
1194         return false;
1195     }
1196 
1197     /* useInsnSet includes secondInsn and another useInsn */
1198     for (auto tmpUseInsn : useInsnSet) {
1199         if (tmpUseInsn->GetId() != secondInsn->GetId()) {
1200             useInsn = tmpUseInsn;
1201             break;
1202         }
1203     }
1204     return true;
1205 }
1206 
1207 bool LocalVarSaveInsnPattern::CheckLiveRange(const Insn &firstInsn)
1208 {
1209     uint32 maxInsnNO = cgFunc.GetRD()->GetMaxInsnNO();
1210     uint32 useInsnID = useInsn->GetId();
1211     uint32 defInsnID = firstInsn.GetId();
1212     uint32 distance = useInsnID > defInsnID ? useInsnID - defInsnID : defInsnID - useInsnID;
1213     float liveRangeProportion = static_cast<float>(distance) / maxInsnNO;
1214     /* 0.3 is a balance for real optimization effect */
1215     if (liveRangeProportion < 0.3) {
1216         return false;
1217     }
1218     return true;
1219 }
1220 
1221 bool LocalVarSaveInsnPattern::CheckCondition(Insn &firstInsn)
1222 {
1223     secondInsn = firstInsn.GetNext();
1224     if (secondInsn == nullptr) {
1225         return false;
1226     }
1227     /* check firstInsn is : mov vreg, R0; */
1228     if (!CheckFirstInsn(firstInsn)) {
1229         return false;
1230     }
1231     /* check the secondInsn is : str vreg, stackMem */
1232     if (!CheckSecondInsn()) {
1233         return false;
1234     }
1235     /* find the uses of the vreg */
1236     if (!CheckAndGetUseInsn(firstInsn)) {
1237         return false;
1238     }
1239     /* simulate live range using insn distance */
1240     if (!CheckLiveRange(firstInsn)) {
1241         return false;
1242     }
1243     RegOperand *firstInsnDestReg = static_cast<RegOperand *>(firstInsnDestOpnd);
1244     regno_t firstInsnDestRegNO = firstInsnDestReg->GetRegisterNumber();
1245     InsnSet defInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(*useInsn, firstInsnDestRegNO, true);
1246     if (defInsnSet.size() != 1) {
1247         return false;
1248     }
1249     DEBUG_ASSERT((*(defInsnSet.begin()))->GetId() == firstInsn.GetId(), "useInsn has only one define Insn : firstInsn");
1250     /* check whether the stack mem is changed or not */
1251     MemOperand *secondInsnDestMem = static_cast<MemOperand *>(secondInsnDestOpnd);
1252     int64 memOffset = secondInsnDestMem->GetOffsetImmediate()->GetOffsetValue();
1253     InsnSet memDefInsnSet = cgFunc.GetRD()->FindDefForMemOpnd(*useInsn, memOffset, true);
1254     if (memDefInsnSet.size() != 1) {
1255         return false;
1256     }
1257     if ((*(memDefInsnSet.begin()))->GetId() != secondInsn->GetId()) {
1258         return false;
1259     }
1260     /* check whether there is a call between the def and the use */
1261     if (!cgFunc.GetRD()->HasCallBetweenDefUse(firstInsn, *useInsn)) {
1262         return false;
1263     }
1264     return true;
1265 }
1266 
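/* An illustrative sketch of the local-var save sinking (hypothetical registers and stack offset):
 *     bl    callee
 *     mov   w100, w0           // firstInsn: copy the return value
 *     str   w100, [x29,#16]    // secondInsn: save it to the local slot
 *     ...                      // long stretch containing another call; exactly one further use of w100
 *     add   w101, w100, w102   // useInsn
 * becomes
 *     bl    callee
 *     str   w0, [x29,#16]      // store the return value directly
 *     ...
 *     ldr   w100, [x29,#16]    // reload just before the use, shortening the live range of w100
 *     add   w101, w100, w102
 */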
1267 void LocalVarSaveInsnPattern::Optimize(Insn &insn)
1268 {
1269     /* insert ldr insn before useInsn */
1270     MOperator ldrOpCode = secondInsnSrcOpnd->GetSize() == k64BitSize ? MOP_xldr : MOP_wldr;
1271     Insn &ldrInsn = cgFunc.GetInsnBuilder()->BuildInsn(ldrOpCode, *secondInsnSrcOpnd, *secondInsnDestOpnd);
1272     CHECK_FATAL(useInsn->GetId() >= 1, "value overflow");
1273     ldrInsn.SetId(useInsn->GetId() - 1);
1274     useInsn->GetBB()->InsertInsnBefore(*useInsn, ldrInsn);
1275     cgFunc.GetRD()->UpdateInOut(*useInsn->GetBB(), true);
1276     secondInsn->SetOperand(kInsnFirstOpnd, *firstInsnSrcOpnd);
1277     BB *saveInsnBB = insn.GetBB();
1278     saveInsnBB->RemoveInsn(insn);
1279     cgFunc.GetRD()->UpdateInOut(*saveInsnBB, true);
1280 }
1281 
1282 void LocalVarSaveInsnPattern::Init()
1283 {
1284     firstInsnSrcOpnd = nullptr;
1285     firstInsnDestOpnd = nullptr;
1286     secondInsnSrcOpnd = nullptr;
1287     secondInsnDestOpnd = nullptr;
1288     useInsn = nullptr;
1289     secondInsn = nullptr;
1290 }
1291 
1292 void LocalVarSaveInsnPattern::Run()
1293 {
1294     FOR_ALL_BB(bb, &cgFunc)
1295     {
1296         if (bb->IsCleanup()) {
1297             continue;
1298         }
1299         FOR_BB_INSNS(insn, bb)
1300         {
1301             if (!insn->IsMachineInstruction()) {
1302                 continue;
1303             }
1304             if (!insn->IsCall()) {
1305                 continue;
1306             }
1307             Insn *firstInsn = insn->GetNextMachineInsn();
1308             if (firstInsn == nullptr) {
1309                 continue;
1310             }
1311             Init();
1312             if (!CheckCondition(*firstInsn)) {
1313                 continue;
1314             }
1315             Optimize(*firstInsn);
1316         }
1317     }
1318 }
1319 
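/* SetExMOpType / SetLsMOpType classify a candidate use insn so that, together with exMOpTable and
 * lsMOpTable above, it can later be rewritten into the extended-register form (e.g. "add x0, x1, w2, SXTW")
 * or the shifted-register form (e.g. "add x0, x1, x2, LSL #3"); the example mnemonics here are only
 * illustrative. */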
1320 void ExtendShiftOptPattern::SetExMOpType(const Insn &use)
1321 {
1322     MOperator op = use.GetMachineOpcode();
1323     switch (op) {
1324         case MOP_xaddrrr:
1325         case MOP_xxwaddrrre:
1326         case MOP_xaddrrrs: {
1327             exMOpType = kExAdd;
1328             break;
1329         }
1330         case MOP_waddrrr:
1331         case MOP_wwwaddrrre:
1332         case MOP_waddrrrs: {
1333             exMOpType = kEwAdd;
1334             break;
1335         }
1336         case MOP_xsubrrr:
1337         case MOP_xxwsubrrre:
1338         case MOP_xsubrrrs: {
1339             exMOpType = kExSub;
1340             break;
1341         }
1342         case MOP_wsubrrr:
1343         case MOP_wwwsubrrre:
1344         case MOP_wsubrrrs: {
1345             exMOpType = kEwSub;
1346             break;
1347         }
1348         case MOP_xcmnrr:
1349         case MOP_xwcmnrre:
1350         case MOP_xcmnrrs: {
1351             exMOpType = kExCmn;
1352             break;
1353         }
1354         case MOP_wcmnrr:
1355         case MOP_wwcmnrre:
1356         case MOP_wcmnrrs: {
1357             exMOpType = kEwCmn;
1358             break;
1359         }
1360         case MOP_xcmprr:
1361         case MOP_xwcmprre:
1362         case MOP_xcmprrs: {
1363             exMOpType = kExCmp;
1364             break;
1365         }
1366         case MOP_wcmprr:
1367         case MOP_wwcmprre:
1368         case MOP_wcmprrs: {
1369             exMOpType = kEwCmp;
1370             break;
1371         }
1372         default: {
1373             exMOpType = kExUndef;
1374         }
1375     }
1376 }
1377 
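/* Classify the use insn into its shifted-register mop family; the resulting lsMOpType
 * is used as an index into lsMOpTable when the def turns out to be a plain shift. */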
1378 void ExtendShiftOptPattern::SetLsMOpType(const Insn &use)
1379 {
1380     MOperator op = use.GetMachineOpcode();
1381     switch (op) {
1382         case MOP_xaddrrr:
1383         case MOP_xaddrrrs: {
1384             lsMOpType = kLxAdd;
1385             break;
1386         }
1387         case MOP_waddrrr:
1388         case MOP_waddrrrs: {
1389             lsMOpType = kLwAdd;
1390             break;
1391         }
1392         case MOP_xsubrrr:
1393         case MOP_xsubrrrs: {
1394             lsMOpType = kLxSub;
1395             break;
1396         }
1397         case MOP_wsubrrr:
1398         case MOP_wsubrrrs: {
1399             lsMOpType = kLwSub;
1400             break;
1401         }
1402         case MOP_xcmnrr:
1403         case MOP_xcmnrrs: {
1404             lsMOpType = kLxCmn;
1405             break;
1406         }
1407         case MOP_wcmnrr:
1408         case MOP_wcmnrrs: {
1409             lsMOpType = kLwCmn;
1410             break;
1411         }
1412         case MOP_xcmprr:
1413         case MOP_xcmprrs: {
1414             lsMOpType = kLxCmp;
1415             break;
1416         }
1417         case MOP_wcmprr:
1418         case MOP_wcmprrs: {
1419             lsMOpType = kLwCmp;
1420             break;
1421         }
1422         case MOP_xeorrrr:
1423         case MOP_xeorrrrs: {
1424             lsMOpType = kLxEor;
1425             break;
1426         }
1427         case MOP_weorrrr:
1428         case MOP_weorrrrs: {
1429             lsMOpType = kLwEor;
1430             break;
1431         }
1432         case MOP_xinegrr:
1433         case MOP_xinegrrs: {
1434             lsMOpType = kLxNeg;
1435             replaceIdx = kInsnSecondOpnd;
1436             break;
1437         }
1438         case MOP_winegrr:
1439         case MOP_winegrrs: {
1440             lsMOpType = kLwNeg;
1441             replaceIdx = kInsnSecondOpnd;
1442             break;
1443         }
1444         case MOP_xiorrrr:
1445         case MOP_xiorrrrs: {
1446             lsMOpType = kLxIor;
1447             break;
1448         }
1449         case MOP_wiorrrr:
1450         case MOP_wiorrrrs: {
1451             lsMOpType = kLwIor;
1452             break;
1453         }
1454         default: {
1455             lsMOpType = kLsUndef;
1456         }
1457     }
1458 }
1459 
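/* Record whether the def insn is a sign/zero extend (extendOp) or an immediate shift
 * (shiftOp); any other def leaves both markers undefined. */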
1460 void ExtendShiftOptPattern::SelectExtendOrShift(const Insn &def)
1461 {
1462     MOperator op = def.GetMachineOpcode();
1463     switch (op) {
1464         case MOP_xsxtb32:
1465         case MOP_xsxtb64:
1466             extendOp = ExtendShiftOperand::kSXTB;
1467             break;
1468         case MOP_xsxth32:
1469         case MOP_xsxth64:
1470             extendOp = ExtendShiftOperand::kSXTH;
1471             break;
1472         case MOP_xsxtw64:
1473             extendOp = ExtendShiftOperand::kSXTW;
1474             break;
1475         case MOP_xuxtb32:
1476             extendOp = ExtendShiftOperand::kUXTB;
1477             break;
1478         case MOP_xuxth32:
1479             extendOp = ExtendShiftOperand::kUXTH;
1480             break;
1481         case MOP_xuxtw64:
1482             extendOp = ExtendShiftOperand::kUXTW;
1483             break;
1484         case MOP_wlslrri5:
1485         case MOP_xlslrri6:
1486             shiftOp = BitShiftOperand::kLSL;
1487             break;
1488         case MOP_xlsrrri6:
1489         case MOP_wlsrrri5:
1490             shiftOp = BitShiftOperand::kLSR;
1491             break;
1492         case MOP_xasrrri6:
1493         case MOP_wasrrri5:
1494             shiftOp = BitShiftOperand::kASR;
1495             break;
1496         default: {
1497             extendOp = ExtendShiftOperand::kUndef;
1498             shiftOp = BitShiftOperand::kUndef;
1499         }
1500     }
1501 }
1502 
1503 /* precondition: defInsn has already been matched by SelectExtendOrShift */
1504 bool ExtendShiftOptPattern::CheckDefUseInfo(Insn &use, uint32 size)
1505 {
1506     auto &regOperand = static_cast<RegOperand &>(defInsn->GetOperand(kInsnFirstOpnd));
1507     Operand &defSrcOpnd = defInsn->GetOperand(kInsnSecondOpnd);
1508     CHECK_FATAL(defSrcOpnd.IsRegister(), "defSrcOpnd must be register!");
1509     auto &regDefSrc = static_cast<RegOperand &>(defSrcOpnd);
1510     if (regDefSrc.IsPhysicalRegister()) {
1511         return false;
1512     }
1513     /*
1514      * has implicit cvt
1515      *
1516      * avoid cases like the following:
1517      *   lsr  x2, x2, #8
1518      *   ubfx w2, x2, #0, #32                lsr  x2, x2, #8
1519      *   eor  w0, w0, w2           ===>      eor  w0, w0, x2     ==\=>  eor w0, w0, w2, LSR #8
1520      *
1521      * the truncation causes a wrong value for a right shift;
1522      * a left shift does not matter
1523      */
1524     auto &useDefOpnd = static_cast<RegOperand &>(use.GetOperand(kInsnFirstOpnd));
1525     if ((shiftOp != BitShiftOperand::kUndef || extendOp != ExtendShiftOperand::kUndef) &&
1526         (regDefSrc.GetSize() > regOperand.GetSize() || useDefOpnd.GetSize() != size)) {
1527         return false;
1528     }
1529     if ((shiftOp == BitShiftOperand::kLSR || shiftOp == BitShiftOperand::kASR) && (defSrcOpnd.GetSize() > size)) {
1530         return false;
1531     }
1532     regno_t defSrcRegNo = regDefSrc.GetRegisterNumber();
1533     /* check regDefSrc */
1534     InsnSet defSrcSet = cgFunc.GetRD()->FindDefForRegOpnd(use, defSrcRegNo, true);
1535     /* The first defSrcInsn must be closest to useInsn */
1536     if (defSrcSet.empty()) {
1537         return false;
1538     }
1539     Insn *defSrcInsn = *defSrcSet.begin();
1540     const InsnDesc *md = defSrcInsn->GetDesc();
1541     if ((size != regOperand.GetSize()) && md->IsMove()) {
1542         return false;
1543     }
1544     if (defInsn->GetBB() == use.GetBB()) {
1545         /* check replace reg def between defInsn and currInsn */
1546         Insn *tmpInsn = defInsn->GetNext();
1547         while (tmpInsn != &use) {
1548             if (tmpInsn == defSrcInsn || tmpInsn == nullptr) {
1549                 return false;
1550             }
1551             tmpInsn = tmpInsn->GetNext();
1552         }
1553     } else { /* def use not in same BB */
1554         if (defSrcInsn->GetBB() != defInsn->GetBB()) {
1555             return false;
1556         }
1557         if (defSrcInsn->GetId() > defInsn->GetId()) {
1558             return false;
1559         }
1560     }
1561     /* case:
1562      * lsl w0, w0, #5
1563      * eor w0, w2, w0
1564      * --->
1565      * eor w0, w2, w0, lsl 5
1566      */
1567     if (defSrcInsn == defInsn) {
1568         InsnSet replaceRegUseSet = cgFunc.GetRD()->FindUseForRegOpnd(*defInsn, defSrcRegNo, true);
1569         if (replaceRegUseSet.size() != k1BitSize) {
1570             return false;
1571         }
1572         removeDefInsn = true;
1573     }
1574     return true;
1575 }
1576 
1577 /* Check whether ExtendShiftOptPattern optimization can be performed. */
1578 ExtendShiftOptPattern::SuffixType ExtendShiftOptPattern::CheckOpType(const Operand &lastOpnd) const
1579 {
1580     /* Assign values to useType and defType. */
1581     uint32 useType = ExtendShiftOptPattern::kNoSuffix;
1582     uint32 defType = shiftOp;
1583     if (extendOp != ExtendShiftOperand::kUndef) {
1584         defType = ExtendShiftOptPattern::kExten;
1585     }
1586     if (lastOpnd.IsOpdShift()) {
1587         BitShiftOperand lastShiftOpnd = static_cast<const BitShiftOperand &>(lastOpnd);
1588         useType = lastShiftOpnd.GetShiftOp();
1589     } else if (lastOpnd.IsOpdExtend()) {
1590         ExtendShiftOperand lastExtendOpnd = static_cast<const ExtendShiftOperand &>(lastOpnd);
1591         useType = ExtendShiftOptPattern::kExten;
1592         /* when both insns are extends, they only combine if the extend ops match (exten(self)) */
1593         if (useType == defType && extendOp != lastExtendOpnd.GetExtendOp()) {
1594             return ExtendShiftOptPattern::kNoSuffix;
1595         }
1596     }
1597     return doOptimize[useType][defType];
1598 }
1599 
1600 /* new Insn extenType:
1601  * =======================================
1602  * | (useMop)  | (defMop) | (newMop) |
1603  * | nosuffix  |   all    |   all    |
1604  * | exten     |   ex     |   ex     |
1605  * | ls        |   ex     |   ls     |
1606  * | asr       |   !asr   |   F      |
1607  * | !asr      |   asr    |   F      |
1608  * (F: the combination is not folded)
1609  * =======================================
1610  */
1611 void ExtendShiftOptPattern::ReplaceUseInsn(Insn &use, const Insn &def, uint32 amount)
1612 {
1613     AArch64CGFunc &a64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
1614     uint32 lastIdx = use.GetOperandSize() - k1BitSize;
1615     Operand &lastOpnd = use.GetOperand(lastIdx);
1616     ExtendShiftOptPattern::SuffixType optType = CheckOpType(lastOpnd);
1617     Operand *shiftOpnd = nullptr;
1618     if (optType == ExtendShiftOptPattern::kNoSuffix) {
1619         return;
1620     } else if (optType == ExtendShiftOptPattern::kExten) {
1621         replaceOp = exMOpTable[exMOpType];
1622         if (amount > k4BitSize) {
1623             return;
1624         }
1625         shiftOpnd = &a64CGFunc.CreateExtendShiftOperand(extendOp, amount, static_cast<int32>(k64BitSize));
1626     } else {
1627         replaceOp = lsMOpTable[lsMOpType];
1628         if (amount >= k32BitSize) {
1629             return;
1630         }
1631         shiftOpnd = &a64CGFunc.CreateBitShiftOperand(shiftOp, amount, static_cast<int32>(k64BitSize));
1632     }
1633     if (replaceOp == MOP_undef) {
1634         return;
1635     }
1636 
1637     Insn *replaceUseInsn = nullptr;
1638     Operand &firstOpnd = use.GetOperand(kInsnFirstOpnd);
1639     Operand *secondOpnd = &use.GetOperand(kInsnSecondOpnd);
1640     if (replaceIdx == kInsnSecondOpnd) { /* replace neg insn */
1641         secondOpnd = &def.GetOperand(kInsnSecondOpnd);
1642         replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, *shiftOpnd);
1643     } else {
1644         Operand &thirdOpnd = def.GetOperand(kInsnSecondOpnd);
1645         replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, thirdOpnd, *shiftOpnd);
1646     }
1647     use.GetBB()->ReplaceInsn(use, *replaceUseInsn);
1648     if (GLOBAL_DUMP) {
1649         LogInfo::MapleLogger() << ">>>>>>> In ExtendShiftOptPattern : <<<<<<<\n";
1650         LogInfo::MapleLogger() << "=======ReplaceInsn :\n";
1651         use.Dump();
1652         LogInfo::MapleLogger() << "=======NewInsn :\n";
1653         replaceUseInsn->Dump();
1654     }
1655     if (removeDefInsn) {
1656         if (GLOBAL_DUMP) {
1657             LogInfo::MapleLogger() << ">>>>>>> In ExtendShiftOptPattern : <<<<<<<\n";
1658             LogInfo::MapleLogger() << "=======RemoveDefInsn :\n";
1659             defInsn->Dump();
1660         }
1661         defInsn->GetBB()->RemoveInsn(*defInsn);
1662     }
1663     cgFunc.GetRD()->InitGenUse(*defInsn->GetBB(), false);
1664     cgFunc.GetRD()->UpdateInOut(*use.GetBB(), true);
1665     newInsn = replaceUseInsn;
1666     optSuccess = true;
1667 }
1668 
1669 /*
1670  * pattern1:
1671  * UXTB/UXTW X0, W1              <---- def x0
1672  * ....                          <---- (X0 not used)
1673  * AND/SUB/EOR X0, X1, X0        <---- use x0
1674  * ======>
1675  * AND/SUB/EOR X0, X1, W1, UXTB/UXTW
1676  *
1677  * pattern2:
1678  * LSL/LSR X0, X1, #8
1679  * ....(X0 not used)
1680  * AND/SUB/EOR X0, X1, X0
1681  * ======>
1682  * AND/SUB/EOR X0, X1, X1, LSL/LSR #8
1683  */
1684 void ExtendShiftOptPattern::Optimize(Insn &insn)
1685 {
1686     uint32 amount = 0;
1687     uint32 offset = 0;
1688     uint32 lastIdx = insn.GetOperandSize() - k1BitSize;
1689     Operand &lastOpnd = insn.GetOperand(lastIdx);
1690     if (lastOpnd.IsOpdShift()) {
1691         BitShiftOperand &lastShiftOpnd = static_cast<BitShiftOperand &>(lastOpnd);
1692         amount = lastShiftOpnd.GetShiftAmount();
1693     } else if (lastOpnd.IsOpdExtend()) {
1694         ExtendShiftOperand &lastExtendOpnd = static_cast<ExtendShiftOperand &>(lastOpnd);
1695         amount = lastExtendOpnd.GetShiftAmount();
1696     }
1697     if (shiftOp != BitShiftOperand::kUndef) {
1698         ImmOperand &immOpnd = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1699         offset = static_cast<uint32>(immOpnd.GetValue());
1700     }
1701     amount += offset;
1702 
1703     ReplaceUseInsn(insn, *defInsn, amount);
1704 }
1705 
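/* Apply the pattern and, on success, retry on the newly built insn: the folded insn may
 * itself be a use whose def is another foldable extend or shift. */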
1706 void ExtendShiftOptPattern::DoExtendShiftOpt(Insn &insn)
1707 {
1708     Init();
1709     if (!CheckCondition(insn)) {
1710         return;
1711     }
1712     Optimize(insn);
1713     if (optSuccess) {
1714         DoExtendShiftOpt(*newInsn);
1715     }
1716 }
1717 
1718 /* check and set:
1719  * exMOpType, lsMOpType, extendOp, shiftOp, defInsn
1720  */
1721 bool ExtendShiftOptPattern::CheckCondition(Insn &insn)
1722 {
1723     SetLsMOpType(insn);
1724     SetExMOpType(insn);
1725     if ((exMOpType == kExUndef) && (lsMOpType == kLsUndef)) {
1726         return false;
1727     }
1728     RegOperand &regOperand = static_cast<RegOperand &>(insn.GetOperand(replaceIdx));
1729     if (regOperand.IsPhysicalRegister()) {
1730         return false;
1731     }
1732     regno_t regNo = regOperand.GetRegisterNumber();
1733     InsnSet regDefInsnSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, regNo, true);
1734     if (regDefInsnSet.size() != k1BitSize) {
1735         return false;
1736     }
1737     defInsn = *regDefInsnSet.begin();
1738     CHECK_FATAL((defInsn != nullptr), "defInsn is null!");
1739 
1740     SelectExtendOrShift(*defInsn);
1741     /* defInsn must be shift or extend */
1742     if ((extendOp == ExtendShiftOperand::kUndef) && (shiftOp == BitShiftOperand::kUndef)) {
1743         return false;
1744     }
1745     return CheckDefUseInfo(insn, regOperand.GetSize());
1746 }
1747 
1748 void ExtendShiftOptPattern::Init()
1749 {
1750     replaceOp = MOP_undef;
1751     extendOp = ExtendShiftOperand::kUndef;
1752     shiftOp = BitShiftOperand::kUndef;
1753     defInsn = nullptr;
1754     replaceIdx = kInsnThirdOpnd;
1755     newInsn = nullptr;
1756     optSuccess = false;
1757     removeDefInsn = false;
1758     exMOpType = kExUndef;
1759     lsMOpType = kLsUndef;
1760 }
1761 
1762 void ExtendShiftOptPattern::Run()
1763 {
1764     if (!cgFunc.GetMirModule().IsCModule()) {
1765         return;
1766     }
1767     FOR_ALL_BB_REV(bb, &cgFunc)
1768     {
1769         FOR_BB_INSNS_REV(insn, bb)
1770         {
1771             if (!insn->IsMachineInstruction()) {
1772                 continue;
1773             }
1774             DoExtendShiftOpt(*insn);
1775         }
1776     }
1777 }
1778 
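/* Illustrative sketch (hypothetical registers, not taken from the original source): when
 * the reaching defs guarantee the source already fits the extend width, the zero-extend
 * is weakened to a move:
 *   ldrb w1, [x2]              ldrb w1, [x2]
 *   uxtb w0, w1       ===>     mov  w0, w1
 */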
1779 void ExtenToMovPattern::Run()
1780 {
1781     if (!cgFunc.GetMirModule().IsCModule()) {
1782         return;
1783     }
1784     FOR_ALL_BB(bb, &cgFunc)
1785     {
1786         FOR_BB_INSNS(insn, bb)
1787         {
1788             if (!insn->IsMachineInstruction()) {
1789                 continue;
1790             }
1791             if (!CheckCondition(*insn)) {
1792                 continue;
1793             }
1794             Optimize(*insn);
1795         }
1796     }
1797 }
1798 
1799 /* Check for Implicit uxtw */
1800 bool ExtenToMovPattern::CheckHideUxtw(const Insn &insn, regno_t regno) const
1801 {
1802     const InsnDesc *md = &AArch64CG::kMd[insn.GetMachineOpcode()];
1803     if (md->IsMove()) {
1804         return false;
1805     }
1806     uint32 optSize = insn.GetOperandSize();
1807     for (uint32 i = 0; i < optSize; ++i) {
1808         if (regno == static_cast<RegOperand &>(insn.GetOperand(i)).GetRegisterNumber()) {
1809             auto *curOpndDescription = md->GetOpndDes(i);
1810             if (curOpndDescription->IsDef() && curOpndDescription->GetSize() == k32BitSize) {
1811                 return true;
1812             }
1813             break;
1814         }
1815     }
1816     return false;
1817 }
1818 
1819 bool ExtenToMovPattern::CheckUxtw(Insn &insn)
1820 {
1821     if (insn.GetOperand(kInsnFirstOpnd).GetSize() == k64BitSize &&
1822         insn.GetOperand(kInsnSecondOpnd).GetSize() == k32BitSize) {
1823         DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "is not Register");
1824         regno_t regno = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
1825         InsnSet preDef = cgFunc.GetRD()->FindDefForRegOpnd(insn, kInsnSecondOpnd, false);
1826         if (preDef.empty()) {
1827             return false;
1828         }
1829         for (auto defInsn : preDef) {
1830             if (!CheckHideUxtw(*defInsn, regno)) {
1831                 return false;
1832             }
1833         }
1834         replaceMop = MOP_xmovrr_uxtw;
1835         return true;
1836     }
1837     return false;
1838 }
1839 
1840 bool ExtenToMovPattern::CheckSrcReg(Insn &insn, regno_t srcRegNo, uint32 validNum)
1841 {
1842     InsnSet srcDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, srcRegNo, true);
1843     for (auto defInsn : srcDefSet) {
1844         CHECK_FATAL((defInsn != nullptr), "defInsn is null!");
1845         MOperator mOp = defInsn->GetMachineOpcode();
1846         switch (mOp) {
1847             case MOP_wiorrri12:
1848             case MOP_weorrri12: {
1849                 /* check the immediate when the mop is orr/eor */
1850                 ImmOperand &imm = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1851                 auto bitNum = static_cast<uint32>(imm.GetValue());
1852                 if ((bitNum >> validNum) != 0) {
1853                     return false;
1854                 }
1855                 break;
1856             }
1857             case MOP_wandrri12: {
1858                 /* check defSrcReg */
1859                 RegOperand &defSrcRegOpnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1860                 regno_t defSrcRegNo = defSrcRegOpnd.GetRegisterNumber();
1861                 if (!CheckSrcReg(*defInsn, defSrcRegNo, validNum)) {
1862                     return false;
1863                 }
1864                 break;
1865             }
1866             case MOP_wandrrr: {
1867                 /* check defSrcReg */
1868                 RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1869                 RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1870                 regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
1871                 regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
1872                 if (!CheckSrcReg(*defInsn, defSrcRegNo1, validNum) && !CheckSrcReg(*defInsn, defSrcRegNo2, validNum)) {
1873                     return false;
1874                 }
1875                 break;
1876             }
1877             case MOP_wiorrrr:
1878             case MOP_weorrrr: {
1879                 /* check defSrcReg */
1880                 RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
1881                 RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
1882                 regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
1883                 regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
1884                 if (!CheckSrcReg(*defInsn, defSrcRegNo1, validNum) || !CheckSrcReg(*defInsn, defSrcRegNo2, validNum)) {
1885                     return false;
1886                 }
1887                 break;
1888             }
1889             case MOP_wldrb: {
1890                 if (validNum != k8BitSize) {
1891                     return false;
1892                 }
1893                 break;
1894             }
1895             case MOP_wldrh: {
1896                 if (validNum != k16BitSize) {
1897                     return false;
1898                 }
1899                 break;
1900             }
1901             default:
1902                 return false;
1903         }
1904     }
1905     return true;
1906 }
1907 
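/* The uxtb/uxth is redundant when every reaching def of the source can only set the low
 * validNum bits; in that case a plain 32-bit mov is sufficient. */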
1908 bool ExtenToMovPattern::BitNotAffected(Insn &insn, uint32 validNum)
1909 {
1910     RegOperand &firstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1911     RegOperand &secondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1912     regno_t desRegNo = firstOpnd.GetRegisterNumber();
1913     regno_t srcRegNo = secondOpnd.GetRegisterNumber();
1914     InsnSet desDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, desRegNo, true);
1915     /* desReg is not redefined */
1916     if (!desDefSet.empty()) {
1917         return false;
1918     }
1919     if (!CheckSrcReg(insn, srcRegNo, validNum)) {
1920         return false;
1921     }
1922     replaceMop = MOP_wmovrr;
1923     return true;
1924 }
1925 
1926 bool ExtenToMovPattern::CheckCondition(Insn &insn)
1927 {
1928     MOperator mOp = insn.GetMachineOpcode();
1929     switch (mOp) {
1930         case MOP_xuxtw64:
1931             return CheckUxtw(insn);
1932         case MOP_xuxtb32:
1933             return BitNotAffected(insn, k8BitSize);
1934         case MOP_xuxth32:
1935             return BitNotAffected(insn, k16BitSize);
1936         default:
1937             return false;
1938     }
1939 }
1940 
1941 /* No initialization required */
1942 void ExtenToMovPattern::Init()
1943 {
1944     replaceMop = MOP_undef;
1945 }
1946 
1947 void ExtenToMovPattern::Optimize(Insn &insn)
1948 {
1949     insn.SetMOP(AArch64CG::kMd[replaceMop]);
1950 }
1951 
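/* Illustrative sketch (hypothetical registers, not taken from the original source): a
 * compare repeating an earlier identical compare, with no call in the block and no
 * redefinition of its sources in between, is redundant and gets removed:
 *   cmp w0, w1
 *   ...              <- no call, w0/w1 not redefined
 *   cmp w0, w1       <- removed
 */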
1952 void SameDefPattern::Run()
1953 {
1954     FOR_ALL_BB_REV(bb, &cgFunc)
1955     {
1956         FOR_BB_INSNS_REV(insn, bb)
1957         {
1958             if (!CheckCondition(*insn)) {
1959                 continue;
1960             }
1961             Optimize(*insn);
1962         }
1963     }
1964 }
1965 
1966 void SameDefPattern::Init()
1967 {
1968     currInsn = nullptr;
1969     sameInsn = nullptr;
1970 }
1971 
1972 bool SameDefPattern::CheckCondition(Insn &insn)
1973 {
1974     MOperator mOp = insn.GetMachineOpcode();
1975     if (insn.GetBB()->GetPreds().size() > k1BitSize) {
1976         return false;
1977     }
1978     if (insn.GetBB()->HasCall()) {
1979         return false;
1980     }
1981     return (mOp == MOP_wcmprr) || (mOp == MOP_xcmprr) || (mOp == MOP_xwcmprre) || (mOp == MOP_xcmprrs);
1982 }
1983 
1984 void SameDefPattern::Optimize(Insn &insn)
1985 {
1986     InsnSet sameDefSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, 0, false);
1987     if (sameDefSet.size() != k1BitSize) {
1988         return;
1989     }
1990     Insn *sameDefInsn = *sameDefSet.begin();
1991     if (sameDefInsn == nullptr) {
1992         return;
1993     }
1994     currInsn = &insn;
1995     sameInsn = sameDefInsn;
1996     if (!IsSameDef()) {
1997         return;
1998     }
1999     if (GLOBAL_DUMP) {
2000         LogInfo::MapleLogger() << ">>>>>>> In SameDefPattern : <<<<<<<\n";
2001         LogInfo::MapleLogger() << "=======remove insn: \n";
2002         insn.Dump();
2003         LogInfo::MapleLogger() << "=======sameDef insn: \n";
2004         sameDefInsn->Dump();
2005     }
2006     insn.GetBB()->RemoveInsn(insn);
2007 }
2008 
2009 bool SameDefPattern::IsSameDef()
2010 {
2011     if (!CheckCondition(*sameInsn)) {
2012         return false;
2013     }
2014     if (currInsn == sameInsn) {
2015         return false;
2016     }
2017     if (currInsn->GetMachineOpcode() != sameInsn->GetMachineOpcode()) {
2018         return false;
2019     }
2020     for (uint32 i = k1BitSize; i < currInsn->GetOperandSize(); ++i) {
2021         Operand &opnd0 = currInsn->GetOperand(i);
2022         Operand &opnd1 = sameInsn->GetOperand(i);
2023         if (!IsSameOperand(opnd0, opnd1)) {
2024             return false;
2025         }
2026     }
2027     return true;
2028 }
2029 
2030 bool SameDefPattern::IsSameOperand(Operand &opnd0, Operand &opnd1)
2031 {
2032     if (opnd0.IsRegister()) {
2033         CHECK_FATAL(opnd1.IsRegister(), "must be RegOperand!");
2034         RegOperand &regOpnd0 = static_cast<RegOperand &>(opnd0);
2035         RegOperand &regOpnd1 = static_cast<RegOperand &>(opnd1);
2036         if (!RegOperand::IsSameReg(regOpnd0, regOpnd1)) {
2037             return false;
2038         }
2039         regno_t regNo = regOpnd0.GetRegisterNumber();
2040         /* src reg not redefined between sameInsn and currInsn */
2041         if (SrcRegIsRedefined(regNo)) {
2042             return false;
2043         }
2044     } else if (opnd0.IsOpdShift()) {
2045         CHECK_FATAL(opnd1.IsOpdShift(), "must be ShiftOperand!");
2046         BitShiftOperand &shiftOpnd0 = static_cast<BitShiftOperand &>(opnd0);
2047         BitShiftOperand &shiftOpnd1 = static_cast<BitShiftOperand &>(opnd1);
2048         if (shiftOpnd0.GetShiftAmount() != shiftOpnd1.GetShiftAmount()) {
2049             return false;
2050         }
2051     } else if (opnd0.IsOpdExtend()) {
2052         CHECK_FATAL(opnd1.IsOpdExtend(), "must be ExtendOperand!");
2053         ExtendShiftOperand &extendOpnd0 = static_cast<ExtendShiftOperand &>(opnd0);
2054         ExtendShiftOperand &extendOpnd1 = static_cast<ExtendShiftOperand &>(opnd1);
2055         if (extendOpnd0.GetShiftAmount() != extendOpnd1.GetShiftAmount()) {
2056             return false;
2057         }
2058     } else {
2059         return false;
2060     }
2061     return true;
2062 }
2063 
2064 bool SameDefPattern::SrcRegIsRedefined(regno_t regNo)
2065 {
2066     AArch64ReachingDefinition *a64RD = static_cast<AArch64ReachingDefinition *>(cgFunc.GetRD());
2067     if (currInsn->GetBB() == sameInsn->GetBB()) {
2068         FOR_BB_INSNS(insn, currInsn->GetBB())
2069         {
2070             if (insn->GetMachineOpcode() == MOP_xbl) {
2071                 return true;
2072             }
2073         }
2074         if (!a64RD->FindRegDefBetweenInsn(regNo, sameInsn, currInsn).empty()) {
2075             return true;
2076         }
2077     } else if (a64RD->HasRegDefBetweenInsnGlobal(regNo, *sameInsn, *currInsn)) {
2078         return true;
2079     }
2080     return false;
2081 }
2082 
2083 void AndCbzPattern::Init()
2084 {
2085     prevInsn = nullptr;
2086 }
2087 
2088 bool AndCbzPattern::IsAdjacentArea(Insn &prev, Insn &curr) const
2089 {
2090     if (prev.GetBB() == curr.GetBB()) {
2091         return true;
2092     }
2093     for (auto *succ : prev.GetBB()->GetSuccs()) {
2094         if (succ == curr.GetBB()) {
2095             return true;
2096         }
2097     }
2098     return false;
2099 }
2100 
2101 bool AndCbzPattern::CheckCondition(Insn &insn)
2102 {
2103     auto *aarch64RD = static_cast<AArch64ReachingDefinition *>(cgFunc.GetRD());
2104     MOperator mOp = insn.GetMachineOpcode();
2105     if ((mOp != MOP_wcbz) && (mOp != MOP_xcbz) && (mOp != MOP_wcbnz) && (mOp != MOP_xcbnz)) {
2106         return false;
2107     }
2108     regno_t regNo = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2109     InsnSet defSet = cgFunc.GetRD()->FindDefForRegOpnd(insn, regNo, true);
2110     if (defSet.size() != k1BitSize) {
2111         return false;
2112     }
2113     prevInsn = *defSet.begin();
2114     if (prevInsn->GetMachineOpcode() != MOP_wandrri12 && prevInsn->GetMachineOpcode() != MOP_xandrri13) {
2115         return false;
2116     }
2117     if (!IsAdjacentArea(*prevInsn, insn)) {
2118         return false;
2119     }
2120     regno_t propRegNo = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
2121     if (prevInsn->GetBB() == insn.GetBB() && !(aarch64RD->FindRegDefBetweenInsn(propRegNo, prevInsn, &insn).empty())) {
2122         return false;
2123     }
2124     if (prevInsn->GetBB() != insn.GetBB() && aarch64RD->HasRegDefBetweenInsnGlobal(propRegNo, *prevInsn, insn)) {
2125         return false;
2126     }
2127     if (!(cgFunc.GetRD()->FindUseForRegOpnd(insn, regNo, true).empty())) {
2128         return false;
2129     }
2130     return true;
2131 }
2132 
2133 int64 AndCbzPattern::CalculateLogValue(int64 val) const
2134 {
2135     return (__builtin_popcountll(static_cast<uint64>(val)) == 1) ? (__builtin_ffsll(val) - 1) : -1;
2136 }
2137 
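/* Illustrative example (hypothetical registers and label, not taken from the original
 * source), assuming the and-immediate has exactly one bit set:
 *   and  w1, w2, #4
 *   cbz  w1, .L.target
 * ===>
 *   tbz  w2, #2, .L.target   (bit index 2 == log2(4))
 */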
2138 void AndCbzPattern::Optimize(Insn &insn)
2139 {
2140     BB *bb = insn.GetBB();
2141     auto &aarchFunc = static_cast<AArch64CGFunc &>(cgFunc);
2142     auto &andImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
2143     int64 tbzVal = CalculateLogValue(andImm.GetValue());
2144     if (tbzVal < 0) {
2145         return;
2146     }
2147     MOperator mOp = insn.GetMachineOpcode();
2148     MOperator newMop = MOP_undef;
2149     switch (mOp) {
2150         case MOP_wcbz:
2151             newMop = MOP_wtbz;
2152             break;
2153         case MOP_wcbnz:
2154             newMop = MOP_wtbnz;
2155             break;
2156         case MOP_xcbz:
2157             newMop = MOP_xtbz;
2158             break;
2159         case MOP_xcbnz:
2160             newMop = MOP_xtbnz;
2161             break;
2162         default:
2163             CHECK_FATAL(false, "must be cbz/cbnz");
2164             break;
2165     }
2166     auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
2167     ImmOperand &tbzImm = aarchFunc.CreateImmOperand(tbzVal, k8BitSize, false);
2168     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnSecondOpnd), tbzImm, label);
2169     if (!VERIFY_INSN(&newInsn)) {
2170         return;
2171     }
2172     newInsn.SetId(insn.GetId());
2173     bb->ReplaceInsn(insn, newInsn);
2174     if (GLOBAL_DUMP) {
2175         LogInfo::MapleLogger() << ">>>>>>> In AndCbzPattern : <<<<<<<\n";
2176         LogInfo::MapleLogger() << "=======PrevInsn :\n";
2177         LogInfo::MapleLogger() << "=======ReplaceInsn :\n";
2178         insn.Dump();
2179         LogInfo::MapleLogger() << "=======NewInsn :\n";
2180         newInsn.Dump();
2181     }
2182     cgFunc.GetRD()->UpdateInOut(*bb, true);
2183 }
2184 
2185 void AndCbzPattern::Run()
2186 {
2187     Init();
2188     FOR_ALL_BB_REV(bb, &cgFunc)
2189     {
2190         FOR_BB_INSNS_REV(insn, bb)
2191         {
2192             if (!insn->IsMachineInstruction() || !CheckCondition(*insn)) {
2193                 continue;
2194             }
2195             Optimize(*insn);
2196         }
2197     }
2198 }
2199 
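/* Illustrative sketch (hypothetical registers, not taken from the original source): when
 * an earlier insn in the same BB already computed the same right-hand side and none of
 * its registers were redefined, the later insn becomes a register copy:
 *   add w1, w9, #16
 *   ...
 *   add w2, w9, #16   ===>   mov w2, w1
 */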
2200 void SameRHSPropPattern::Init()
2201 {
2202     prevInsn = nullptr;
2203     candidates = {MOP_waddrri12, MOP_xaddrri12, MOP_wsubrri12, MOP_xsubrri12,
2204                   MOP_wmovri32,  MOP_xmovri64,  MOP_wmovrr,    MOP_xmovrr};
2205 }
2206 
2207 bool SameRHSPropPattern::IsSameOperand(Operand *opnd1, Operand *opnd2) const
2208 {
2209     if (opnd1 == nullptr && opnd2 == nullptr) {
2210         return true;
2211     } else if (opnd1 == nullptr || opnd2 == nullptr) {
2212         return false;
2213     }
2214     if (opnd1->IsRegister() && opnd2->IsRegister()) {
2215         return RegOperand::IsSameReg(*opnd1, *opnd2);
2216     } else if (opnd1->IsImmediate() && opnd2->IsImmediate()) {
2217         auto *immOpnd1 = static_cast<ImmOperand *>(opnd1);
2218         auto *immOpnd2 = static_cast<ImmOperand *>(opnd2);
2219         return (immOpnd1->GetSize() == immOpnd2->GetSize()) && (immOpnd1->GetValue() == immOpnd2->GetValue());
2220     }
2221     return false;
2222 }
2223 
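/* Walk backwards within the BB for an earlier insn with the same mop and identical
 * register/immediate source operands; give up on calls before register allocation and on
 * immediates still marked kUnAdjustVary. */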
2224 bool SameRHSPropPattern::FindSameRHSInsnInBB(Insn &insn)
2225 {
2226     uint32 opndNum = insn.GetOperandSize();
2227     Operand *curRegOpnd = nullptr;
2228     Operand *curImmOpnd = nullptr;
2229     for (uint32 i = 0; i < opndNum; ++i) {
2230         if (insn.OpndIsDef(i)) {
2231             continue;
2232         }
2233         Operand &opnd = insn.GetOperand(i);
2234         if (opnd.IsRegister()) {
2235             curRegOpnd = &opnd;
2236         } else if (opnd.IsImmediate()) {
2237             auto &immOpnd = static_cast<ImmOperand &>(opnd);
2238             if (immOpnd.GetVary() == kUnAdjustVary) {
2239                 return false;
2240             }
2241             curImmOpnd = &opnd;
2242         }
2243     }
2244     if (curRegOpnd == nullptr && curImmOpnd != nullptr && static_cast<ImmOperand *>(curImmOpnd)->IsZero()) {
2245         return false;
2246     }
2247     BB *bb = insn.GetBB();
2248     for (auto *cursor = insn.GetPrev(); cursor != nullptr && cursor != bb->GetFirstInsn(); cursor = cursor->GetPrev()) {
2249         if (!cursor->IsMachineInstruction()) {
2250             continue;
2251         }
2252         if (cursor->IsCall() && !cgFunc.IsAfterRegAlloc()) {
2253             return false;
2254         }
2255         if (cursor->GetMachineOpcode() != insn.GetMachineOpcode()) {
2256             continue;
2257         }
2258         uint32 candOpndNum = cursor->GetOperandSize();
2259         Operand *candRegOpnd = nullptr;
2260         Operand *candImmOpnd = nullptr;
2261         for (uint32 i = 0; i < candOpndNum; ++i) {
2262             Operand &opnd = cursor->GetOperand(i);
2263             if (cursor->OpndIsDef(i)) {
2264                 continue;
2265             }
2266             if (opnd.IsRegister()) {
2267                 candRegOpnd = &opnd;
2268             } else if (opnd.IsImmediate()) {
2269                 auto &immOpnd = static_cast<ImmOperand &>(opnd);
2270                 if (immOpnd.GetVary() == kUnAdjustVary) {
2271                     return false;
2272                 }
2273                 candImmOpnd = &opnd;
2274             }
2275         }
2276         if (IsSameOperand(curRegOpnd, candRegOpnd) && IsSameOperand(curImmOpnd, candImmOpnd)) {
2277             prevInsn = cursor;
2278             return true;
2279         }
2280     }
2281     return false;
2282 }
2283 
2284 bool SameRHSPropPattern::CheckCondition(Insn &insn)
2285 {
2286     if (!insn.IsMachineInstruction()) {
2287         return false;
2288     }
2289     MOperator mOp = insn.GetMachineOpcode();
2290     if (std::find(candidates.begin(), candidates.end(), mOp) == candidates.end()) {
2291         return false;
2292     }
2293     if (!FindSameRHSInsnInBB(insn)) {
2294         return false;
2295     }
2296     CHECK_FATAL(prevInsn->GetOperand(kInsnFirstOpnd).IsRegister(), "prevInsn first operand must be register");
2297     if (prevInsn->GetOperand(kInsnSecondOpnd).IsRegister() &&
2298         RegOperand::IsSameReg(prevInsn->GetOperand(kInsnFirstOpnd), prevInsn->GetOperand(kInsnSecondOpnd))) {
2299         return false;
2300     }
2301     uint32 opndNum = prevInsn->GetOperandSize();
2302     for (uint32 i = 0; i < opndNum; ++i) {
2303         Operand &opnd = prevInsn->GetOperand(i);
2304         if (!opnd.IsRegister()) {
2305             continue;
2306         }
2307         regno_t regNO = static_cast<RegOperand &>(opnd).GetRegisterNumber();
2308         if (!(cgFunc.GetRD()->FindRegDefBetweenInsn(regNO, prevInsn->GetNext(), insn.GetPrev()).empty())) {
2309             return false;
2310         }
2311     }
2312     return true;
2313 }
2314 
2315 void SameRHSPropPattern::Optimize(Insn &insn)
2316 {
2317     BB *bb = insn.GetBB();
2318     Operand &destOpnd = insn.GetOperand(kInsnFirstOpnd);
2319     uint32 bitSize = static_cast<RegOperand &>(destOpnd).GetSize();
2320     MOperator mOp = (bitSize == k64BitSize ? MOP_xmovrr : MOP_wmovrr);
2321     Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mOp, destOpnd, prevInsn->GetOperand(kInsnFirstOpnd));
2322     newInsn.SetId(insn.GetId());
2323     bb->ReplaceInsn(insn, newInsn);
2324     if (GLOBAL_DUMP) {
2325         LogInfo::MapleLogger() << ">>>>>>> In SameRHSPropPattern : <<<<<<<\n";
2326         LogInfo::MapleLogger() << "=======PrevInsn :\n";
2327         LogInfo::MapleLogger() << "======= ReplaceInsn :\n";
2328         insn.Dump();
2329         LogInfo::MapleLogger() << "======= NewInsn :\n";
2330         newInsn.Dump();
2331     }
2332     cgFunc.GetRD()->UpdateInOut(*bb, true);
2333 }
2334 
2335 void SameRHSPropPattern::Run()
2336 {
2337     Init();
2338     FOR_ALL_BB_REV(bb, &cgFunc)
2339     {
2340         FOR_BB_INSNS_REV(insn, bb)
2341         {
2342             if (!CheckCondition(*insn)) {
2343                 continue;
2344             }
2345             Optimize(*insn);
2346         }
2347     }
2348 }
2349 } /* namespace maplebe */
2350