/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "aarch64_ico.h"
#include "ico.h"
#include "cg.h"
#include "cg_option.h"
#include "aarch64_isa.h"
#include "aarch64_insn.h"
#include "aarch64_cgfunc.h"

/*
 * This phase implements if-conversion optimization,
 * which tries to convert conditional branches into cset/csel instructions
 */
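/*
 * Purely illustrative sketch (not taken from this file's tests): a diamond such as
 *     cmp  w0, #0
 *     beq  .L_else
 *     mov  w1, w2
 *     b    .L_end
 *   .L_else:
 *     mov  w1, w3
 *   .L_end:
 * can be flattened into
 *     cmp  w0, #0
 *     csel w1, w2, w3, NE
 */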
namespace maplebe {
void AArch64IfConversionOptimizer::InitOptimizePatterns()
{
    singlePassPatterns.emplace_back(memPool->New<AArch64ICOIfThenElsePattern>(*cgFunc));
    singlePassPatterns.emplace_back(memPool->New<AArch64ICOMorePredsPattern>(*cgFunc));
}

/* build ccmp Insn */
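/*
 * ccmp performs a compare only when its condition holds; otherwise it sets the flags to the
 * given NZCV immediate.  Illustrative example (assumed, not from this file): the or-chain
 *     cmp w0, #0; beq .L; cmp w1, #2; beq .L
 * can be fused into
 *     cmp w0, #0; ccmp w1, #2, #4, NE; beq .L
 * where #4 (Z set) keeps the final beq taken when the first condition already held.
 */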
Insn *AArch64ICOPattern::BuildCcmpInsn(ConditionCode ccCode, const Insn *cmpInsn) const
{
    Operand &opnd0 = cmpInsn->GetOperand(kInsnFirstOpnd);
    Operand &opnd1 = cmpInsn->GetOperand(kInsnSecondOpnd);
    Operand &opnd2 = cmpInsn->GetOperand(kInsnThirdOpnd);
    /* ccmp has only int opnd */
    if (!static_cast<RegOperand &>(opnd1).IsOfIntClass()) {
        return nullptr;
    }
    AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
    uint32 nzcv = GetNZCV(ccCode, false);
    if (nzcv == k16BitSize) {
        return nullptr;
    }
    ImmOperand &opnd3 = func->CreateImmOperand(PTY_u8, nzcv);
    CondOperand &cond = static_cast<AArch64CGFunc *>(cgFunc)->GetCondOperand(ccCode);
    uint32 dSize = opnd1.GetSize();
    bool isIntTy = opnd2.IsIntImmediate();
    MOperator mOpCode = isIntTy ? (dSize == k64BitSize ? MOP_xccmpriic : MOP_wccmpriic)
                                : (dSize == k64BitSize ? MOP_xccmprric : MOP_wccmprric);
    /* cmp accepts an immediate in the range 0-4095, but ccmp only in the range 0-31 */
    if (isIntTy && static_cast<ImmOperand &>(opnd2).GetValue() >= k32BitSize) {
        return nullptr;
    }
    return &cgFunc->GetInsnBuilder()->BuildInsn(mOpCode, opnd0, opnd1, opnd2, opnd3, cond);
}

/* Map ccCode (or its inverse) to the ccmp NZCV immediate; k16BitSize means unsupported */
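/*
 * The NZCV immediate uses the architectural flag weights N = 8, Z = 4, C = 2, V = 1; it is the
 * flag state the ccmp establishes when its condition does not hold.
 */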
uint32 AArch64ICOPattern::GetNZCV(ConditionCode ccCode, bool inverse)
{
    switch (ccCode) {
        case CC_EQ:
            return inverse ? k4BitSize : k0BitSize;
        case CC_HS:
            return inverse ? k2BitSize : k0BitSize;
        case CC_MI:
            return inverse ? k8BitSize : k0BitSize;
        case CC_VS:
            return inverse ? k1BitSize : k0BitSize;
        case CC_VC:
            return inverse ? k0BitSize : k1BitSize;
        case CC_LS:
            return inverse ? k4BitSize : k2BitSize;
        case CC_LO:
            return inverse ? k0BitSize : k2BitSize;
        case CC_NE:
            return inverse ? k0BitSize : k4BitSize;
        case CC_HI:
            return inverse ? k2BitSize : k4BitSize;
        case CC_PL:
            return inverse ? k0BitSize : k8BitSize;
        default:
            return k16BitSize;
    }
}

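/* Build a "cmp <reg>, #0" insn from the first (register) operand of a conditional branch, e.g. cbz/cbnz */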
Insn *AArch64ICOPattern::BuildCmpInsn(const Insn &condBr) const
{
    AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
    RegOperand &reg = static_cast<RegOperand &>(condBr.GetOperand(0));
    PrimType ptyp = (reg.GetSize() == k64BitSize) ? PTY_u64 : PTY_u32;
    ImmOperand &numZero = func->CreateImmOperand(ptyp, 0);
    Operand &rflag = func->GetOrCreateRflag();
    MOperator mopCode = (reg.GetSize() == k64BitSize) ? MOP_xcmpri : MOP_wcmpri;
    Insn &cmpInsn = func->GetInsnBuilder()->BuildInsn(mopCode, rflag, reg, numZero);
    return &cmpInsn;
}

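/* A "set" insn here is a plain register move or an add/sub; record its destination and source operands */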
bool AArch64ICOPattern::IsSetInsn(const Insn &insn, Operand *&dest, std::vector<Operand *> &src) const
{
    MOperator mOpCode = insn.GetMachineOpcode();
    if ((mOpCode >= MOP_xmovrr && mOpCode <= MOP_xvmovd) || cgFunc->GetTheCFG()->IsAddOrSubInsn(insn)) {
        dest = &(insn.GetOperand(0));
        for (uint32 i = 1; i < insn.GetOperandSize(); ++i) {
            (void)src.emplace_back(&(insn.GetOperand(i)));
        }
        return true;
    }
    dest = nullptr;
    src.clear();
    return false;
}

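/* Map a conditional branch (or compare-and-branch) opcode to its condition code, optionally inverted */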
ConditionCode AArch64ICOPattern::Encode(MOperator mOp, bool inverse) const
{
    switch (mOp) {
        case MOP_bmi:
            return inverse ? CC_PL : CC_MI;
        case MOP_bvc:
            return inverse ? CC_VS : CC_VC;
        case MOP_bls:
            return inverse ? CC_HI : CC_LS;
        case MOP_blt:
            return inverse ? CC_GE : CC_LT;
        case MOP_ble:
            return inverse ? CC_GT : CC_LE;
        case MOP_beq:
            return inverse ? CC_NE : CC_EQ;
        case MOP_bne:
            return inverse ? CC_EQ : CC_NE;
        case MOP_blo:
            return inverse ? CC_HS : CC_LO;
        case MOP_bpl:
            return inverse ? CC_MI : CC_PL;
        case MOP_bhs:
            return inverse ? CC_LO : CC_HS;
        case MOP_bvs:
            return inverse ? CC_VC : CC_VS;
        case MOP_bhi:
            return inverse ? CC_LS : CC_HI;
        case MOP_bgt:
            return inverse ? CC_LE : CC_GT;
        case MOP_bge:
            return inverse ? CC_LT : CC_GE;
        case MOP_wcbnz:
            return inverse ? CC_EQ : CC_NE;
        case MOP_xcbnz:
            return inverse ? CC_EQ : CC_NE;
        case MOP_wcbz:
            return inverse ? CC_NE : CC_EQ;
        case MOP_xcbz:
            return inverse ? CC_NE : CC_EQ;
        default:
            return kCcLast;
    }
}

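/* Build a cset that materialises the branch condition (or its inverse) as 0/1 in reg */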
Insn *AArch64ICOPattern::BuildCondSet(const Insn &branch, RegOperand &reg, bool inverse) const
{
    ConditionCode ccCode = Encode(branch.GetMachineOpcode(), inverse);
    DEBUG_ASSERT(ccCode != kCcLast, "unknown cond, ccCode can't be kCcLast");
    AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
    CondOperand &cond = func->GetCondOperand(ccCode);
    Operand &rflag = func->GetOrCreateRflag();
    MOperator mopCode = (reg.GetSize() == k64BitSize) ? MOP_xcsetrc : MOP_wcsetrc;
    return &func->GetInsnBuilder()->BuildInsn(mopCode, reg, cond, rflag);
}

Insn *AArch64ICOPattern::BuildCondSel(const Insn &branch, MOperator mOp, RegOperand &dst, RegOperand &src1,
                                      RegOperand &src2) const
{
    ConditionCode ccCode = Encode(branch.GetMachineOpcode(), false);
    DEBUG_ASSERT(ccCode != kCcLast, "unknown cond, ccCode can't be kCcLast");
    CondOperand &cond = static_cast<AArch64CGFunc *>(cgFunc)->GetCondOperand(ccCode);
    Operand &rflag = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateRflag();
    return &cgFunc->GetInsnBuilder()->BuildInsn(mOp, dst, src1, src2, cond, rflag);
}

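/*
 * Select between two immediates: a 0/1 pair becomes a single cset, equal values a plain mov,
 * and anything else is materialised into registers and merged with a csel.
 */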
void AArch64ICOIfThenElsePattern::GenerateInsnForImm(const Insn &branchInsn, Operand &ifDest, Operand &elseDest,
                                                     RegOperand &destReg, std::vector<Insn *> &generateInsn)
{
    ImmOperand &imm1 = static_cast<ImmOperand &>(ifDest);
    ImmOperand &imm2 = static_cast<ImmOperand &>(elseDest);
    bool inverse = imm1.IsZero() && imm2.IsOne();
    if (inverse || (imm2.IsZero() && imm1.IsOne())) {
        Insn *csetInsn = BuildCondSet(branchInsn, destReg, inverse);
        DEBUG_ASSERT(csetInsn != nullptr, "build a cset insn failed");
        generateInsn.emplace_back(csetInsn);
    } else if (imm1.GetValue() == imm2.GetValue()) {
        bool destIsIntTy = destReg.IsOfIntClass();
        MOperator mOp = destIsIntTy ? ((destReg.GetSize() == k64BitSize ? MOP_xmovri64 : MOP_wmovri32))
                                    : ((destReg.GetSize() == k64BitSize ? MOP_xdfmovri : MOP_wsfmovri));
        Insn &tempInsn = cgFunc->GetInsnBuilder()->BuildInsn(mOp, destReg, imm1);
        generateInsn.emplace_back(&tempInsn);
    } else {
        bool destIsIntTy = destReg.IsOfIntClass();
        uint32 dSize = destReg.GetSize();
        bool isD64 = dSize == k64BitSize;
        MOperator mOp = destIsIntTy ? ((destReg.GetSize() == k64BitSize ? MOP_xmovri64 : MOP_wmovri32))
                                    : ((destReg.GetSize() == k64BitSize ? MOP_xdfmovri : MOP_wsfmovri));
        RegOperand *tempTarIf = nullptr;
        if (imm1.IsZero()) {
            tempTarIf = &cgFunc->GetZeroOpnd(dSize);
        } else {
            tempTarIf = cgFunc->GetTheCFG()->CreateVregFromReg(destReg);
            Insn &tempInsnIf = cgFunc->GetInsnBuilder()->BuildInsn(mOp, *tempTarIf, imm1);
            generateInsn.emplace_back(&tempInsnIf);
        }

        RegOperand *tempTarElse = nullptr;
        if (imm2.IsZero()) {
            tempTarElse = &cgFunc->GetZeroOpnd(dSize);
        } else {
            tempTarElse = cgFunc->GetTheCFG()->CreateVregFromReg(destReg);
            Insn &tempInsnElse = cgFunc->GetInsnBuilder()->BuildInsn(mOp, *tempTarElse, imm2);
            generateInsn.emplace_back(&tempInsnElse);
        }

        bool isIntTy = destReg.IsOfIntClass();
        MOperator mOpCode = isIntTy ? (isD64 ? MOP_xcselrrrc : MOP_wcselrrrc)
                                    : (isD64 ? MOP_dcselrrrc : (dSize == k32BitSize ? MOP_scselrrrc : MOP_hcselrrrc));
        Insn *cselInsn = BuildCondSel(branchInsn, mOpCode, destReg, *tempTarIf, *tempTarElse);
        CHECK_FATAL(cselInsn != nullptr, "build a csel insn failed");
        generateInsn.emplace_back(cselInsn);
    }
}

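/* If dest is an immediate, materialise it into a register (reusing the zero register for 0); otherwise return it unchanged */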
RegOperand *AArch64ICOIfThenElsePattern::GenerateRegAndTempInsn(Operand &dest, const RegOperand &destReg,
                                                                std::vector<Insn *> &generateInsn) const
{
    RegOperand *reg = nullptr;
    if (!dest.IsRegister()) {
        bool destIsIntTy = destReg.IsOfIntClass();
        bool isDest64 = destReg.GetSize() == k64BitSize;
        MOperator mOp =
            destIsIntTy ? (isDest64 ? MOP_xmovri64 : MOP_wmovri32) : (isDest64 ? MOP_xdfmovri : MOP_wsfmovri);
        reg = cgFunc->GetTheCFG()->CreateVregFromReg(destReg);
        ImmOperand &tempSrcElse = static_cast<ImmOperand &>(dest);
        if (tempSrcElse.IsZero()) {
            return &cgFunc->GetZeroOpnd(destReg.GetSize());
        }
        Insn &tempInsn = cgFunc->GetInsnBuilder()->BuildInsn(mOp, *reg, tempSrcElse);
        generateInsn.emplace_back(&tempInsn);
        return reg;
    } else {
        return (static_cast<RegOperand *>(&dest));
    }
}

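/* Select between two register sources: identical registers need only a mov, otherwise emit a csel */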
void AArch64ICOIfThenElsePattern::GenerateInsnForReg(const Insn &branchInsn, Operand &ifDest, Operand &elseDest,
                                                     RegOperand &destReg, std::vector<Insn *> &generateInsn)
{
    RegOperand *tReg = static_cast<RegOperand *>(&ifDest);
    RegOperand *eReg = static_cast<RegOperand *>(&elseDest);

    /* if both paths perform "mov w0, w1", a single "mov w0, w1" suffices */
    if (eReg->GetRegisterNumber() == tReg->GetRegisterNumber()) {
        uint32 dSize = destReg.GetSize();
        bool srcIsIntTy = tReg->IsOfIntClass();
        bool destIsIntTy = destReg.IsOfIntClass();
        MOperator mOp;
        if (dSize == k64BitSize) {
            mOp = srcIsIntTy ? (destIsIntTy ? MOP_xmovrr : MOP_xvmovdr) : (destIsIntTy ? MOP_xvmovrd : MOP_xvmovd);
        } else {
            mOp = srcIsIntTy ? (destIsIntTy ? MOP_wmovrr : MOP_xvmovsr) : (destIsIntTy ? MOP_xvmovrs : MOP_xvmovs);
        }
        Insn &tempInsnIf = cgFunc->GetInsnBuilder()->BuildInsn(mOp, destReg, *tReg);
        generateInsn.emplace_back(&tempInsnIf);
    } else {
        uint32 dSize = destReg.GetSize();
        bool isIntTy = destReg.IsOfIntClass();
        MOperator mOpCode =
            isIntTy ? (dSize == k64BitSize ? MOP_xcselrrrc : MOP_wcselrrrc)
                    : (dSize == k64BitSize ? MOP_dcselrrrc : (dSize == k32BitSize ? MOP_scselrrrc : MOP_hcselrrrc));
        Insn *cselInsn = BuildCondSel(branchInsn, mOpCode, destReg, *tReg, *eReg);
        CHECK_FATAL(cselInsn != nullptr, "build a csel insn failed");
        generateInsn.emplace_back(cselInsn);
    }
}

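/* Look up destReg in the dest->src map: return its single source if it has exactly one, otherwise the dest itself */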
Operand *AArch64ICOIfThenElsePattern::GetDestReg(const std::map<Operand *, std::vector<Operand *>> &destSrcMap,
                                                 const RegOperand &destReg) const
{
    Operand *dest = nullptr;
    for (const auto &destSrcPair : destSrcMap) {
        DEBUG_ASSERT(destSrcPair.first->IsRegister(), "opnd must be register");
        RegOperand *destRegInMap = static_cast<RegOperand *>(destSrcPair.first);
        DEBUG_ASSERT(destRegInMap != nullptr, "nullptr check");
        if (destRegInMap->GetRegisterNumber() == destReg.GetRegisterNumber()) {
            if (destSrcPair.second.size() > 1) {
                dest = destSrcPair.first;
            } else {
                dest = destSrcPair.second[0];
            }
            break;
        }
    }
    return dest;
}

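/*
 * Walk the set insns of an if/else BB and, for each destination register, emit the cset/mov/csel
 * sequence that merges the values coming from both paths into generateInsn.
 */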
bool AArch64ICOIfThenElsePattern::BuildCondMovInsn(BB &cmpBB, const BB &bb,
                                                   const std::map<Operand *, std::vector<Operand *>> &ifDestSrcMap,
                                                   const std::map<Operand *, std::vector<Operand *>> &elseDestSrcMap,
                                                   bool elseBBIsProcessed, std::vector<Insn *> &generateInsn)
{
    Insn *branchInsn = cgFunc->GetTheCFG()->FindLastCondBrInsn(cmpBB);
    FOR_BB_INSNS_CONST(insn, (&bb))
    {
        if (!insn->IsMachineInstruction() || insn->IsBranch()) {
            continue;
        }
        Operand *dest = nullptr;
        std::vector<Operand *> src;

        if (!IsSetInsn(*insn, dest, src)) {
            DEBUG_ASSERT(false, "insn check");
        }
        DEBUG_ASSERT(dest->IsRegister(), "register check");
        RegOperand *destReg = static_cast<RegOperand *>(dest);

        Operand *elseDest = GetDestReg(elseDestSrcMap, *destReg);
        Operand *ifDest = GetDestReg(ifDestSrcMap, *destReg);

        if (elseBBIsProcessed) {
            if (elseDest != nullptr) {
                continue;
            }
            elseDest = dest;
            DEBUG_ASSERT(ifDest != nullptr, "null ptr check");
            if (!bb.GetLiveOut()->TestBit(destReg->GetRegisterNumber())) {
                continue;
            }
        } else {
            DEBUG_ASSERT(elseDest != nullptr, "null ptr check");
            if (ifDest == nullptr) {
                if (!bb.GetLiveOut()->TestBit(destReg->GetRegisterNumber())) {
                    continue;
                }
                ifDest = dest;
            }
        }

        /* generate cset or csel instruction */
        DEBUG_ASSERT(ifDest != nullptr, "null ptr check");
        if (ifDest->IsIntImmediate() && elseDest->IsIntImmediate()) {
            GenerateInsnForImm(*branchInsn, *ifDest, *elseDest, *destReg, generateInsn);
        } else {
            RegOperand *tReg = GenerateRegAndTempInsn(*ifDest, *destReg, generateInsn);
            RegOperand *eReg = GenerateRegAndTempInsn(*elseDest, *destReg, generateInsn);
            if ((tReg->GetRegisterType() != eReg->GetRegisterType()) ||
                (tReg->GetRegisterType() != destReg->GetRegisterType())) {
                return false;
            }
            GenerateInsnForReg(*branchInsn, *tReg, *eReg, *destReg, generateInsn);
        }
    }

    return true;
}

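/* Every add/sub destination defined on one path must also be defined on the other, so both paths assign it */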
bool AArch64ICOIfThenElsePattern::CheckHasSameDest(std::vector<Insn *> &lInsn, std::vector<Insn *> &rInsn) const
{
    for (size_t i = 0; i < lInsn.size(); ++i) {
        if (cgFunc->GetTheCFG()->IsAddOrSubInsn(*lInsn[i])) {
            bool hasSameDest = false;
            for (size_t j = 0; j < rInsn.size(); ++j) {
                RegOperand *rDestReg = static_cast<RegOperand *>(&rInsn[j]->GetOperand(0));
                RegOperand *lDestReg = static_cast<RegOperand *>(&lInsn[i]->GetOperand(0));
                if (lDestReg->GetRegisterNumber() == rDestReg->GetRegisterNumber()) {
                    hasSameDest = true;
                    break;
                }
            }
            if (!hasSameDest) {
                return false;
            }
        }
    }
    return true;
}

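/*
 * Check that a candidate BB contains only simple set insns (mov/add/sub) whose sources are
 * registers or constants, and that the condition flags are not live out of the block;
 * collect the dest->src mapping, the destination registers and the set insns.
 */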
bool AArch64ICOIfThenElsePattern::CheckCondMoveBB(BB *bb, std::map<Operand *, std::vector<Operand *>> &destSrcMap,
                                                  std::vector<Operand *> &destRegs, std::vector<Insn *> &setInsn,
                                                  Operand *flagOpnd, Insn *cmpInsn) const
{
    if (bb == nullptr) {
        return false;
    }
    FOR_BB_INSNS(insn, bb)
    {
        if (!insn->IsMachineInstruction() || insn->IsBranch()) {
            continue;
        }
        Operand *dest = nullptr;
        std::vector<Operand *> src;

        if (!IsSetInsn(*insn, dest, src)) {
            return false;
        }
        DEBUG_ASSERT(dest != nullptr, "null ptr check");
        DEBUG_ASSERT(src.size() != 0, "null ptr check");

        if (!dest->IsRegister()) {
            return false;
        }

        for (auto srcOpnd : src) {
            if (!(srcOpnd->IsConstImmediate()) && !srcOpnd->IsRegister()) {
                return false;
            }
        }

        if (flagOpnd != nullptr) {
            RegOperand *flagReg = static_cast<RegOperand *>(flagOpnd);
            regno_t flagRegNO = flagReg->GetRegisterNumber();
            if (bb->GetLiveOut()->TestBit(flagRegNO)) {
                return false;
            }
        }

        (void)destSrcMap.insert(std::make_pair(dest, src));
        destRegs.emplace_back(dest);
        (void)setInsn.emplace_back(insn);
    }
    return true;
}

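/* Only plain flag-based conditional branches are handled here; compare-and-branch forms (cbz/cbnz) are excluded */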
bool AArch64ICOPattern::CheckMop(MOperator mOperator) const
{
    switch (mOperator) {
        case MOP_beq:
        case MOP_bne:
        case MOP_blt:
        case MOP_ble:
        case MOP_bgt:
        case MOP_bge:
        case MOP_blo:
        case MOP_bls:
        case MOP_bhs:
        case MOP_bhi:
        case MOP_bpl:
        case MOP_bmi:
        case MOP_bvc:
        case MOP_bvs:
            return true;
        default:
            return false;
    }
}

/* the goto BB must contain only mov insns and a final branch */
bool AArch64ICOMorePredsPattern::CheckGotoBB(BB &gotoBB, std::vector<Insn *> &movInsn) const
{
    FOR_BB_INSNS(insn, &gotoBB)
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        if (insn->IsMove()) {
            movInsn.push_back(insn);
            continue;
        }
        if (insn->GetId() != gotoBB.GetLastInsn()->GetId()) {
            return false;
        } else if (!insn->IsBranch()) { /* last Insn is Branch */
            return false;
        }
    }
    return true;
}

/* convert the mov insns of the goto BB into csel insns */
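/*
 * Each "mov rd, rm" becomes "csel rd, rm, rd, <cond>", so rd keeps its old value
 * when the branch condition does not hold.
 */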
bool AArch64ICOMorePredsPattern::MovToCsel(std::vector<Insn *> &movInsn, std::vector<Insn *> &cselInsn,
                                           const Insn &branchInsn) const
{
    Operand &branchOpnd0 = branchInsn.GetOperand(kInsnFirstOpnd);
    regno_t branchRegNo = 0;
    if (branchOpnd0.IsRegister()) {
        branchRegNo = static_cast<RegOperand &>(branchOpnd0).GetRegisterNumber();
    }
    for (Insn *insn : movInsn) {
        /* use the mov to build a csel */
        Operand &opnd0 = insn->GetOperand(kInsnFirstOpnd);
        Operand &opnd1 = insn->GetOperand(kInsnSecondOpnd);
        ConditionCode ccCode = AArch64ICOPattern::Encode(branchInsn.GetMachineOpcode(), false);
        DEBUG_ASSERT(ccCode != kCcLast, "unknown cond, ccCode can't be kCcLast");
        CondOperand &cond = static_cast<AArch64CGFunc *>(cgFunc)->GetCondOperand(ccCode);
        Operand &rflag = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateRflag();
        RegOperand &regOpnd0 = static_cast<RegOperand &>(opnd0);
        RegOperand &regOpnd1 = static_cast<RegOperand &>(opnd1);
        /* the mov's source must not be an immediate */
        if (opnd1.IsImmediate()) {
            return false;
        }
        /* opnd0 and opnd1 must have the same type and size */
        if (regOpnd0.GetSize() != regOpnd1.GetSize() || (regOpnd0.IsOfIntClass() != regOpnd1.IsOfIntClass())) {
            return false;
        }
        /* branchOpnd0 cannot be modified by the csel */
        regno_t movRegNo0 = static_cast<RegOperand &>(opnd0).GetRegisterNumber();
        if (branchOpnd0.IsRegister() && branchRegNo == movRegNo0) {
            return false;
        }
        uint32 dSize = regOpnd0.GetSize();
        bool isIntTy = regOpnd0.IsOfIntClass();
        MOperator mOpCode =
            isIntTy ? (dSize == k64BitSize ? MOP_xcselrrrc : MOP_wcselrrrc)
                    : (dSize == k64BitSize ? MOP_dcselrrrc : (dSize == k32BitSize ? MOP_scselrrrc : MOP_hcselrrrc));
        cselInsn.emplace_back(&cgFunc->GetInsnBuilder()->BuildInsn(mOpCode, opnd0, opnd1, opnd0, cond, rflag));
    }
    if (cselInsn.empty()) {
        return false;
    }
    return true;
}
} /* namespace maplebe */