1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_peep.h"
17 #include "cg.h"
18 #include "mpl_logging.h"
19 #include "common_utils.h"
20 #include "cg_option.h"
21 #include "aarch64_utils.h"
22
namespace maplebe {
/* True when the module being compiled is a Java module (enables Java-specific peeps). */
#define JAVALANG (cgFunc->GetMirModule().IsJavaModule())
/* Gate for dumping before/after info of a fired peephole pattern for the current function. */
#define CG_PEEP_DUMP CG_DEBUG_FUNC(*cgFunc)
26 namespace {
27 const std::string kMccLoadRef = "MCC_LoadRefField";
28 const std::string kMccLoadRefV = "MCC_LoadVolatileField";
29 const std::string kMccLoadRefS = "MCC_LoadRefStatic";
30 const std::string kMccLoadRefVS = "MCC_LoadVolatileStaticField";
31 const std::string kMccDummy = "MCC_Dummy";
32
33 const uint32 kSizeOfSextMopTable = 5;
34 const uint32 kSizeOfUextMopTable = 3;
35
36 MOperator sextMopTable[kSizeOfSextMopTable] = {MOP_xsxtb32, MOP_xsxtb64, MOP_xsxth32, MOP_xsxth64, MOP_xsxtw64};
37
38 MOperator uextMopTable[kSizeOfUextMopTable] = {MOP_xuxtb32, MOP_xuxth32, MOP_xuxtw64};
39
GetReadBarrierName(const Insn & insn)40 const std::string GetReadBarrierName(const Insn &insn)
41 {
42 constexpr int32 totalBarrierNamesNum = 5;
43 std::array<std::string, totalBarrierNamesNum> barrierNames = {kMccLoadRef, kMccLoadRefV, kMccLoadRefS,
44 kMccLoadRefVS, kMccDummy};
45 if (insn.GetMachineOpcode() == MOP_xbl || insn.GetMachineOpcode() == MOP_tail_call_opt_xbl) {
46 auto &op = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
47 const std::string &funcName = op.GetName();
48 for (const std::string &singleBarrierName : barrierNames) {
49 if (funcName == singleBarrierName) {
50 return singleBarrierName;
51 }
52 }
53 }
54 return "";
55 }
56
GetLoadOperator(uint32 refSize,bool isVolatile)57 MOperator GetLoadOperator(uint32 refSize, bool isVolatile)
58 {
59 if (refSize == k32BitSize) {
60 return isVolatile ? MOP_wldar : MOP_wldr;
61 }
62 return isVolatile ? MOP_xldar : MOP_xldr;
63 }
64 } // namespace
65
IsZeroRegister(const Operand & opnd)66 static bool IsZeroRegister(const Operand &opnd)
67 {
68 if (!opnd.IsRegister()) {
69 return false;
70 }
71 const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
72 return regOpnd->GetRegisterNumber() == RZR;
73 }
74
Run()75 void AArch64CGPeepHole::Run()
76 {
77 bool optSuccess = false;
78 FOR_ALL_BB(bb, cgFunc) {
79 FOR_BB_INSNS_SAFE(insn, bb, nextInsn) {
80 if (!insn->IsMachineInstruction()) {
81 continue;
82 }
83 if (ssaInfo != nullptr) {
84 optSuccess = DoSSAOptimize(*bb, *insn);
85 } else {
86 DoNormalOptimize(*bb, *insn);
87 }
88 }
89 }
90 if (optSuccess) {
91 Run();
92 }
93 }
94
/*
 * Dispatches one machine instruction to the SSA-based peephole patterns whose anchor
 * opcode matches. A fresh PeepOptimizeManager is created per insn; each
 * Optimize<Pattern>(true) invocation runs one pattern and records whether it fired.
 * Returns true iff some pattern rewrote the instruction.
 */
bool AArch64CGPeepHole::DoSSAOptimize(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    manager = peepMemPool->New<PeepOptimizeManager>(*cgFunc, bb, insn, *ssaInfo);
    switch (thisMop) {
        case MOP_xandrrr:
        case MOP_wandrrr: {
            /* mvn + and ==> bic */
            manager->Optimize<MvnAndToBicPattern>(true);
            break;
        }
        case MOP_wiorrri12:
        case MOP_xiorrri13: {
            /* orr with zero ==> mov */
            manager->Optimize<OrrToMovPattern>(true);
            break;
        }
        case MOP_wcbz:
        case MOP_xcbz:
        case MOP_wcbnz:
        case MOP_xcbnz: {
            manager->Optimize<AndCbzToTbzPattern>(true);
            manager->Optimize<CsetCbzToBeqPattern>(true);
            manager->Optimize<OneHoleBranchPattern>(true);
            break;
        }
        case MOP_beq:
        case MOP_bne: {
            manager->Optimize<AndCmpBranchesToTbzPattern>(true);
            break;
        }
        case MOP_wcsetrc:
        case MOP_xcsetrc: {
            manager->Optimize<ContinuousCmpCsetPattern>(true);
            break;
        }
        case MOP_waddrrr:
        case MOP_xaddrrr:
        case MOP_dadd:
        case MOP_sadd:
        case MOP_wsubrrr:
        case MOP_xsubrrr:
        case MOP_dsub:
        case MOP_ssub:
        case MOP_xinegrr:
        case MOP_winegrr:
        case MOP_wfnegrr:
        case MOP_xfnegrr: {
            /* mul + add/sub/neg ==> madd/msub/mneg etc. */
            manager->Optimize<SimplifyMulArithmeticPattern>(true);
            break;
        }
        case MOP_wandrri12:
        case MOP_xandrri13: {
            manager->Optimize<LsrAndToUbfxPattern>(true);
            break;
        }
        case MOP_wcselrrrc:
        case MOP_xcselrrrc: {
            manager->Optimize<CselToCsetPattern>(true);
            break;
        }
        case MOP_wiorrrr:
        case MOP_xiorrrr:
        case MOP_wiorrrrs:
        case MOP_xiorrrrs: {
            manager->Optimize<LogicShiftAndOrrToExtrPattern>(true);
            break;
        }
        case MOP_bge:
        case MOP_ble:
        case MOP_blt:
        case MOP_bgt: {
            manager->Optimize<ZeroCmpBranchesToTbzPattern>(true);
            break;
        }
        case MOP_wcmprr:
        case MOP_xcmprr: {
            manager->Optimize<NegCmpToCmnPattern>(true);
            break;
        }
        case MOP_xlslrri6: {
            /* NOTE(review): unlike every other call here this one passes no 'true' flag —
             * presumably it uses the manager's default enable; confirm intent. */
            manager->Optimize<ExtLslToBitFieldInsertPattern>();
            manager->Optimize<CombineSameArithmeticPattern>(true);
            break;
        }
        case MOP_xsxtb32:
        case MOP_xsxtb64:
        case MOP_xsxth32:
        case MOP_xsxth64:
        case MOP_xsxtw64:
        case MOP_xuxtb32:
        case MOP_xuxth32:
        case MOP_xuxtw64: {
            manager->Optimize<ElimSpecificExtensionPattern>(true);
            break;
        }
        case MOP_wlsrrri5:
        case MOP_xlsrrri6:
        case MOP_wasrrri5:
        case MOP_xasrrri6:
        case MOP_wlslrri5:
        case MOP_waddrri12:
        case MOP_xaddrri12:
        case MOP_wsubrri12:
        case MOP_xsubrri12: {
            /* two consecutive shift/add/sub with immediates ==> one insn */
            manager->Optimize<CombineSameArithmeticPattern>(true);
            break;
        }
        case MOP_wubfxrri5i5:
        case MOP_xubfxrri6i6: {
            manager->Optimize<UbfxAndCbzToTbzPattern>(true);
            break;
        }
        default:
            break;
    }
    return manager->OptSuccess();
}
211
CheckCondCode(const CondOperand & condOpnd) const212 bool ContinuousCmpCsetPattern::CheckCondCode(const CondOperand &condOpnd) const
213 {
214 switch (condOpnd.GetCode()) {
215 case CC_NE:
216 case CC_EQ:
217 case CC_LT:
218 case CC_GE:
219 case CC_GT:
220 case CC_LE:
221 return true;
222 default:
223 return false;
224 }
225 }
226
/*
 * Matches the chain:  cmp ... ; cset r1, cond ; cmp r1, #0 ; cset r2, NE/EQ
 * so that the trailing cmp+cset can collapse into a mov (or a single inverted cset).
 * Records prevCmpInsn (the middle cmp), prevCsetInsn1 and prevCmpInsn1 for Run().
 */
bool ContinuousCmpCsetPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_wcsetrc && curMop != MOP_xcsetrc) {
        return false;
    }
    auto &condOpnd = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
    if (condOpnd.GetCode() != CC_NE && condOpnd.GetCode() != CC_EQ) {
        return false;
    }
    /* cset r2, EQ after cmp r1, #0 inverts r1's truth value */
    reverse = (condOpnd.GetCode() == CC_EQ);
    auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
    prevCmpInsn = GetDefInsn(ccReg);
    if (prevCmpInsn == nullptr) {
        return false;
    }
    MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
    if (prevCmpMop != MOP_wcmpri && prevCmpMop != MOP_xcmpri) {
        return false;
    }
    if (!static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd)).IsZero()) {
        return false;
    }
    auto &cmpCCReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnFirstOpnd));
    /* the middle cmp's flags must feed only the current cset, or removal is unsafe */
    InsnSet useSet = GetAllUseInsn(cmpCCReg);
    if (useSet.size() > 1) {
        return false;
    }
    auto &cmpUseReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
    prevCsetInsn1 = GetDefInsn(cmpUseReg);
    if (prevCsetInsn1 == nullptr) {
        return false;
    }
    MOperator prevCsetMop1 = prevCsetInsn1->GetMachineOpcode();
    if (prevCsetMop1 != MOP_wcsetrc && prevCsetMop1 != MOP_xcsetrc) {
        return false;
    }
    auto &condOpnd1 = static_cast<CondOperand &>(prevCsetInsn1->GetOperand(kInsnSecondOpnd));
    if (!CheckCondCode(condOpnd1)) {
        return false;
    }
    auto &ccReg1 = static_cast<RegOperand &>(prevCsetInsn1->GetOperand(kInsnThirdOpnd));
    prevCmpInsn1 = GetDefInsn(ccReg1);
    if (prevCmpInsn1 == nullptr) {
        return false;
    }
    /* the flags consumed by the first cset must not be redefined before the middle cmp */
    if (IsCCRegCrossVersion(*prevCsetInsn1, *prevCmpInsn, ccReg1)) {
        return false;
    }
    return true;
}
278
/*
 * Rewrites the matched chain. If the trailing cset inverts (EQ), a new cset with the
 * reversed condition is inserted after the first one; the trailing cmp+cset then
 * degenerates into a mov from the (possibly new) cset's result.
 */
void ContinuousCmpCsetPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
    MOperator curMop = insn.GetMachineOpcode();
    Operand &resOpnd = insn.GetOperand(kInsnFirstOpnd);
    Insn *newCsetInsn = nullptr;
    if (reverse) {
        MOperator prevCsetMop = prevCsetInsn1->GetMachineOpcode();
        auto &prevCsetCondOpnd = static_cast<CondOperand &>(prevCsetInsn1->GetOperand(kInsnSecondOpnd));
        CondOperand &newCondOpnd = aarFunc->GetCondOperand(GetReverseBasicCC(prevCsetCondOpnd.GetCode()));
        regno_t tmpRegNO = 0;
        auto *tmpDefOpnd = aarFunc->CreateVirtualRegisterOperand(tmpRegNO, resOpnd.GetSize(),
                                                                 static_cast<RegOperand &>(resOpnd).GetRegisterType());
        /* cset only produces 0/1 */
        tmpDefOpnd->SetValidBitsNum(k1BitSize);
        newCsetInsn = &cgFunc->GetInsnBuilder()->BuildInsn(prevCsetMop, *tmpDefOpnd, newCondOpnd,
                                                           prevCsetInsn1->GetOperand(kInsnThirdOpnd));
        BB *prevCsetBB = prevCsetInsn1->GetBB();
        (void)prevCsetBB->InsertInsnAfter(*prevCsetInsn1, *newCsetInsn);
        /* update ssa info */
        auto *a64SSAInfo = static_cast<AArch64CGSSAInfo *>(ssaInfo);
        a64SSAInfo->CreateNewInsnSSAInfo(*newCsetInsn);
        /* dump pattern info */
        if (CG_PEEP_DUMP) {
            std::vector<Insn *> prevs;
            prevs.emplace_back(prevCmpInsn1);
            prevs.emplace_back(&insn);
            DumpAfterPattern(prevs, prevCmpInsn, newCsetInsn);
        }
    }
    MOperator newMop = (curMop == MOP_wcsetrc) ? MOP_wmovrr : MOP_xmovrr;
    Insn *newInsn = nullptr;
    /* the trailing cset becomes a mov from either the original or the inverted cset result */
    if (newCsetInsn == nullptr) {
        newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd),
                                                       prevCsetInsn1->GetOperand(kInsnFirstOpnd));
    } else {
        newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd),
                                                       newCsetInsn->GetOperand(kInsnFirstOpnd));
    }
    if (newInsn == nullptr) {
        return;
    }
    bb.ReplaceInsn(insn, *newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, *newInsn);
    optSuccess = true;
    SetCurrInsn(newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevCmpInsn1);
        prevs.emplace_back(prevCsetInsn1);
        if (newCsetInsn == nullptr) {
            (void)prevs.emplace_back(prevCmpInsn);
        } else {
            (void)prevs.emplace_back(newCsetInsn);
        }
        DumpAfterPattern(prevs, &insn, newInsn);
    }
}
341
CheckCondition(Insn & insn)342 bool NegCmpToCmnPattern::CheckCondition(Insn &insn)
343 {
344 MOperator curMop = insn.GetMachineOpcode();
345 if (curMop != MOP_wcmprr && curMop != MOP_xcmprr) {
346 return false;
347 }
348 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
349 prevInsn = GetDefInsn(useReg);
350 if (prevInsn == nullptr) {
351 return false;
352 }
353 MOperator prevMop = prevInsn->GetMachineOpcode();
354 if (prevMop != MOP_winegrr && prevMop != MOP_xinegrr && prevMop != MOP_winegrrs && prevMop != MOP_xinegrrs) {
355 return false;
356 }
357 auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
358 InsnSet useInsns = GetAllUseInsn(ccReg);
359 for (auto *useInsn : useInsns) {
360 if (useInsn == nullptr) {
361 continue;
362 }
363 MOperator useMop = useInsn->GetMachineOpcode();
364 if (useMop == MOP_bhi || useMop == MOP_bls) {
365 return false;
366 }
367 bool findUnsignedCond = false;
368 for (size_t i = 0; i < useInsn->GetOperandSize(); ++i) {
369 if (useInsn->GetOperand(i).GetKind() == Operand::kOpdCond) {
370 ConditionCode cond = static_cast<CondOperand &>(useInsn->GetOperand(i)).GetCode();
371 if (cond == CC_HI || cond == CC_LS) {
372 findUnsignedCond = true;
373 break;
374 }
375 }
376 }
377 if (findUnsignedCond) {
378 return false;
379 }
380 }
381 return true;
382 }
383
/*
 * Performs the rewrite:  neg rX, rY [, shift] ; cmp rZ, rX  ==>  cmn rZ, rY [, shift].
 * The shifted-neg form carries the shift operand over onto the cmn.
 */
void NegCmpToCmnPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    Operand &opnd1 = insn.GetOperand(kInsnSecondOpnd);
    Operand &opnd2 = prevInsn->GetOperand(kInsnSecondOpnd); /* rY: the neg's source */
    auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    MOperator prevMop = prevInsn->GetMachineOpcode();
    MOperator currMop = insn.GetMachineOpcode();
    Insn *newInsn = nullptr;
    if (prevMop == MOP_winegrr || prevMop == MOP_xinegrr) {
        MOperator newMop = (currMop == MOP_wcmprr) ? MOP_wcmnrr : MOP_xcmnrr;
        newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, ccReg, opnd1, opnd2));
    } else {
        /* prevMop == MOP_winegrrs || prevMop == MOP_xinegrrs */
        MOperator newMop = (currMop == MOP_wcmprr) ? MOP_wcmnrrs : MOP_xcmnrrs;
        Operand &shiftOpnd = prevInsn->GetOperand(kInsnThirdOpnd);
        newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, ccReg, opnd1, opnd2, shiftOpnd));
    }
    CHECK_FATAL(newInsn != nullptr, "must create newInsn");
    bb.ReplaceInsn(insn, *newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, *newInsn);
    optSuccess = true;
    SetCurrInsn(newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevInsn);
        DumpAfterPattern(prevs, &insn, newInsn);
    }
}
417
CheckCondition(Insn & insn)418 bool CsetCbzToBeqPattern::CheckCondition(Insn &insn)
419 {
420 MOperator curMop = insn.GetMachineOpcode();
421 if (curMop != MOP_wcbz && curMop != MOP_xcbz && curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
422 return false;
423 }
424 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
425 prevInsn = GetDefInsn(useReg);
426 if (prevInsn == nullptr) {
427 return false;
428 }
429 MOperator prevMop = prevInsn->GetMachineOpcode();
430 if (prevMop != MOP_wcsetrc && prevMop != MOP_xcsetrc) {
431 return false;
432 }
433 auto &ccReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
434 if (IsCCRegCrossVersion(*prevInsn, insn, ccReg)) {
435 return false;
436 }
437 return true;
438 }
439
SelectNewMop(ConditionCode condCode,bool inverse) const440 MOperator CsetCbzToBeqPattern::SelectNewMop(ConditionCode condCode, bool inverse) const
441 {
442 switch (condCode) {
443 case CC_NE:
444 return inverse ? MOP_beq : MOP_bne;
445 case CC_EQ:
446 return inverse ? MOP_bne : MOP_beq;
447 case CC_MI:
448 return inverse ? MOP_bpl : MOP_bmi;
449 case CC_PL:
450 return inverse ? MOP_bmi : MOP_bpl;
451 case CC_VS:
452 return inverse ? MOP_bvc : MOP_bvs;
453 case CC_VC:
454 return inverse ? MOP_bvs : MOP_bvc;
455 case CC_HI:
456 return inverse ? MOP_bls : MOP_bhi;
457 case CC_LS:
458 return inverse ? MOP_bhi : MOP_bls;
459 case CC_GE:
460 return inverse ? MOP_blt : MOP_bge;
461 case CC_LT:
462 return inverse ? MOP_bge : MOP_blt;
463 case CC_HS:
464 return inverse ? MOP_blo : MOP_bhs;
465 case CC_LO:
466 return inverse ? MOP_bhs : MOP_blo;
467 case CC_LE:
468 return inverse ? MOP_bgt : MOP_ble;
469 case CC_GT:
470 return inverse ? MOP_ble : MOP_bgt;
471 case CC_CS:
472 return inverse ? MOP_bcc : MOP_bcs;
473 default:
474 return MOP_undef;
475 }
476 }
477
/*
 * Performs the rewrite:  cset rX, cond ; cbz/cbnz rX, label  ==>  b.<cond'> label,
 * where cond' is cond for cbnz and the inverse of cond for cbz.
 */
void CsetCbzToBeqPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    MOperator curMop = insn.GetMachineOpcode();
    /* cbz branches when the cset produced 0, i.e. when cond was false */
    bool reverse = (curMop == MOP_wcbz || curMop == MOP_xcbz);
    auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
    auto &condOpnd = static_cast<CondOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
    MOperator newMop = SelectNewMop(condOpnd.GetCode(), reverse);
    DEBUG_ASSERT(newMop != MOP_undef, "unknown condition code");
    Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnThirdOpnd), labelOpnd);
    bb.ReplaceInsn(insn, newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, newInsn);
    optSuccess = true;
    SetCurrInsn(&newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevInsn);
        DumpAfterPattern(prevs, &insn, &newInsn);
    }
}
502
CheckCondition(Insn & insn)503 bool ExtLslToBitFieldInsertPattern::CheckCondition(Insn &insn)
504 {
505 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
506 prevInsn = GetDefInsn(useReg);
507 if (prevInsn == nullptr) {
508 return false;
509 }
510 MOperator prevMop = prevInsn->GetMachineOpcode();
511 if (prevMop != MOP_xsxtw64 && prevMop != MOP_xuxtw64) {
512 return false;
513 }
514 auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
515 if (immOpnd.GetValue() > k32BitSize) {
516 return false;
517 }
518 return true;
519 }
520
/*
 * Performs the rewrite:  sxtw/uxtw rX, rY ; lsl rZ, rX, #n  ==>  sbfiz/ubfiz rZ, rY, #n, #32.
 */
void ExtLslToBitFieldInsertPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    auto &prevSrcReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
    cgFunc->InsertExtendSet(prevSrcReg.GetRegisterNumber());
    MOperator newMop = (prevInsn->GetMachineOpcode() == MOP_xsxtw64) ? MOP_xsbfizrri6i6 : MOP_xubfizrri6i6;
    auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
    /* lsb = original shift amount, width = 32 (the extended field) */
    auto &newImmOpnd1 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
    ImmOperand &newImmOpnd2 = aarFunc->CreateImmOperand(k32BitSize, k6BitSize, false);
    Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), prevSrcReg,
                                                        newImmOpnd1, newImmOpnd2);
    bb.ReplaceInsn(insn, newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, newInsn);
    optSuccess = true;
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevInsn);
        DumpAfterPattern(prevs, &insn, &newInsn);
    }
}
545
IsOpndDefByZero(const Insn & insn) const546 bool CselToCsetPattern::IsOpndDefByZero(const Insn &insn) const
547 {
548 MOperator movMop = insn.GetMachineOpcode();
549 switch (movMop) {
550 case MOP_xmovrr:
551 case MOP_wmovrr: {
552 return IsZeroRegister(insn.GetOperand(kInsnSecondOpnd));
553 }
554 case MOP_wmovri32:
555 case MOP_xmovri64: {
556 auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
557 return immOpnd.GetValue() == 0;
558 }
559 default:
560 return false;
561 }
562 }
563
IsOpndDefByOne(const Insn & insn) const564 bool CselToCsetPattern::IsOpndDefByOne(const Insn &insn) const
565 {
566 MOperator movMop = insn.GetMachineOpcode();
567 if ((movMop != MOP_wmovri32) && (movMop != MOP_xmovri64)) {
568 return false;
569 }
570 auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
571 return immOpnd.GetValue() == 1;
572 }
573
CheckCondition(Insn & insn)574 bool CselToCsetPattern::CheckCondition(Insn &insn)
575 {
576 MOperator curMop = insn.GetMachineOpcode();
577 if (curMop != MOP_wcselrrrc && curMop != MOP_xcselrrrc) {
578 return false;
579 }
580 auto &useOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
581 prevMovInsn1 = GetDefInsn(useOpnd1);
582 if (prevMovInsn1 == nullptr) {
583 return false;
584 }
585 MOperator prevMop1 = prevMovInsn1->GetMachineOpcode();
586 if (prevMop1 != MOP_wmovri32 && prevMop1 != MOP_xmovri64 && prevMop1 != MOP_wmovrr && prevMop1 != MOP_xmovrr) {
587 return false;
588 }
589 auto &useOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
590 prevMovInsn2 = GetDefInsn(useOpnd2);
591 if (prevMovInsn2 == nullptr) {
592 return false;
593 }
594 MOperator prevMop2 = prevMovInsn2->GetMachineOpcode();
595 if (prevMop2 != MOP_wmovri32 && prevMop2 != MOP_xmovri64 && prevMop2 != MOP_wmovrr && prevMop2 != MOP_xmovrr) {
596 return false;
597 }
598 return true;
599 }
600
/*
 * Performs the rewrite:  csel rD, #1, #0, cond  ==>  cset rD, cond
 * and                    csel rD, #0, #1, cond  ==>  cset rD, !cond.
 * Other input pairs (checked here via IsOpndDefByOne/Zero) are left untouched.
 */
void CselToCsetPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    Operand &dstOpnd = insn.GetOperand(kInsnFirstOpnd);
    MOperator newMop = (dstOpnd.GetSize() == k64BitSize ? MOP_xcsetrc : MOP_wcsetrc);
    Operand &condOpnd = insn.GetOperand(kInsnFourthOpnd);
    Operand &rflag = insn.GetOperand(kInsnFifthOpnd);
    Insn *newInsn = nullptr;
    if (IsOpndDefByOne(*prevMovInsn1) && IsOpndDefByZero(*prevMovInsn2)) {
        newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, dstOpnd, condOpnd, rflag));
    } else if (IsOpndDefByZero(*prevMovInsn1) && IsOpndDefByOne(*prevMovInsn2)) {
        /* operands swapped: branch on the reversed condition instead */
        auto &origCondOpnd = static_cast<CondOperand &>(condOpnd);
        ConditionCode inverseCondCode = GetReverseCC(origCondOpnd.GetCode());
        if (inverseCondCode == kCcLast) {
            return;
        }
        auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
        CondOperand &inverseCondOpnd = aarFunc->GetCondOperand(inverseCondCode);
        newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, dstOpnd, inverseCondOpnd, rflag));
    }
    if (newInsn == nullptr) {
        return;
    }
    bb.ReplaceInsn(insn, *newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, *newInsn);
    optSuccess = true;
    SetCurrInsn(newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevMovInsn1);
        prevs.emplace_back(prevMovInsn2);
        DumpAfterPattern(prevs, &insn, newInsn);
    }
}
639
CheckAndSelectPattern(const Insn & currInsn)640 bool AndCmpBranchesToTbzPattern::CheckAndSelectPattern(const Insn &currInsn)
641 {
642 MOperator curMop = currInsn.GetMachineOpcode();
643 MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
644 auto &andImmOpnd = static_cast<ImmOperand &>(prevAndInsn->GetOperand(kInsnThirdOpnd));
645 auto &cmpImmOpnd = static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd));
646 if (cmpImmOpnd.GetValue() == 0) {
647 tbzImmVal = GetLogValueAtBase2(andImmOpnd.GetValue());
648 if (tbzImmVal < 0) {
649 return false;
650 }
651 switch (curMop) {
652 case MOP_beq:
653 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbz : MOP_xtbz;
654 break;
655 case MOP_bne:
656 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbnz : MOP_xtbnz;
657 break;
658 default:
659 return false;
660 }
661 } else {
662 tbzImmVal = GetLogValueAtBase2(andImmOpnd.GetValue());
663 int64 tmpVal = GetLogValueAtBase2(cmpImmOpnd.GetValue());
664 if (tbzImmVal < 0 || tmpVal < 0 || tbzImmVal != tmpVal) {
665 return false;
666 }
667 switch (curMop) {
668 case MOP_beq:
669 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbnz : MOP_xtbnz;
670 break;
671 case MOP_bne:
672 newMop = (prevAndMop == MOP_wandrri12) ? MOP_wtbz : MOP_xtbz;
673 break;
674 default:
675 return false;
676 }
677 }
678 return true;
679 }
680
/*
 * Matches the chain  and rX, rY, #imm ; cmp rX, #imm2 ; beq/bne label  via SSA
 * def-use, then delegates shape/opcode selection to CheckAndSelectPattern.
 */
bool AndCmpBranchesToTbzPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_beq && curMop != MOP_bne) {
        return false;
    }
    auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    prevCmpInsn = GetDefInsn(ccReg);
    if (prevCmpInsn == nullptr) {
        return false;
    }
    MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();
    if (prevCmpMop != MOP_wcmpri && prevCmpMop != MOP_xcmpri) {
        return false;
    }
    auto &cmpUseReg = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
    prevAndInsn = GetDefInsn(cmpUseReg);
    if (prevAndInsn == nullptr) {
        return false;
    }
    MOperator prevAndMop = prevAndInsn->GetMachineOpcode();
    if (prevAndMop != MOP_wandrri12 && prevAndMop != MOP_xandrri13) {
        return false;
    }
    CHECK_FATAL(prevAndInsn->GetOperand(kInsnFirstOpnd).GetSize() == prevCmpInsn->GetOperand(kInsnSecondOpnd).GetSize(),
                "def-use reg size must be same based-on ssa");
    if (!CheckAndSelectPattern(insn)) {
        return false;
    }
    return true;
}
712
/*
 * Performs the rewrite selected by CheckAndSelectPattern:
 *   and rX, rY, #2^k ; cmp rX, #imm ; beq/bne label  ==>  tbz/tbnz rY, #k, label.
 */
void AndCmpBranchesToTbzPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
    auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
    /* tbzImmVal (bit index) was computed in CheckAndSelectPattern */
    ImmOperand &tbzImmOpnd = aarFunc->CreateImmOperand(tbzImmVal, k8BitSize, false);
    Insn &newInsn =
        cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevAndInsn->GetOperand(kInsnSecondOpnd), tbzImmOpnd, labelOpnd);
    bb.ReplaceInsn(insn, newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, newInsn);
    optSuccess = true;
    SetCurrInsn(&newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevAndInsn);
        prevs.emplace_back(prevCmpInsn);
        DumpAfterPattern(prevs, &insn, &newInsn);
    }
}
736
CheckAndSelectPattern(const Insn & currInsn)737 bool ZeroCmpBranchesToTbzPattern::CheckAndSelectPattern(const Insn &currInsn)
738 {
739 MOperator currMop = currInsn.GetMachineOpcode();
740 MOperator prevMop = prevInsn->GetMachineOpcode();
741 switch (prevMop) {
742 case MOP_wcmpri:
743 case MOP_xcmpri: {
744 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
745 auto &immOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
746 if (immOpnd.GetValue() != 0) {
747 return false;
748 }
749 switch (currMop) {
750 case MOP_bge:
751 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
752 break;
753 case MOP_blt:
754 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
755 break;
756 default:
757 return false;
758 }
759 break;
760 }
761 case MOP_wcmprr:
762 case MOP_xcmprr: {
763 auto ®Opnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
764 auto ®Opnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
765 if (!IsZeroRegister(regOpnd0) && !IsZeroRegister(regOpnd1)) {
766 return false;
767 }
768 switch (currMop) {
769 case MOP_bge:
770 if (IsZeroRegister(regOpnd1)) {
771 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
772 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
773 } else {
774 return false;
775 }
776 break;
777 case MOP_ble:
778 if (IsZeroRegister(regOpnd0)) {
779 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
780 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
781 } else {
782 return false;
783 }
784 break;
785 case MOP_blt:
786 if (IsZeroRegister(regOpnd1)) {
787 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
788 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
789 } else {
790 return false;
791 }
792 break;
793 case MOP_bgt:
794 if (IsZeroRegister(regOpnd0)) {
795 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
796 newMop = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
797 } else {
798 return false;
799 }
800 break;
801 default:
802 return false;
803 }
804 break;
805 }
806 // fall through
807 [[clang::fallthrough]];
808 default:
809 return false;
810 }
811 return true;
812 }
813
CheckCondition(Insn & insn)814 bool ZeroCmpBranchesToTbzPattern::CheckCondition(Insn &insn)
815 {
816 MOperator curMop = insn.GetMachineOpcode();
817 if (curMop != MOP_bge && curMop != MOP_ble && curMop != MOP_blt && curMop != MOP_bgt) {
818 return false;
819 }
820 CHECK_FATAL(insn.GetOperand(kInsnSecondOpnd).IsLabel(), "must be labelOpnd");
821 auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
822 prevInsn = GetDefInsn(ccReg);
823 if (prevInsn == nullptr) {
824 return false;
825 }
826 MOperator prevMop = prevInsn->GetMachineOpcode();
827 if (prevMop != MOP_wcmpri && prevMop != MOP_xcmpri && prevMop != MOP_wcmprr && prevMop != MOP_xcmprr) {
828 return false;
829 }
830 if (!CheckAndSelectPattern(insn)) {
831 return false;
832 }
833 return true;
834 }
835
/*
 * Performs the rewrite selected by CheckAndSelectPattern:
 *   cmp-with-zero + signed branch  ==>  tbz/tbnz regOpnd, #(width - 1), label.
 */
void ZeroCmpBranchesToTbzPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    CHECK_FATAL(regOpnd != nullptr, "must have regOpnd");
    auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
    /* bit index of the sign bit: 31 for 32-bit regs, 63 for 64-bit regs */
    ImmOperand &bitOpnd = aarFunc->CreateImmOperand(
        (regOpnd->GetSize() <= k32BitSize) ? (k32BitSize - 1) : (k64BitSize - 1), k8BitSize, false);
    auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
    Insn &newInsn =
        cgFunc->GetInsnBuilder()->BuildInsn(newMop, *static_cast<RegOperand *>(regOpnd), bitOpnd, labelOpnd);
    bb.ReplaceInsn(insn, newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, newInsn);
    optSuccess = true;
    SetCurrInsn(&newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevInsn);
        DumpAfterPattern(prevs, &insn, &newInsn);
    }
}
860
/*
 * Matches  lsr rX, rY, #n ; and rZ, rX, #(2^m - 1)  so the pair can fold into
 * ubfx rZ, rY, #n, #m. Rejects the fold when the and result feeds a cbz/cbnz,
 * because the and+cbz -> tbz combination is more profitable.
 */
bool LsrAndToUbfxPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_wandrri12 && curMop != MOP_xandrri13) {
        return false;
    }
    int64 immValue = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
    /* and_imm value must be (1 << n - 1), i.e. a contiguous low-bit mask */
    if (immValue <= 0 || (((static_cast<uint64>(immValue)) & (static_cast<uint64>(immValue) + 1)) != 0)) {
        return false;
    }
    auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    prevInsn = GetDefInsn(useReg);
    if (prevInsn == nullptr) {
        return false;
    }
    MOperator prevMop = prevInsn->GetMachineOpcode();
    if (prevMop != MOP_wlsrrri5 && prevMop != MOP_xlsrrri6) {
        return false;
    }
    auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
    auto &currUseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    /* check def-use reg size found by ssa */
    CHECK_FATAL(prevDstOpnd.GetSize() == currUseOpnd.GetSize(), "def-use reg size must be same");
    auto &andDstReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    VRegVersion *andDstVersion = ssaInfo->FindSSAVersion(andDstReg.GetRegisterNumber());
    DEBUG_ASSERT(andDstVersion != nullptr, "find destReg Version failed");
    for (auto useDUInfoIt : andDstVersion->GetAllUseInsns()) {
        if (useDUInfoIt.second == nullptr) {
            continue;
        }
        Insn *useInsn = (useDUInfoIt.second)->GetInsn();
        if (useInsn == nullptr) {
            continue;
        }
        MOperator useMop = useInsn->GetMachineOpcode();
        /* combine [and & cbz --> tbz] first, to eliminate more insns becase of incompleted copy prop */
        if (useMop == MOP_wcbz || useMop == MOP_xcbz || useMop == MOP_wcbnz || useMop == MOP_xcbnz) {
            return false;
        }
    }
    return true;
}
904
/*
 * Performs the rewrite:  lsr rX, rY, #lsb ; and rZ, rX, #(2^width - 1)
 *                   ==>  ubfx rZ, rY, #lsb, #width,
 * provided the extracted field fits in the register (lsb + width <= 32/64).
 */
void LsrAndToUbfxPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
    bool is64Bits = (static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetSize() == k64BitSize);
    Operand &resOpnd = insn.GetOperand(kInsnFirstOpnd);
    Operand &srcOpnd = prevInsn->GetOperand(kInsnSecondOpnd);
    /* lsb of the extracted field = the lsr's shift amount */
    int64 immVal1 = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
    Operand &immOpnd1 = is64Bits ? aarFunc->CreateImmOperand(immVal1, kMaxImmVal6Bits, false)
                                 : aarFunc->CreateImmOperand(immVal1, kMaxImmVal5Bits, false);
    /* width of the field: mask is 2^m - 1, so ffs(mask + 1) - 1 == m */
    int64 tmpVal = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValue();
    int64 immVal2 = __builtin_ffsll(tmpVal + 1) - 1;
    if ((immVal2 < k1BitSize) || (is64Bits && (immVal1 + immVal2) > k64BitSize) ||
        (!is64Bits && (immVal1 + immVal2) > k32BitSize)) {
        return;
    }
    Operand &immOpnd2 = is64Bits ? aarFunc->CreateImmOperand(immVal2, kMaxImmVal6Bits, false)
                                 : aarFunc->CreateImmOperand(immVal2, kMaxImmVal5Bits, false);
    MOperator newMop = (is64Bits ? MOP_xubfxrri6i6 : MOP_wubfxrri5i5);
    Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, srcOpnd, immOpnd1, immOpnd2);
    bb.ReplaceInsn(insn, newInsn);
    /* update ssa info */
    ssaInfo->ReplaceInsn(insn, newInsn);
    optSuccess = true;
    SetCurrInsn(&newInsn);
    /* dump pattern info */
    if (CG_PEEP_DUMP) {
        std::vector<Insn *> prevs;
        prevs.emplace_back(prevInsn);
        DumpAfterPattern(prevs, &insn, &newInsn);
    }
}
939
CheckCondition(Insn & insn)940 bool MvnAndToBicPattern::CheckCondition(Insn &insn)
941 {
942 MOperator curMop = insn.GetMachineOpcode();
943 if (curMop != MOP_wandrrr && curMop != MOP_xandrrr) {
944 return false;
945 }
946 auto &useReg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
947 auto &useReg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
948 prevInsn1 = GetDefInsn(useReg1);
949 prevInsn2 = GetDefInsn(useReg2);
950 MOperator mop = insn.GetMachineOpcode();
951 MOperator desMop = mop == MOP_xandrrr ? MOP_xnotrr : MOP_wnotrr;
952 op1IsMvnDef = prevInsn1 != nullptr && prevInsn1->GetMachineOpcode() == desMop;
953 op2IsMvnDef = prevInsn2 != nullptr && prevInsn2->GetMachineOpcode() == desMop;
954 if (op1IsMvnDef || op2IsMvnDef) {
955 return true;
956 }
957 return false;
958 }
959
Run(BB & bb,Insn & insn)960 void MvnAndToBicPattern::Run(BB &bb, Insn &insn)
961 {
962 if (!CheckCondition(insn)) {
963 return;
964 }
965 MOperator newMop = insn.GetMachineOpcode() == MOP_xandrrr ? MOP_xbicrrr : MOP_wbicrrr;
966 Insn *prevInsn = op1IsMvnDef ? prevInsn1 : prevInsn2;
967 auto &prevOpnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
968 auto &opnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
969 auto &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
970 auto &opnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
971 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd0, op1IsMvnDef ? opnd2 : opnd1, prevOpnd1);
972 /* update ssa info */
973 ssaInfo->ReplaceInsn(insn, newInsn);
974 bb.ReplaceInsn(insn, newInsn);
975 optSuccess = true;
976 SetCurrInsn(&newInsn);
977 /* dump pattern info */
978 if (CG_PEEP_DUMP) {
979 std::vector<Insn *> prevs;
980 prevs.emplace_back(prevInsn);
981 DumpAfterPattern(prevs, &insn, &newInsn);
982 }
983 }
984
CheckCondition(Insn & insn)985 bool AndCbzToTbzPattern::CheckCondition(Insn &insn)
986 {
987 MOperator curMop = insn.GetMachineOpcode();
988 if (curMop != MOP_wcbz && curMop != MOP_xcbz && curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
989 return false;
990 }
991 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
992 prevInsn = ssaInfo ? GetDefInsn(useReg) : insn.GetPreviousMachineInsn();
993 if (prevInsn == nullptr) {
994 return false;
995 }
996 MOperator prevMop = prevInsn->GetMachineOpcode();
997 if (prevMop != MOP_wandrri12 && prevMop != MOP_xandrri13) {
998 return false;
999 }
1000 if (!ssaInfo && (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd)))) {
1001 return false;
1002 }
1003 return true;
1004 }
1005
Run(BB & bb,Insn & insn)1006 void AndCbzToTbzPattern::Run(BB &bb, Insn &insn)
1007 {
1008 auto *aarchFunc = static_cast<AArch64CGFunc *>(cgFunc);
1009 if (!CheckCondition(insn)) {
1010 return;
1011 }
1012 auto &andImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1013 int64 tbzVal = GetLogValueAtBase2(andImm.GetValue());
1014 if (tbzVal == -1) {
1015 return;
1016 }
1017 MOperator mOp = insn.GetMachineOpcode();
1018 MOperator newMop = MOP_undef;
1019 switch (mOp) {
1020 case MOP_wcbz:
1021 newMop = MOP_wtbz;
1022 break;
1023 case MOP_wcbnz:
1024 newMop = MOP_wtbnz;
1025 break;
1026 case MOP_xcbz:
1027 newMop = MOP_xtbz;
1028 break;
1029 case MOP_xcbnz:
1030 newMop = MOP_xtbnz;
1031 break;
1032 default:
1033 CHECK_FATAL(false, "must be cbz/cbnz");
1034 break;
1035 }
1036 auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
1037 ImmOperand &tbzImm = aarchFunc->CreateImmOperand(tbzVal, k8BitSize, false);
1038 Insn &newInsn =
1039 cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevInsn->GetOperand(kInsnSecondOpnd), tbzImm, labelOpnd);
1040 bb.ReplaceInsn(insn, newInsn);
1041 if (ssaInfo) {
1042 /* update ssa info */
1043 ssaInfo->ReplaceInsn(insn, newInsn);
1044 }
1045 optSuccess = true;
1046 SetCurrInsn(&newInsn);
1047 /* dump pattern info */
1048 if (CG_PEEP_DUMP) {
1049 std::vector<Insn *> prevs;
1050 prevs.emplace_back(prevInsn);
1051 DumpAfterPattern(prevs, &insn, &newInsn);
1052 }
1053 }
1054
CheckCondition(Insn & insn)1055 bool CombineSameArithmeticPattern::CheckCondition(Insn &insn)
1056 {
1057 MOperator curMop = insn.GetMachineOpcode();
1058 if (std::find(validMops.begin(), validMops.end(), curMop) == validMops.end()) {
1059 return false;
1060 }
1061 Operand &useOpnd = insn.GetOperand(kInsnSecondOpnd);
1062 CHECK_FATAL(useOpnd.IsRegister(), "expect regOpnd");
1063 prevInsn = GetDefInsn(static_cast<RegOperand &>(useOpnd));
1064 if (prevInsn == nullptr) {
1065 return false;
1066 }
1067 if (prevInsn->GetMachineOpcode() != curMop) {
1068 return false;
1069 }
1070 auto &prevDefOpnd = prevInsn->GetOperand(kInsnFirstOpnd);
1071 CHECK_FATAL(prevDefOpnd.IsRegister(), "expect regOpnd");
1072 InsnSet useInsns = GetAllUseInsn(static_cast<RegOperand &>(prevDefOpnd));
1073 if (useInsns.size() > 1) {
1074 return false;
1075 }
1076 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1077 CHECK_FATAL(prevInsn->GetOperand(kInsnThirdOpnd).IsIntImmediate(), "expect immOpnd");
1078 CHECK_FATAL(insn.GetOperand(kInsnThirdOpnd).IsIntImmediate(), "expect immOpnd");
1079 auto &prevImmOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
1080 auto &curImmOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
1081 int64 prevImm = prevImmOpnd.GetValue();
1082 int64 curImm = curImmOpnd.GetValue();
1083 newImmOpnd = &aarFunc->CreateImmOperand(prevImmOpnd.GetValue() + curImmOpnd.GetValue(), curImmOpnd.GetSize(),
1084 curImmOpnd.IsSignedValue());
1085 switch (curMop) {
1086 case MOP_wlsrrri5:
1087 case MOP_wasrrri5:
1088 case MOP_wlslrri5: {
1089 if ((prevImm + curImm) < k0BitSizeInt || (prevImm + curImm) >= k32BitSizeInt) {
1090 return false;
1091 }
1092 break;
1093 }
1094 case MOP_xlsrrri6:
1095 case MOP_xasrrri6:
1096 case MOP_xlslrri6: {
1097 if ((prevImm + curImm) < k0BitSizeInt || (prevImm + curImm) >= k64BitSizeInt) {
1098 return false;
1099 }
1100 break;
1101 }
1102 case MOP_waddrri12:
1103 case MOP_xaddrri12:
1104 case MOP_wsubrri12:
1105 case MOP_xsubrri12: {
1106 if (!newImmOpnd->IsSingleInstructionMovable()) {
1107 return false;
1108 }
1109 break;
1110 }
1111 default:
1112 return false;
1113 }
1114 return true;
1115 }
1116
Run(BB & bb,Insn & insn)1117 void CombineSameArithmeticPattern::Run(BB &bb, Insn &insn)
1118 {
1119 if (!CheckCondition(insn)) {
1120 return;
1121 }
1122 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), insn.GetOperand(kInsnFirstOpnd),
1123 prevInsn->GetOperand(kInsnSecondOpnd), *newImmOpnd);
1124 bb.ReplaceInsn(insn, newInsn);
1125 /* update ssa info */
1126 ssaInfo->ReplaceInsn(insn, newInsn);
1127 optSuccess = true;
1128 SetCurrInsn(&newInsn);
1129 /* dump pattern info */
1130 if (CG_PEEP_DUMP) {
1131 std::vector<Insn *> prevs;
1132 (void)prevs.emplace_back(prevInsn);
1133 DumpAfterPattern(prevs, &insn, &newInsn);
1134 }
1135 }
1136
/*
 * Matches an orr combining a logical-shift-right and a logical-shift-left of
 * amounts that sum exactly to the register width — which is what a single
 * extr instruction computes. Two shapes are recognised:
 *   1) orr rd, rA, rB              where rA and rB are defined by lsr/lsl insns
 *   2) orr rd, rA, rB, LSL|LSR #n  where rA is defined by the opposite shift
 * On success, prevLsrInsn/prevLslInsn and shiftValue (the extr lsb) are set.
 */
bool LogicShiftAndOrrToExtrPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_wiorrrr && curMop != MOP_xiorrrr && curMop != MOP_wiorrrrs && curMop != MOP_xiorrrrs) {
        return false;
    }
    Operand &curDstOpnd = insn.GetOperand(kInsnFirstOpnd);
    is64Bits = (curDstOpnd.GetSize() == k64BitSize);
    if (curMop == MOP_wiorrrr || curMop == MOP_xiorrrr) {
        /* shape 1: both orr inputs must be defined by shift-immediate insns */
        auto &useReg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        Insn *prevInsn1 = GetDefInsn(useReg1);
        auto &useReg2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
        Insn *prevInsn2 = GetDefInsn(useReg2);
        if (prevInsn1 == nullptr || prevInsn2 == nullptr) {
            return false;
        }
        MOperator prevMop1 = prevInsn1->GetMachineOpcode();
        MOperator prevMop2 = prevInsn2->GetMachineOpcode();
        /* one def must be lsr and the other lsl, in either order */
        if ((prevMop1 == MOP_wlsrrri5 || prevMop1 == MOP_xlsrrri6) &&
            (prevMop2 == MOP_wlslrri5 || prevMop2 == MOP_xlslrri6)) {
            prevLsrInsn = prevInsn1;
            prevLslInsn = prevInsn2;
        } else if ((prevMop2 == MOP_wlsrrri5 || prevMop2 == MOP_xlsrrri6) &&
                   (prevMop1 == MOP_wlslrri5 || prevMop1 == MOP_xlslrri6)) {
            prevLsrInsn = prevInsn2;
            prevLslInsn = prevInsn1;
        } else {
            return false;
        }
        int64 prevLsrImmValue = static_cast<ImmOperand &>(prevLsrInsn->GetOperand(kInsnThirdOpnd)).GetValue();
        int64 prevLslImmValue = static_cast<ImmOperand &>(prevLslInsn->GetOperand(kInsnThirdOpnd)).GetValue();
        /* defensive: reject negative immediates before the exact-width check */
        if ((prevLsrImmValue + prevLslImmValue) < 0) {
            return false;
        }
        /* the two shift amounts must cover the whole register exactly */
        if ((is64Bits && (prevLsrImmValue + prevLslImmValue) != k64BitSize) ||
            (!is64Bits && (prevLsrImmValue + prevLslImmValue) != k32BitSize)) {
            return false;
        }
        shiftValue = prevLsrImmValue;
    } else if (curMop == MOP_wiorrrrs || curMop == MOP_xiorrrrs) {
        /* shape 2: one shift is folded into the orr, the other defines its operand */
        auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        Insn *prevInsn = GetDefInsn(useReg);
        if (prevInsn == nullptr) {
            return false;
        }
        MOperator prevMop = prevInsn->GetMachineOpcode();
        if (prevMop != MOP_wlsrrri5 && prevMop != MOP_xlsrrri6 && prevMop != MOP_wlslrri5 && prevMop != MOP_xlslrri6) {
            return false;
        }
        int64 prevImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue();
        auto &shiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
        uint32 shiftAmount = shiftOpnd.GetShiftAmount();
        /* the folded shift and the defining shift must be of opposite kinds;
         * the extr lsb is always the lsr side's amount */
        if (shiftOpnd.GetShiftOp() == BitShiftOperand::kLSL && (prevMop == MOP_wlsrrri5 || prevMop == MOP_xlsrrri6)) {
            prevLsrInsn = prevInsn;
            shiftValue = prevImm;
        } else if (shiftOpnd.GetShiftOp() == BitShiftOperand::kLSR &&
                   (prevMop == MOP_wlslrri5 || prevMop == MOP_xlslrri6)) {
            prevLslInsn = prevInsn;
            shiftValue = shiftAmount;
        } else {
            return false;
        }
        if (prevImm + static_cast<int64>(shiftAmount) < 0) {
            return false;
        }
        if ((is64Bits && (prevImm + static_cast<int64>(shiftAmount)) != k64BitSize) ||
            (!is64Bits && (prevImm + static_cast<int64>(shiftAmount)) != k32BitSize)) {
            return false;
        }
    } else {
        CHECK_FATAL(false, "must be above mop");
        return false;
    }
    return true;
}
1212
Run(BB & bb,Insn & insn)1213 void LogicShiftAndOrrToExtrPattern::Run(BB &bb, Insn &insn)
1214 {
1215 if (!CheckCondition(insn)) {
1216 return;
1217 }
1218 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1219 Operand &opnd1 =
1220 (prevLslInsn == nullptr ? insn.GetOperand(kInsnThirdOpnd) : prevLslInsn->GetOperand(kInsnSecondOpnd));
1221 Operand &opnd2 =
1222 (prevLsrInsn == nullptr ? insn.GetOperand(kInsnThirdOpnd) : prevLsrInsn->GetOperand(kInsnSecondOpnd));
1223 ImmOperand &immOpnd = is64Bits ? aarFunc->CreateImmOperand(shiftValue, kMaxImmVal6Bits, false)
1224 : aarFunc->CreateImmOperand(shiftValue, kMaxImmVal5Bits, false);
1225 MOperator newMop = is64Bits ? MOP_xextrrrri6 : MOP_wextrrrri5;
1226 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, insn.GetOperand(kInsnFirstOpnd), opnd1, opnd2, immOpnd);
1227 bb.ReplaceInsn(insn, newInsn);
1228 /* update ssa info */
1229 ssaInfo->ReplaceInsn(insn, newInsn);
1230 optSuccess = true;
1231 SetCurrInsn(&newInsn);
1232 /* dump pattern info */
1233 if (CG_PEEP_DUMP) {
1234 std::vector<Insn *> prevs;
1235 prevs.emplace_back(prevLsrInsn);
1236 prevs.emplace_back(prevLslInsn);
1237 DumpAfterPattern(prevs, &insn, &newInsn);
1238 }
1239 }
1240
SetArithType(const Insn & currInsn)1241 void SimplifyMulArithmeticPattern::SetArithType(const Insn &currInsn)
1242 {
1243 MOperator mOp = currInsn.GetMachineOpcode();
1244 switch (mOp) {
1245 case MOP_waddrrr:
1246 case MOP_xaddrrr: {
1247 arithType = kAdd;
1248 isFloat = false;
1249 break;
1250 }
1251 case MOP_dadd:
1252 case MOP_sadd: {
1253 arithType = kFAdd;
1254 isFloat = true;
1255 break;
1256 }
1257 case MOP_wsubrrr:
1258 case MOP_xsubrrr: {
1259 arithType = kSub;
1260 isFloat = false;
1261 validOpndIdx = kInsnThirdOpnd;
1262 break;
1263 }
1264 case MOP_dsub:
1265 case MOP_ssub: {
1266 arithType = kFSub;
1267 isFloat = true;
1268 validOpndIdx = kInsnThirdOpnd;
1269 break;
1270 }
1271 case MOP_xinegrr:
1272 case MOP_winegrr: {
1273 arithType = kNeg;
1274 isFloat = false;
1275 validOpndIdx = kInsnSecondOpnd;
1276 break;
1277 }
1278 case MOP_wfnegrr:
1279 case MOP_xfnegrr: {
1280 arithType = kFNeg;
1281 isFloat = true;
1282 validOpndIdx = kInsnSecondOpnd;
1283 break;
1284 }
1285 default: {
1286 CHECK_FATAL(false, "must be above mop");
1287 break;
1288 }
1289 }
1290 }
1291
CheckCondition(Insn & insn)1292 bool SimplifyMulArithmeticPattern::CheckCondition(Insn &insn)
1293 {
1294 if (arithType == kUndef || validOpndIdx < 0) {
1295 return false;
1296 }
1297 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(static_cast<uint32>(validOpndIdx)));
1298 prevInsn = GetDefInsn(useReg);
1299 if (prevInsn == nullptr) {
1300 return false;
1301 }
1302 regno_t useRegNO = useReg.GetRegisterNumber();
1303 VRegVersion *useVersion = ssaInfo->FindSSAVersion(useRegNO);
1304 if (useVersion->GetAllUseInsns().size() > 1) {
1305 return false;
1306 }
1307 MOperator currMop = insn.GetMachineOpcode();
1308 if (currMop == MOP_dadd || currMop == MOP_sadd || currMop == MOP_dsub || currMop == MOP_ssub ||
1309 currMop == MOP_wfnegrr || currMop == MOP_xfnegrr) {
1310 isFloat = true;
1311 }
1312 MOperator prevMop = prevInsn->GetMachineOpcode();
1313 if (prevMop != MOP_wmulrrr && prevMop != MOP_xmulrrr && prevMop != MOP_xvmuld && prevMop != MOP_xvmuls) {
1314 return false;
1315 }
1316 if (isFloat && (prevMop == MOP_wmulrrr || prevMop == MOP_xmulrrr)) {
1317 return false;
1318 }
1319 if (!isFloat && (prevMop == MOP_xvmuld || prevMop == MOP_xvmuls)) {
1320 return false;
1321 }
1322 if ((currMop == MOP_xaddrrr) || (currMop == MOP_waddrrr)) {
1323 return true;
1324 }
1325 return CGOptions::IsFastMath();
1326 }
1327
DoOptimize(BB & currBB,Insn & currInsn)1328 void SimplifyMulArithmeticPattern::DoOptimize(BB &currBB, Insn &currInsn)
1329 {
1330 Operand &resOpnd = currInsn.GetOperand(kInsnFirstOpnd);
1331 Operand &opndMulOpnd1 = prevInsn->GetOperand(kInsnSecondOpnd);
1332 Operand &opndMulOpnd2 = prevInsn->GetOperand(kInsnThirdOpnd);
1333 bool is64Bits = (static_cast<RegOperand &>(resOpnd).GetSize() == k64BitSize);
1334 /* may overflow */
1335 if ((prevInsn->GetOperand(kInsnFirstOpnd).GetSize() == k32BitSize) && is64Bits) {
1336 return;
1337 }
1338 MOperator newMop = is64Bits ? curMop2NewMopTable[arithType][1] : curMop2NewMopTable[arithType][0];
1339 Insn *newInsn = nullptr;
1340 if (arithType == kNeg || arithType == kFNeg) {
1341 newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, opndMulOpnd1, opndMulOpnd2));
1342 } else {
1343 Operand &opnd3 = (validOpndIdx == kInsnSecondOpnd) ? currInsn.GetOperand(kInsnThirdOpnd)
1344 : currInsn.GetOperand(kInsnSecondOpnd);
1345 newInsn = &(cgFunc->GetInsnBuilder()->BuildInsn(newMop, resOpnd, opndMulOpnd1, opndMulOpnd2, opnd3));
1346 }
1347 CHECK_FATAL(newInsn != nullptr, "must create newInsn");
1348 currBB.ReplaceInsn(currInsn, *newInsn);
1349 /* update ssa info */
1350 ssaInfo->ReplaceInsn(currInsn, *newInsn);
1351 optSuccess = true;
1352 /* dump pattern info */
1353 if (CG_PEEP_DUMP) {
1354 std::vector<Insn *> prevs;
1355 prevs.emplace_back(prevInsn);
1356 DumpAfterPattern(prevs, &currInsn, newInsn);
1357 }
1358 }
1359
Run(BB & bb,Insn & insn)1360 void SimplifyMulArithmeticPattern::Run(BB &bb, Insn &insn)
1361 {
1362 SetArithType(insn);
1363 if (arithType == kAdd || arithType == kFAdd) {
1364 validOpndIdx = kInsnSecondOpnd;
1365 if (CheckCondition(insn)) {
1366 DoOptimize(bb, insn);
1367 return;
1368 } else {
1369 validOpndIdx = kInsnThirdOpnd;
1370 }
1371 }
1372 if (!CheckCondition(insn)) {
1373 return;
1374 }
1375 DoOptimize(bb, insn);
1376 }
1377
SetSpecificExtType(const Insn & currInsn)1378 void ElimSpecificExtensionPattern::SetSpecificExtType(const Insn &currInsn)
1379 {
1380 MOperator mOp = currInsn.GetMachineOpcode();
1381 switch (mOp) {
1382 case MOP_xsxtb32: {
1383 is64Bits = false;
1384 extTypeIdx = SXTB;
1385 break;
1386 }
1387 case MOP_xsxtb64: {
1388 is64Bits = true;
1389 extTypeIdx = SXTB;
1390 break;
1391 }
1392 case MOP_xsxth32: {
1393 is64Bits = false;
1394 extTypeIdx = SXTH;
1395 break;
1396 }
1397 case MOP_xsxth64: {
1398 is64Bits = true;
1399 extTypeIdx = SXTH;
1400 break;
1401 }
1402 case MOP_xsxtw64: {
1403 is64Bits = true;
1404 extTypeIdx = SXTW;
1405 break;
1406 }
1407 case MOP_xuxtb32: {
1408 is64Bits = false;
1409 extTypeIdx = UXTB;
1410 break;
1411 }
1412 case MOP_xuxth32: {
1413 is64Bits = false;
1414 extTypeIdx = UXTH;
1415 break;
1416 }
1417 case MOP_xuxtw64: {
1418 is64Bits = true;
1419 extTypeIdx = UXTW;
1420 break;
1421 }
1422 default: {
1423 extTypeIdx = EXTUNDEF;
1424 }
1425 }
1426 }
1427
SetOptSceneType()1428 void ElimSpecificExtensionPattern::SetOptSceneType()
1429 {
1430 if (prevInsn->IsCall()) {
1431 sceneType = kSceneMov;
1432 return;
1433 }
1434 MOperator preMop = prevInsn->GetMachineOpcode();
1435 switch (preMop) {
1436 case MOP_wldr:
1437 case MOP_wldrb:
1438 case MOP_wldrsb:
1439 case MOP_wldrh:
1440 case MOP_wldrsh:
1441 case MOP_xldrsw: {
1442 sceneType = kSceneLoad;
1443 break;
1444 }
1445 case MOP_wmovri32:
1446 case MOP_xmovri64: {
1447 sceneType = kSceneMov;
1448 break;
1449 }
1450 case MOP_xsxtb32:
1451 case MOP_xsxtb64:
1452 case MOP_xsxth32:
1453 case MOP_xsxth64:
1454 case MOP_xsxtw64:
1455 case MOP_xuxtb32:
1456 case MOP_xuxth32:
1457 case MOP_xuxtw64: {
1458 sceneType = kSceneSameExt;
1459 break;
1460 }
1461 default: {
1462 sceneType = kSceneUndef;
1463 }
1464 }
1465 }
1466
ReplaceExtWithMov(Insn & currInsn)1467 void ElimSpecificExtensionPattern::ReplaceExtWithMov(Insn &currInsn)
1468 {
1469 auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
1470 auto &currDstOpnd = static_cast<RegOperand &>(currInsn.GetOperand(kInsnFirstOpnd));
1471 MOperator newMop = is64Bits ? MOP_xmovrr : MOP_wmovrr;
1472 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, currDstOpnd, prevDstOpnd);
1473 currBB->ReplaceInsn(currInsn, newInsn);
1474 /* update ssa info */
1475 ssaInfo->ReplaceInsn(currInsn, newInsn);
1476 optSuccess = true;
1477 /* dump pattern info */
1478 if (CG_PEEP_DUMP) {
1479 std::vector<Insn *> prevs;
1480 prevs.emplace_back(prevInsn);
1481 DumpAfterPattern(prevs, &currInsn, &newInsn);
1482 }
1483 }
1484
/*
 * Removes an extension that follows a mov-immediate (or the mov of a call's
 * return value) when the extension provably cannot change the value.
 */
void ElimSpecificExtensionPattern::ElimExtensionAfterMov(Insn &insn)
{
    if (&insn == currBB->GetFirstInsn()) {
        return;
    }
    auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
    auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto &currSrcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
        return;
    }
    MOperator currMop = insn.GetMachineOpcode();
    /* example 2) [mov w0, R0] is return value of call and return size is not of range */
    if (prevInsn->IsCall() && (currSrcOpnd.GetRegisterNumber() == R0 || currSrcOpnd.GetRegisterNumber() == V0) &&
        currDstOpnd.GetRegisterNumber() == currSrcOpnd.GetRegisterNumber()) {
        uint32 retSize = prevInsn->GetRetSize();
        /* an unsigned extension is redundant when the callee's return is narrower */
        if (retSize > 0 &&
            ((currMop == MOP_xuxtb32 && retSize <= k1ByteSize) || (currMop == MOP_xuxth32 && retSize <= k2ByteSize) ||
             (currMop == MOP_xuxtw64 && retSize <= k4ByteSize))) {
            ReplaceExtWithMov(insn);
        }
        return;
    }
    /* a signed-return call might carry sign bits the extension must keep */
    if (prevInsn->IsCall() && prevInsn->GetIsCallReturnSigned()) {
        return;
    }
    /* NOTE(review): from here prevInsn is treated as a mov-immediate; an
     * unsigned-return call that did not match the R0/V0 case above would be
     * mis-cast below -- confirm callers guarantee this cannot happen. */
    auto &immMovOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
    int64 value = immMovOpnd.GetValue();
    uint64 minRange = extValueRangeTable[extTypeIdx][0];
    uint64 maxRange = extValueRangeTable[extTypeIdx][1];
    if (currMop == MOP_xsxtb32 || currMop == MOP_xsxth32) {
        /* value should be in valid range */
        if (static_cast<uint64>(value) >= minRange && static_cast<uint64>(value) <= maxRange &&
            immMovOpnd.IsSingleInstructionMovable(currDstOpnd.GetSize())) {
            ReplaceExtWithMov(insn);
        }
    } else if (currMop == MOP_xuxtb32 || currMop == MOP_xuxth32) {
        /* zero-extension is a no-op when no bit above the field is set */
        if (!(static_cast<uint64>(value) & minRange)) {
            ReplaceExtWithMov(insn);
        }
    } else if (currMop == MOP_xuxtw64) {
        /* writing a w-register already zeroes the upper 32 bits */
        ReplaceExtWithMov(insn);
    } else {
        /* MOP_xsxtb64 & MOP_xsxth64 & MOP_xsxtw64 */
        if (!(static_cast<uint64>(value) & minRange) && immMovOpnd.IsSingleInstructionMovable(currDstOpnd.GetSize())) {
            ReplaceExtWithMov(insn);
        }
    }
}
1534
IsValidLoadExtPattern(Insn & currInsn,MOperator oldMop,MOperator newMop) const1535 bool ElimSpecificExtensionPattern::IsValidLoadExtPattern(Insn &currInsn, MOperator oldMop, MOperator newMop) const
1536 {
1537 if (oldMop == newMop) {
1538 return true;
1539 }
1540 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
1541 auto *memOpnd = static_cast<MemOperand *>(prevInsn->GetMemOpnd());
1542 DEBUG_ASSERT(!prevInsn->IsStorePair(), "do not do ElimSpecificExtensionPattern for str pair");
1543 DEBUG_ASSERT(!prevInsn->IsLoadPair(), "do not do ElimSpecificExtensionPattern for ldr pair");
1544 if (memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi &&
1545 !aarFunc->IsOperandImmValid(newMop, memOpnd, kInsnSecondOpnd)) {
1546 return false;
1547 }
1548 uint32 shiftAmount = memOpnd->ShiftAmount();
1549 if (shiftAmount == 0) {
1550 return true;
1551 }
1552 const InsnDesc *md = &AArch64CG::kMd[newMop];
1553 uint32 memSize = md->GetOperandSize() / k8BitSize;
1554 uint32 validShiftAmount =
1555 ((memSize == k8BitSize)
1556 ? k3BitSize
1557 : ((memSize == k4BitSize) ? k2BitSize : ((memSize == k2BitSize) ? k1BitSize : k0BitSize)));
1558 if (shiftAmount != validShiftAmount) {
1559 return false;
1560 }
1561 return true;
1562 }
1563
SelectNewLoadMopByBitSize(MOperator lowBitMop) const1564 MOperator ElimSpecificExtensionPattern::SelectNewLoadMopByBitSize(MOperator lowBitMop) const
1565 {
1566 auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
1567 switch (lowBitMop) {
1568 case MOP_wldrsb: {
1569 prevDstOpnd.SetSize(k64BitSize);
1570 return MOP_xldrsb;
1571 }
1572 case MOP_wldrsh: {
1573 prevDstOpnd.SetSize(k64BitSize);
1574 return MOP_xldrsh;
1575 }
1576 default:
1577 break;
1578 }
1579 return lowBitMop;
1580 }
1581
/*
 * Folds an extension into the preceding load by switching the load to its
 * extending variant (e.g. ldrb + sxtb64 -> ldrsb) and downgrading the
 * extension itself to a plain register move.
 */
void ElimSpecificExtensionPattern::ElimExtensionAfterLoad(Insn &insn)
{
    if (extTypeIdx == EXTUNDEF) {
        return;
    }
    MOperator prevOrigMop = prevInsn->GetMachineOpcode();
    /* scan the candidate (origLoad, newLoad) pairs for this extension kind */
    for (uint8 i = 0; i < kPrevLoadPatternNum; i++) {
        DEBUG_ASSERT(extTypeIdx < SpecificExtTypeSize, "extTypeIdx must be lower than SpecificExtTypeSize");
        if (prevOrigMop != loadMappingTable[extTypeIdx][i][0]) {
            continue;
        }
        MOperator prevNewMop = loadMappingTable[extTypeIdx][i][1];
        /* the new load must still encode the memory operand legally */
        if (!IsValidLoadExtPattern(insn, prevOrigMop, prevNewMop)) {
            return;
        }
        /* a 64-bit signed extension needs the widened (x-register) load form */
        if (is64Bits && extTypeIdx >= SXTB && extTypeIdx <= SXTW) {
            prevNewMop = SelectNewLoadMopByBitSize(prevNewMop);
        }
        auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
        auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
        /* to avoid {mov [64], [32]} in the case of big endian */
        if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
            return;
        }

        auto *newMemOp = GetOrCreateMemOperandForNewMOP(*cgFunc, *prevInsn, prevNewMop);

        if (newMemOp == nullptr) {
            return;
        }

        auto *aarCGSSAInfo = static_cast<AArch64CGSSAInfo *>(ssaInfo);
        if (CG_PEEP_DUMP) {
            LogInfo::MapleLogger() << ">>>>>>> In " << GetPatternName() << " : <<<<<<<\n";
            if (prevOrigMop != prevNewMop) {
                LogInfo::MapleLogger() << "======= OrigPrevInsn : \n";
                prevInsn->Dump();
                aarCGSSAInfo->DumpInsnInSSAForm(*prevInsn);
            }
        }

        /* mutate the load in place: new memory operand plus new opcode */
        prevInsn->SetMemOpnd(newMemOp);
        prevInsn->SetMOP(AArch64CG::kMd[prevNewMop]);

        if ((prevOrigMop != prevNewMop) && CG_PEEP_DUMP) {
            LogInfo::MapleLogger() << "======= NewPrevInsn : \n";
            prevInsn->Dump();
            aarCGSSAInfo->DumpInsnInSSAForm(*prevInsn);
        }

        /* the extension itself degenerates into a register move */
        MOperator movMop = is64Bits ? MOP_xmovrr : MOP_wmovrr;
        Insn &newMovInsn = cgFunc->GetInsnBuilder()->BuildInsn(movMop, insn.GetOperand(kInsnFirstOpnd),
                                                               prevInsn->GetOperand(kInsnFirstOpnd));
        currBB->ReplaceInsn(insn, newMovInsn);
        /* update ssa info */
        ssaInfo->ReplaceInsn(insn, newMovInsn);
        optSuccess = true;
        /* dump pattern info */
        if (CG_PEEP_DUMP) {
            LogInfo::MapleLogger() << "======= ReplacedInsn :\n";
            insn.Dump();
            aarCGSSAInfo->DumpInsnInSSAForm(insn);
            LogInfo::MapleLogger() << "======= NewInsn :\n";
            newMovInsn.Dump();
            aarCGSSAInfo->DumpInsnInSSAForm(newMovInsn);
        }
    }
}
1650
ElimExtensionAfterSameExt(Insn & insn)1651 void ElimSpecificExtensionPattern::ElimExtensionAfterSameExt(Insn &insn)
1652 {
1653 if (extTypeIdx == EXTUNDEF) {
1654 return;
1655 }
1656 auto &prevDstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
1657 auto &currDstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1658 if (prevDstOpnd.GetSize() != currDstOpnd.GetSize()) {
1659 return;
1660 }
1661 MOperator prevMop = prevInsn->GetMachineOpcode();
1662 MOperator currMop = insn.GetMachineOpcode();
1663 for (uint8 i = 0; i < kSameExtPatternNum; i++) {
1664 DEBUG_ASSERT(extTypeIdx < SpecificExtTypeSize, "extTypeIdx must be lower than SpecificExtTypeSize");
1665 if (sameExtMappingTable[extTypeIdx][i][0] == MOP_undef || sameExtMappingTable[extTypeIdx][i][1] == MOP_undef) {
1666 continue;
1667 }
1668 if (prevMop == sameExtMappingTable[extTypeIdx][i][0] && currMop == sameExtMappingTable[extTypeIdx][i][1]) {
1669 ReplaceExtWithMov(insn);
1670 }
1671 }
1672 }
1673
CheckCondition(Insn & insn)1674 bool ElimSpecificExtensionPattern::CheckCondition(Insn &insn)
1675 {
1676 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1677 prevInsn = GetDefInsn(useReg);
1678 InsnSet useInsns = GetAllUseInsn(useReg);
1679 if ((prevInsn == nullptr) || (useInsns.size() != 1)) {
1680 return false;
1681 }
1682 SetOptSceneType();
1683 SetSpecificExtType(insn);
1684 if (sceneType == kSceneUndef) {
1685 return false;
1686 }
1687 return true;
1688 }
1689
Run(BB & bb,Insn & insn)1690 void ElimSpecificExtensionPattern::Run(BB &bb, Insn &insn)
1691 {
1692 if (!CheckCondition(insn)) {
1693 return;
1694 }
1695 if (sceneType == kSceneMov) {
1696 ElimExtensionAfterMov(insn);
1697 } else if (sceneType == kSceneLoad) {
1698 ElimExtensionAfterLoad(insn);
1699 } else if (sceneType == kSceneSameExt) {
1700 ElimExtensionAfterSameExt(insn);
1701 }
1702 }
1703
FindNewMop(const BB & bb,const Insn & insn)1704 void OneHoleBranchPattern::FindNewMop(const BB &bb, const Insn &insn)
1705 {
1706 if (&insn != bb.GetLastInsn()) {
1707 return;
1708 }
1709 MOperator thisMop = insn.GetMachineOpcode();
1710 switch (thisMop) {
1711 case MOP_wcbz:
1712 newOp = MOP_wtbnz;
1713 break;
1714 case MOP_wcbnz:
1715 newOp = MOP_wtbz;
1716 break;
1717 case MOP_xcbz:
1718 newOp = MOP_xtbnz;
1719 break;
1720 case MOP_xcbnz:
1721 newOp = MOP_xtbz;
1722 break;
1723 default:
1724 break;
1725 }
1726 }
1727
1728 /*
1729 * pattern1:
1730 * uxtb w0, w1 <-----(ValidBitsNum <= 8)
1731 * cbz w0, .label
1732 * ===>
1733 * cbz w1, .label
1734 *
1735 * pattern2:
1736 * uxtb w2, w1 <-----(ValidBitsNum == 1)
1737 * eor w3, w2, #1
1738 * cbz w3, .label
1739 * ===>
1740 * tbnz w1, #0, .label
1741 */
Run(BB & bb,Insn & insn)1742 void OneHoleBranchPattern::Run(BB &bb, Insn &insn)
1743 {
1744 if (!CheckCondition(insn)) {
1745 return;
1746 }
1747 LabelOperand &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
1748 bool pattern1 = (prevInsn->GetMachineOpcode() == MOP_xuxtb32) &&
1749 (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() <= k8BitSize ||
1750 static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetValidBitsNum() <= k8BitSize);
1751 if (pattern1) {
1752 Insn &newCbzInsn =
1753 cgFunc->GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), prevInsn->GetOperand(kInsnSecondOpnd), label);
1754 bb.ReplaceInsn(insn, newCbzInsn);
1755 ssaInfo->ReplaceInsn(insn, newCbzInsn);
1756 optSuccess = true;
1757 SetCurrInsn(&newCbzInsn);
1758 if (CG_PEEP_DUMP) {
1759 std::vector<Insn *> prevs;
1760 prevs.emplace_back(prevInsn);
1761 DumpAfterPattern(prevs, &newCbzInsn, nullptr);
1762 }
1763 return;
1764 }
1765 bool pattern2 = (prevInsn->GetMachineOpcode() == MOP_xeorrri13 || prevInsn->GetMachineOpcode() == MOP_weorrri12) &&
1766 (static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue() == 1);
1767 if (pattern2) {
1768 if (!CheckPrePrevInsn()) {
1769 return;
1770 }
1771 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
1772 ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(0, k8BitSize, false);
1773 auto ®Operand = static_cast<RegOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd));
1774 Insn &newTbzInsn = cgFunc->GetInsnBuilder()->BuildInsn(newOp, regOperand, oneHoleOpnd, label);
1775 bb.ReplaceInsn(insn, newTbzInsn);
1776 ssaInfo->ReplaceInsn(insn, newTbzInsn);
1777 optSuccess = true;
1778 if (CG_PEEP_DUMP) {
1779 std::vector<Insn *> prevs;
1780 prevs.emplace_back(prevInsn);
1781 prevs.emplace_back(prePrevInsn);
1782 DumpAfterPattern(prevs, &newTbzInsn, nullptr);
1783 }
1784 }
1785 }
1786
CheckCondition(Insn & insn)1787 bool OneHoleBranchPattern::CheckCondition(Insn &insn)
1788 {
1789 MOperator curMop = insn.GetMachineOpcode();
1790 if (curMop != MOP_wcbz && curMop != MOP_xcbz && curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
1791 return false;
1792 }
1793 FindNewMop(*insn.GetBB(), insn);
1794 if (newOp == MOP_undef) {
1795 return false;
1796 }
1797 auto &useReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1798 prevInsn = GetDefInsn(useReg);
1799 if (prevInsn == nullptr) {
1800 return false;
1801 }
1802 if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
1803 return false;
1804 }
1805 return true;
1806 }
1807
CheckPrePrevInsn()1808 bool OneHoleBranchPattern::CheckPrePrevInsn()
1809 {
1810 auto &useReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
1811 prePrevInsn = GetDefInsn(useReg);
1812 if (prePrevInsn == nullptr) {
1813 return false;
1814 }
1815 if (prePrevInsn->GetMachineOpcode() != MOP_xuxtb32 ||
1816 static_cast<RegOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() != 1) {
1817 return false;
1818 }
1819 if (&(prePrevInsn->GetOperand(kInsnFirstOpnd)) != &(prevInsn->GetOperand(kInsnSecondOpnd))) {
1820 return false;
1821 }
1822 return true;
1823 }
1824
Run(BB & bb,Insn & insn)1825 void OrrToMovPattern::Run(BB &bb, Insn &insn)
1826 {
1827 if (!CheckCondition(insn)) {
1828 return;
1829 }
1830 RegOperand *reg1 = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1831 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *reg1, *reg2);
1832 bb.ReplaceInsn(insn, newInsn);
1833 ssaInfo->ReplaceInsn(insn, newInsn);
1834 optSuccess = true;
1835 SetCurrInsn(&newInsn);
1836 if (CG_PEEP_DUMP) {
1837 std::vector<Insn *> prevs;
1838 prevs.emplace_back(&insn);
1839 DumpAfterPattern(prevs, &newInsn, nullptr);
1840 }
1841 }
1842
CheckCondition(Insn & insn)1843 bool OrrToMovPattern::CheckCondition(Insn &insn)
1844 {
1845 MOperator curMop = insn.GetMachineOpcode();
1846 if (curMop != MOP_wiorrri12 && curMop != MOP_xiorrri13) {
1847 return false;
1848 }
1849 MOperator thisMop = insn.GetMachineOpcode();
1850 Operand *opndOfOrr = nullptr;
1851 switch (thisMop) {
1852 case MOP_wiorrri12: { /* opnd1 is reg32 and opnd3 is immediate. */
1853 opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
1854 reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1855 newMop = MOP_wmovrr;
1856 break;
1857 }
1858 case MOP_xiorrri13: { /* opnd1 is reg64 and opnd3 is immediate. */
1859 opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
1860 reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1861 newMop = MOP_xmovrr;
1862 break;
1863 }
1864 default:
1865 return false;
1866 }
1867 CHECK_FATAL(opndOfOrr->IsIntImmediate(), "expects immediate operand");
1868 ImmOperand *immOpnd = static_cast<ImmOperand *>(opndOfOrr);
1869 if (immOpnd->GetValue() != 0) {
1870 return false;
1871 }
1872 return true;
1873 }
1874
/* Dispatch the "normal" (non-SSA) peephole patterns for one insn, keyed by its
 * machine opcode.  For each opcode, the applicable patterns are run in the
 * listed order through a freshly built PeepOptimizeManager. */
void AArch64CGPeepHole::DoNormalOptimize(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    /* a new manager per insn so every pattern sees the current bb/insn */
    manager = peepMemPool->New<PeepOptimizeManager>(*cgFunc, bb, insn);
    switch (thisMop) {
        /*
         * e.g.
         * execute before & after RA: manager->NormalPatternOpt<>(true)
         * execute before RA: manager->NormalPatternOpt<>(!cgFunc->IsAfterRegAlloc())
         * execute after RA: manager->NormalPatternOpt<>(cgFunc->IsAfterRegAlloc())
         */
        case MOP_xubfxrri6i6: {
            manager->NormalPatternOpt<UbfxToUxtwPattern>(!cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_xmovzri16: {
            manager->NormalPatternOpt<LoadFloatPointPattern>(!cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_wcmpri: {
            manager->NormalPatternOpt<LongIntCompareWithZPattern>(!cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_wmovrr:
        case MOP_xmovrr:
        case MOP_xvmovs:
        case MOP_xvmovd:
        case MOP_vmovuu:
        case MOP_vmovvv: {
            manager->NormalPatternOpt<RemoveMovingtoSameRegPattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        /* all plain load/store forms: try pairing, same-mem forwarding, and
         * identical load/store elimination, in that order */
        case MOP_wstrb:
        case MOP_wldrb:
        case MOP_wstrh:
        case MOP_wldrh:
        case MOP_xldr:
        case MOP_xstr:
        case MOP_wldr:
        case MOP_wstr:
        case MOP_dldr:
        case MOP_dstr:
        case MOP_sldr:
        case MOP_sstr:
        case MOP_qldr:
        case MOP_qstr: {
            manager->NormalPatternOpt<CombineContiLoadAndStorePattern>(cgFunc->IsAfterRegAlloc());
            manager->NormalPatternOpt<ContiLDRorSTRToSameMEMPattern>(cgFunc->IsAfterRegAlloc());
            manager->NormalPatternOpt<RemoveIdenticalLoadAndStorePattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_xvmovrv:
        case MOP_xvmovrd: {
            manager->NormalPatternOpt<FmovRegPattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_xsbfxrri6i6: {
            manager->NormalPatternOpt<SbfxOptPattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_wcbz:
        case MOP_xcbz:
        case MOP_wcbnz:
        case MOP_xcbnz: {
            manager->NormalPatternOpt<AndCbzToTbzPattern>(!cgFunc->IsAfterRegAlloc());
            manager->NormalPatternOpt<CbnzToCbzPattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        case MOP_wsdivrrr: {
            manager->NormalPatternOpt<ReplaceDivToMultiPattern>(cgFunc->IsAfterRegAlloc());
            break;
        }
        /* call insns: Java reference-counting cleanups and GC write-field opt */
        case MOP_xbl: {
            if (JAVALANG) {
                manager->NormalPatternOpt<RemoveIncRefPattern>(!cgFunc->IsAfterRegAlloc());
                manager->NormalPatternOpt<RemoveDecRefPattern>(!cgFunc->IsAfterRegAlloc());
                manager->NormalPatternOpt<ReplaceIncDecWithIncPattern>(!cgFunc->IsAfterRegAlloc());
                manager->NormalPatternOpt<RemoveIncDecRefPattern>(cgFunc->IsAfterRegAlloc());
            }
            if (CGOptions::IsGCOnly() && CGOptions::DoWriteRefFieldOpt()) {
                manager->NormalPatternOpt<WriteFieldCallPattern>(!cgFunc->IsAfterRegAlloc());
            }
            break;
        }
        default:
            break;
    }
}
1963 /* ======== CGPeepPattern End ======== */
1964
/* Allocate the pattern objects used by AArch64PeepHole::Run, indexed by the
 * kXxxOpt enumerators; all objects live in this phase's own mempool. */
void AArch64PeepHole::InitOpts()
{
    optimizations.resize(kPeepholeOptsNum);
    optimizations[kEliminateSpecifcSXTOpt] = optOwnMemPool->New<EliminateSpecifcSXTAArch64>(cgFunc);
    optimizations[kEliminateSpecifcUXTOpt] = optOwnMemPool->New<EliminateSpecifcUXTAArch64>(cgFunc);
    optimizations[kCsetCbzToBeqOpt] = optOwnMemPool->New<CsetCbzToBeqOptAArch64>(cgFunc);
    optimizations[kAndCmpBranchesToCsetOpt] = optOwnMemPool->New<AndCmpBranchesToCsetAArch64>(cgFunc);
    optimizations[kAndCmpBranchesToTstOpt] = optOwnMemPool->New<AndCmpBranchesToTstAArch64>(cgFunc);
    optimizations[kAndCbzBranchesToTstOpt] = optOwnMemPool->New<AndCbzBranchesToTstAArch64>(cgFunc);
    optimizations[kZeroCmpBranchesOpt] = optOwnMemPool->New<ZeroCmpBranchesAArch64>(cgFunc);
    optimizations[kCselZeroOneToCsetOpt] = optOwnMemPool->New<CselZeroOneToCsetOpt>(cgFunc);
    optimizations[kAndCmpCsetEorCbzOpt] = optOwnMemPool->New<AndCmpCsetEorCbzOpt>(cgFunc);
    optimizations[kAddLdrOpt] = optOwnMemPool->New<AddLdrOpt>(cgFunc);
    optimizations[kCsetEorOpt] = optOwnMemPool->New<CsetEorOpt>(cgFunc);
    optimizations[kMoveCmpOpt] = optOwnMemPool->New<MoveCmpOpt>(cgFunc);
}
1981
Run(BB & bb,Insn & insn)1982 void AArch64PeepHole::Run(BB &bb, Insn &insn)
1983 {
1984 MOperator thisMop = insn.GetMachineOpcode();
1985 switch (thisMop) {
1986 case MOP_xsxtb32:
1987 case MOP_xsxth32:
1988 case MOP_xsxtb64:
1989 case MOP_xsxth64:
1990 case MOP_xsxtw64: {
1991 (static_cast<EliminateSpecifcSXTAArch64 *>(optimizations[kEliminateSpecifcSXTOpt]))->Run(bb, insn);
1992 break;
1993 }
1994 case MOP_xuxtb32:
1995 case MOP_xuxth32:
1996 case MOP_xuxtw64: {
1997 (static_cast<EliminateSpecifcUXTAArch64 *>(optimizations[kEliminateSpecifcUXTOpt]))->Run(bb, insn);
1998 break;
1999 }
2000 case MOP_wcbnz:
2001 case MOP_xcbnz: {
2002 (static_cast<CsetCbzToBeqOptAArch64 *>(optimizations[kCsetCbzToBeqOpt]))->Run(bb, insn);
2003 break;
2004 }
2005 case MOP_wcbz:
2006 case MOP_xcbz: {
2007 (static_cast<CsetCbzToBeqOptAArch64 *>(optimizations[kCsetCbzToBeqOpt]))->Run(bb, insn);
2008 break;
2009 }
2010 case MOP_xandrrr:
2011 case MOP_wandrrr:
2012 case MOP_wandrri12:
2013 case MOP_xandrri13: {
2014 (static_cast<AndCmpCsetEorCbzOpt *>(optimizations[kAndCmpCsetEorCbzOpt]))->Run(bb, insn);
2015 (static_cast<AndCmpBranchesToTstAArch64 *>(optimizations[kAndCmpBranchesToTstOpt]))->Run(bb, insn);
2016 (static_cast<AndCbzBranchesToTstAArch64 *>(optimizations[kAndCbzBranchesToTstOpt]))->Run(bb, insn);
2017 break;
2018 }
2019 case MOP_wcsetrc:
2020 case MOP_xcsetrc: {
2021 (static_cast<CsetEorOpt *>(optimizations[kCsetEorOpt]))->Run(bb, insn);
2022 (static_cast<AndCmpBranchesToCsetAArch64 *>(optimizations[kAndCmpBranchesToCsetOpt]))->Run(bb, insn);
2023 break;
2024 }
2025 case MOP_xmovri64:
2026 case MOP_wmovri32: {
2027 static_cast<MoveCmpOpt *>(optimizations[kMoveCmpOpt])->Run(bb, insn);
2028 break;
2029 }
2030 case MOP_xaddrrr: {
2031 (static_cast<AddLdrOpt *>(optimizations[kAddLdrOpt]))->Run(bb, insn);
2032 break;
2033 }
2034 case MOP_wcselrrrc:
2035 case MOP_xcselrrrc: {
2036 (static_cast<CselZeroOneToCsetOpt *>(optimizations[kCselZeroOneToCsetOpt]))->Run(bb, insn);
2037 break;
2038 }
2039 default:
2040 break;
2041 }
2042 if (&insn == bb.GetLastInsn()) {
2043 (static_cast<ZeroCmpBranchesAArch64 *>(optimizations[kZeroCmpBranchesOpt]))->Run(bb, insn);
2044 }
2045 }
2046
/* Allocate the pattern objects used by AArch64PeepHole0::Run, indexed by the
 * kXxxOpt enumerators; all objects live in this phase's own mempool. */
void AArch64PeepHole0::InitOpts()
{
    optimizations.resize(kPeepholeOptsNum);
    optimizations[kRemoveIdenticalLoadAndStoreOpt] = optOwnMemPool->New<RemoveIdenticalLoadAndStoreAArch64>(cgFunc);
    optimizations[kCmpCsetOpt] = optOwnMemPool->New<CmpCsetAArch64>(cgFunc);
    optimizations[kComplexMemOperandOptAdd] = optOwnMemPool->New<ComplexMemOperandAddAArch64>(cgFunc);
    optimizations[kDeleteMovAfterCbzOrCbnzOpt] = optOwnMemPool->New<DeleteMovAfterCbzOrCbnzAArch64>(cgFunc);
    optimizations[kRemoveSxtBeforeStrOpt] = optOwnMemPool->New<RemoveSxtBeforeStrAArch64>(cgFunc);
    optimizations[kRemoveMovingtoSameRegOpt] = optOwnMemPool->New<RemoveMovingtoSameRegAArch64>(cgFunc);
    optimizations[kEnhanceStrLdrAArch64Opt] = optOwnMemPool->New<EnhanceStrLdrAArch64>(cgFunc);
}
2058
/* Per-insn driver for the AArch64PeepHole0 phase: dispatch the registered
 * optimizations by machine opcode. */
void AArch64PeepHole0::Run(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    switch (thisMop) {
        case MOP_wcmpri:
        case MOP_xcmpri: {
            (static_cast<CmpCsetAArch64 *>(optimizations[kCmpCsetOpt]))->Run(bb, insn);
            break;
        }
        case MOP_xaddrrr: {
            (static_cast<ComplexMemOperandAddAArch64 *>(optimizations[kComplexMemOperandOptAdd]))->Run(bb, insn);
            break;
        }
        case MOP_wcbz:
        case MOP_xcbz:
        case MOP_wcbnz:
        case MOP_xcbnz: {
            (static_cast<DeleteMovAfterCbzOrCbnzAArch64 *>(optimizations[kDeleteMovAfterCbzOrCbnzOpt]))->Run(bb, insn);
            break;
        }
        case MOP_wstrh:
        case MOP_wstrb: {
            (static_cast<RemoveSxtBeforeStrAArch64 *>(optimizations[kRemoveSxtBeforeStrOpt]))->Run(bb, insn);
            break;
        }
        case MOP_wmovrr:
        case MOP_xmovrr:
        case MOP_xvmovs:
        case MOP_xvmovd:
        case MOP_vmovuu:
        case MOP_vmovvv: {
            (static_cast<RemoveMovingtoSameRegAArch64 *>(optimizations[kRemoveMovingtoSameRegOpt]))->Run(bb, insn);
            break;
        }
        case MOP_xldr:
        case MOP_xstr:
        case MOP_wldr:
        case MOP_wstr:
        case MOP_dldr:
        case MOP_dstr:
        case MOP_sldr:
        case MOP_sstr: {
            /* identical load/store elimination applies to 32/64-bit stores only */
            if (thisMop == MOP_wstr || thisMop == MOP_xstr) {
                (static_cast<RemoveIdenticalLoadAndStoreAArch64 *>(optimizations[kRemoveIdenticalLoadAndStoreOpt]))
                    ->Run(bb, insn);
            }
            (static_cast<EnhanceStrLdrAArch64*>(optimizations[kEnhanceStrLdrAArch64Opt]))->Run(bb, insn);
            break;
        }
        default:
            break;
    }
}
2112
/* Allocate the pattern objects used by AArch64PrePeepHole::Run, indexed by the
 * kXxxOpt enumerators; all objects live in this phase's own mempool. */
void AArch64PrePeepHole::InitOpts()
{
    optimizations.resize(kPeepholeOptsNum);
    optimizations[kOneHoleBranchesPreOpt] = optOwnMemPool->New<OneHoleBranchesPreAArch64>(cgFunc);
    optimizations[kReplaceOrrToMovOpt] = optOwnMemPool->New<ReplaceOrrToMovAArch64>(cgFunc);
    optimizations[kReplaceCmpToCmnOpt] = optOwnMemPool->New<ReplaceCmpToCmnAArch64>(cgFunc);
    optimizations[kComplexMemOperandOpt] = optOwnMemPool->New<ComplexMemOperandAArch64>(cgFunc);
    optimizations[kComplexMemOperandPreOptAdd] = optOwnMemPool->New<ComplexMemOperandPreAddAArch64>(cgFunc);
    optimizations[kComplexMemOperandOptLSL] = optOwnMemPool->New<ComplexMemOperandLSLAArch64>(cgFunc);
    optimizations[kComplexMemOperandOptLabel] = optOwnMemPool->New<ComplexMemOperandLabelAArch64>(cgFunc);
    optimizations[kDuplicateExtensionOpt] = optOwnMemPool->New<ElimDuplicateExtensionAArch64>(cgFunc);
    optimizations[kEnhanceStrLdrAArch64Opt] = optOwnMemPool->New<EnhanceStrLdrAArch64>(cgFunc);
}
2126
/* Per-insn driver for the AArch64PrePeepHole phase (runs before RA): dispatch
 * the registered optimizations by machine opcode, then run the one-hole-branch
 * pre-pattern once when insn is the last insn of the block. */
void AArch64PrePeepHole::Run(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    switch (thisMop) {
        case MOP_wiorrri12:
        case MOP_xiorrri13: {
            (static_cast<ReplaceOrrToMovAArch64 *>(optimizations[kReplaceOrrToMovOpt]))->Run(bb, insn);
            break;
        }
        case MOP_wmovri32:
        case MOP_xmovri64: {
            (static_cast<ReplaceCmpToCmnAArch64 *>(optimizations[kReplaceCmpToCmnOpt]))->Run(bb, insn);
            break;
        }
        case MOP_xadrpl12: {
            (static_cast<ComplexMemOperandAArch64 *>(optimizations[kComplexMemOperandOpt]))->Run(bb, insn);
            break;
        }
        case MOP_xaddrrr: {
            (static_cast<ComplexMemOperandPreAddAArch64 *>(optimizations[kComplexMemOperandPreOptAdd]))->Run(bb, insn);
            break;
        }
        case MOP_xaddrrrs: {
            (static_cast<ComplexMemOperandLSLAArch64 *>(optimizations[kComplexMemOperandOptLSL]))->Run(bb, insn);
            break;
        }
        /* all sign/zero extension forms share the duplicate-extension elimination */
        case MOP_xsxtb32:
        case MOP_xsxth32:
        case MOP_xsxtb64:
        case MOP_xsxth64:
        case MOP_xsxtw64:
        case MOP_xuxtb32:
        case MOP_xuxth32:
        case MOP_xuxtw64: {
            (static_cast<ElimDuplicateExtensionAArch64 *>(optimizations[kDuplicateExtensionOpt]))->Run(bb, insn);
            break;
        }
        case MOP_xldli: {
            (static_cast<ComplexMemOperandLabelAArch64 *>(optimizations[kComplexMemOperandOptLabel]))->Run(bb, insn);
            break;
        }
        case MOP_xldr:
        case MOP_xstr:
        case MOP_wldr:
        case MOP_wstr:
        case MOP_dldr:
        case MOP_dstr:
        case MOP_sldr:
        case MOP_sstr: {
            (static_cast<EnhanceStrLdrAArch64 *>(optimizations[kEnhanceStrLdrAArch64Opt]))->Run(bb, insn);
            break;
        }
        default:
            break;
    }
    /* terminator-only pattern */
    if (&insn == bb.GetLastInsn()) {
        (static_cast<OneHoleBranchesPreAArch64 *>(optimizations[kOneHoleBranchesPreOpt]))->Run(bb, insn);
    }
}
2186
/* Allocate the pattern objects used by AArch64PrePeepHole1::Run, indexed by
 * the kXxxOpt enumerators; all objects live in this phase's own mempool. */
void AArch64PrePeepHole1::InitOpts()
{
    optimizations.resize(kPeepholeOptsNum);
    optimizations[kOneHoleBranchesOpt] = optOwnMemPool->New<OneHoleBranchesAArch64>(cgFunc);
    optimizations[kAndCmpBranchesToTbzOpt] = optOwnMemPool->New<AndCmpBranchesToTbzAArch64>(cgFunc);
    optimizations[kComplexExtendWordLslOpt] = optOwnMemPool->New<ComplexExtendWordLslAArch64>(cgFunc);
}
2194
/* Per-insn driver for the AArch64PrePeepHole1 phase: word-extend/LSL folding
 * for any insn, plus branch patterns that only apply to the block's last insn. */
void AArch64PrePeepHole1::Run(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    switch (thisMop) {
        case MOP_xsxtw64:
        case MOP_xuxtw64: {
            (static_cast<ComplexExtendWordLslAArch64 *>(optimizations[kComplexExtendWordLslOpt]))->Run(bb, insn);
            break;
        }
        default:
            break;
    }
    /* terminator-only patterns, chosen by the terminator's opcode */
    if (&insn == bb.GetLastInsn()) {
        switch (thisMop) {
            case MOP_wcbz:
            case MOP_wcbnz:
            case MOP_xcbz:
            case MOP_xcbnz: {
                (static_cast<OneHoleBranchesAArch64 *>(optimizations[kOneHoleBranchesOpt]))->Run(bb, insn);
                break;
            }
            case MOP_beq:
            case MOP_bne: {
                (static_cast<AndCmpBranchesToTbzAArch64 *>(optimizations[kAndCmpBranchesToTbzOpt]))->Run(bb, insn);
                break;
            }
            default:
                break;
        }
    }
}
2226
CheckCondition(Insn & insn)2227 bool RemoveIdenticalLoadAndStorePattern::CheckCondition(Insn &insn)
2228 {
2229 nextInsn = insn.GetNextMachineInsn();
2230 if (nextInsn == nullptr) {
2231 return false;
2232 }
2233 return true;
2234 }
2235
Run(BB & bb,Insn & insn)2236 void RemoveIdenticalLoadAndStorePattern::Run(BB &bb, Insn &insn)
2237 {
2238 if (!CheckCondition(insn)) {
2239 return;
2240 }
2241 MOperator mop1 = insn.GetMachineOpcode();
2242 MOperator mop2 = nextInsn->GetMachineOpcode();
2243 if ((mop1 == MOP_wstr && mop2 == MOP_wstr) || (mop1 == MOP_xstr && mop2 == MOP_xstr)) {
2244 if (IsMemOperandsIdentical(insn, *nextInsn)) {
2245 bb.RemoveInsn(insn);
2246 }
2247 } else if ((mop1 == MOP_wstr && mop2 == MOP_wldr) || (mop1 == MOP_xstr && mop2 == MOP_xldr)) {
2248 if (IsMemOperandsIdentical(insn, *nextInsn)) {
2249 bb.RemoveInsn(*nextInsn);
2250 }
2251 }
2252 }
2253
IsMemOperandsIdentical(const Insn & insn1,const Insn & insn2) const2254 bool RemoveIdenticalLoadAndStorePattern::IsMemOperandsIdentical(const Insn &insn1, const Insn &insn2) const
2255 {
2256 regno_t regNO1 = static_cast<RegOperand &>(insn1.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2257 regno_t regNO2 = static_cast<RegOperand &>(insn2.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2258 if (regNO1 != regNO2) {
2259 return false;
2260 }
2261 /* Match only [base + offset] */
2262 auto &memOpnd1 = static_cast<MemOperand &>(insn1.GetOperand(kInsnSecondOpnd));
2263 if (memOpnd1.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
2264 return false;
2265 }
2266 auto &memOpnd2 = static_cast<MemOperand &>(insn2.GetOperand(kInsnSecondOpnd));
2267 if (memOpnd2.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
2268 return false;
2269 }
2270 Operand *base1 = memOpnd1.GetBaseRegister();
2271 Operand *base2 = memOpnd2.GetBaseRegister();
2272 if (!((base1 != nullptr) && base1->IsRegister()) || !((base2 != nullptr) && base2->IsRegister())) {
2273 return false;
2274 }
2275
2276 regno_t baseRegNO1 = static_cast<RegOperand *>(base1)->GetRegisterNumber();
2277 /* First insn re-write base addr reg1 <- [ reg1 + offset ] */
2278 if (baseRegNO1 == regNO1) {
2279 return false;
2280 }
2281
2282 regno_t baseRegNO2 = static_cast<RegOperand *>(base2)->GetRegisterNumber();
2283 if (baseRegNO1 != baseRegNO2) {
2284 return false;
2285 }
2286
2287 return memOpnd1.GetOffsetImmediate()->GetOffsetValue() == memOpnd2.GetOffsetImmediate()->GetOffsetValue();
2288 }
2289
Run(BB & bb,Insn & insn)2290 void RemoveIdenticalLoadAndStoreAArch64::Run(BB &bb, Insn &insn)
2291 {
2292 Insn *nextInsn = insn.GetNextMachineInsn();
2293 if (nextInsn == nullptr) {
2294 return;
2295 }
2296 MOperator mop1 = insn.GetMachineOpcode();
2297 MOperator mop2 = nextInsn->GetMachineOpcode();
2298 if ((mop1 == MOP_wstr && mop2 == MOP_wstr) || (mop1 == MOP_xstr && mop2 == MOP_xstr)) {
2299 if (IsMemOperandsIdentical(insn, *nextInsn)) {
2300 bb.RemoveInsn(insn);
2301 }
2302 } else if ((mop1 == MOP_wstr && mop2 == MOP_wldr) || (mop1 == MOP_xstr && mop2 == MOP_xldr)) {
2303 if (IsMemOperandsIdentical(insn, *nextInsn)) {
2304 bb.RemoveInsn(*nextInsn);
2305 }
2306 }
2307 }
2308
IsMemOperandsIdentical(const Insn & insn1,const Insn & insn2) const2309 bool RemoveIdenticalLoadAndStoreAArch64::IsMemOperandsIdentical(const Insn &insn1, const Insn &insn2) const
2310 {
2311 regno_t regNO1 = static_cast<RegOperand &>(insn1.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2312 regno_t regNO2 = static_cast<RegOperand &>(insn2.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
2313 if (regNO1 != regNO2) {
2314 return false;
2315 }
2316 /* Match only [base + offset] */
2317 auto &memOpnd1 = static_cast<MemOperand &>(insn1.GetOperand(kInsnSecondOpnd));
2318 if (memOpnd1.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
2319 return false;
2320 }
2321 auto &memOpnd2 = static_cast<MemOperand &>(insn2.GetOperand(kInsnSecondOpnd));
2322 if (memOpnd2.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd1.IsIntactIndexed()) {
2323 return false;
2324 }
2325 Operand *base1 = memOpnd1.GetBaseRegister();
2326 Operand *base2 = memOpnd2.GetBaseRegister();
2327 if (!((base1 != nullptr) && base1->IsRegister()) || !((base2 != nullptr) && base2->IsRegister())) {
2328 return false;
2329 }
2330
2331 regno_t baseRegNO1 = static_cast<RegOperand *>(base1)->GetRegisterNumber();
2332 /* First insn re-write base addr reg1 <- [ reg1 + offset ] */
2333 if (baseRegNO1 == regNO1) {
2334 return false;
2335 }
2336
2337 regno_t baseRegNO2 = static_cast<RegOperand *>(base2)->GetRegisterNumber();
2338 if (baseRegNO1 != baseRegNO2) {
2339 return false;
2340 }
2341
2342 return memOpnd1.GetOffsetImmediate()->GetOffsetValue() == memOpnd2.GetOffsetImmediate()->GetOffsetValue();
2343 }
2344
CheckCondition(Insn & insn)2345 bool RemoveMovingtoSameRegPattern::CheckCondition(Insn &insn)
2346 {
2347 DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "expects registers");
2348 DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "expects registers");
2349 auto ®1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2350 auto ®2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2351 /* remove mov x0,x0 when it cast i32 to i64 */
2352 if ((reg1.GetRegisterNumber() == reg2.GetRegisterNumber()) && (reg1.GetSize() >= reg2.GetSize())) {
2353 return true;
2354 }
2355 return false;
2356 }
2357
Run(BB & bb,Insn & insn)2358 void RemoveMovingtoSameRegPattern::Run(BB &bb, Insn &insn)
2359 {
2360 /* remove mov x0,x0 when it cast i32 to i64 */
2361 if (CheckCondition(insn)) {
2362 bb.RemoveInsn(insn);
2363 }
2364 }
2365
Run(BB & bb,Insn & insn)2366 void RemoveMovingtoSameRegAArch64::Run(BB &bb, Insn &insn)
2367 {
2368 DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "expects registers");
2369 DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "expects registers");
2370 auto ®1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
2371 auto ®2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
2372 /* remove mov x0,x0 when it cast i32 to i64 */
2373 if ((reg1.GetRegisterNumber() == reg2.GetRegisterNumber()) && (reg1.GetSize() >= reg2.GetSize())) {
2374 bb.RemoveInsn(insn);
2375 }
2376 }
2377
CheckOperandIsDeadFromInsn(const RegOperand & regOpnd,Insn & insn)2378 bool EnhanceStrLdrAArch64::CheckOperandIsDeadFromInsn(const RegOperand ®Opnd, Insn &insn)
2379 {
2380 for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
2381 auto &opnd = insn.GetOperand(i);
2382 if (!insn.GetDesc()->GetOpndDes(i)->IsRegDef()) {
2383 continue;
2384 }
2385 // regOpnd is redefined at curInsn
2386 if (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
2387 return true;
2388 }
2389 }
2390 return !IfOperandIsLiveAfterInsn(regOpnd, insn);
2391 }
2392
/* Fold the immediate of an add/sub insn into the given memory offset and
 * return a new 64-bit offset operand.
 * - MOP_xaddrri12/xsubrri12: plain 12-bit immediate; the result takes the
 *   add/sub immediate's vary flag (folding two kUnAdjustVary immediates is
 *   unsupported and trips CHECK_FATAL).
 * - otherwise (the shifted rri24 forms): immediate shifted left by the
 *   BitShiftOperand amount, which must be 12; the result keeps `offset`'s
 *   vary flag.
 * For the sub opcodes the folded immediate is negated. */
ImmOperand *EnhanceStrLdrAArch64::GetInsnAddOrSubNewOffset(Insn &insn, ImmOperand &offset)
{
    int64 val = 0;
    VaryType vary = offset.GetVary();
    auto mOp = insn.GetMachineOpcode();
    if (mOp == MOP_xaddrri12 || mOp == MOP_xsubrri12) {
        auto &immOpnd = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd));
        val = immOpnd.GetValue();
        CHECK_FATAL(!(vary == kUnAdjustVary && immOpnd.GetVary() == kUnAdjustVary), "NIY, can not deal this case!");
        vary = immOpnd.GetVary();
    } else {
        auto &immOpnd = static_cast<ImmOperand&>(insn.GetOperand(kInsnThirdOpnd));
        auto &shiftOpnd = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
        CHECK_FATAL(shiftOpnd.GetShiftAmount() == 12, "invalid shiftAmount");
        val = (immOpnd.GetValue() << shiftOpnd.GetShiftAmount());
    }

    /* sub means the folded amount is subtracted from the base */
    if (mOp == MOP_xsubrri12 || mOp == MOP_xsubrri24) {
        val = -val;
    }
    val += offset.GetValue();
    auto &newImm = static_cast<AArch64CGFunc&>(cgFunc).GetOrCreateOfstOpnd(val, k64BitSize);
    newImm.SetVary(vary);
    return &newImm;
}
2418
/* Fold "add/sub base, src, #imm ; ldr/str reg, [base, #ofst]" into
 * "ldr/str reg, [src, #ofst +/- imm]" and delete the add/sub, provided the
 * add/sub defines exactly the memory base and that register dies at insn.
 * The mem operand is mutated first and the change undone if the combined
 * immediate is not encodable for this opcode. */
void EnhanceStrLdrAArch64::OptimizeAddrBOI(Insn &insn, MemOperand &memOpnd, Insn &prevInsn)
{
    auto *oriBase = memOpnd.GetBaseRegister();
    auto *oriOffset = memOpnd.GetOffsetOperand();
    auto &defOpnd = static_cast<RegOperand&>(prevInsn.GetOperand(kInsnFirstOpnd));
    if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
        return;
    }
    auto *newBase = static_cast<RegOperand*>(&prevInsn.GetOperand(kInsnSecondOpnd));
    auto *newOffset = GetInsnAddOrSubNewOffset(prevInsn, *memOpnd.GetOffsetOperand());
    if (newOffset->GetValue() < 0) {
        return; // obj dump cannot deal str x19, [x29,#-16]
    }

    memOpnd.SetBaseRegister(*newBase);
    memOpnd.SetOffsetOperand(*newOffset);
    if (!static_cast<AArch64CGFunc&>(cgFunc).IsOperandImmValid(insn.GetMachineOpcode(), &memOpnd, kInsnSecondOpnd)) {
        // If new offset is invalid, undo it
        memOpnd.SetBaseRegister(*oriBase);
        memOpnd.SetOffsetOperand(*oriOffset);
        return;
    }
    memOpnd.SetAddrMode(MemOperand::kAddrModeBOi);
    /* the add/sub is now folded into the addressing mode */
    prevInsn.GetBB()->RemoveInsn(prevInsn);
}
2444
/* After OptimizeAddrBOrX has produced a [base, index] operand, additionally
 * fold a preceding uxtw/sxtw/lsl that defines the index register into the
 * addressing mode ([base, wN, (s|u)xtw] or [base, xN, lsl #s]) and delete the
 * extend/shift insn.  Only applies when the extend/shift result dies at insn
 * and the shift amount matches the access width. */
void EnhanceStrLdrAArch64::OptimizeAddrBOrXShiftExtend(Insn &insn, MemOperand &memOpnd, Insn &shiftExtendInsn)
{
    auto mOp = shiftExtendInsn.GetMachineOpcode();
    if (mOp != MOP_xuxtw64 && mOp != MOP_xsxtw64 && mOp != MOP_xlslrri6) {
        return;
    }
    auto *oriIndex = memOpnd.GetIndexRegister();
    auto &defOpnd = static_cast<RegOperand&>(shiftExtendInsn.GetOperand(kInsnFirstOpnd));
    if (defOpnd.GetRegisterNumber() != oriIndex->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
        return;
    }
    auto &newIndex = static_cast<RegOperand&>(shiftExtendInsn.GetOperand(kInsnSecondOpnd));
    bool isSigned = (mOp == MOP_xsxtw64);
    uint32 shift = 0;
    if (mOp == MOP_xlslrri6) {
        shift = static_cast<uint32>(static_cast<ImmOperand&>(shiftExtendInsn.GetOperand(kInsnThirdOpnd)).GetValue());
    }
    const uint32 regSize = insn.GetDesc()->GetOpndDes(kInsnFirstOpnd)->GetSize();
    // lsl extend insn shift amount can only be 0 or 1(16-bit def opnd) or 2(32-bit def opnd) or
    // 3(64-bit def opnd) or 4(128-bit def opnd) in ldr/str insn, and in this pattern we only have
    // 32-bit & 64-bit situation now
    if ((shift == k0BitSize) || (regSize == k32BitSize && shift == k2BitSize) ||
        (regSize == k64BitSize && shift == k3BitSize)) {
        auto *newMemOpnd = static_cast<AArch64CGFunc&>(cgFunc).CreateMemOperand(MemOperand::kAddrModeBOrX,
            memOpnd.GetSize(), *memOpnd.GetBaseRegister(), newIndex, shift, isSigned);
        insn.SetOperand(kInsnSecondOpnd, *newMemOpnd);
        shiftExtendInsn.GetBB()->RemoveInsn(shiftExtendInsn);
    }
}
2474
/* Fold "add base, rn, rm ; ldr/str reg, [base]" (zero, non-vary offset only)
 * into "ldr/str reg, [rn, rm]" and delete the add, provided the add defines
 * exactly the memory base and that register dies at insn.  Then try to fold a
 * preceding extend/shift of the index via OptimizeAddrBOrXShiftExtend. */
void EnhanceStrLdrAArch64::OptimizeAddrBOrX(Insn &insn, MemOperand &memOpnd, Insn &prevInsn)
{
    if (memOpnd.GetOffsetOperand()->GetValue() != 0 || memOpnd.GetOffsetOperand()->GetVary() == kUnAdjustVary) {
        return;
    }
    auto *oriBase = memOpnd.GetBaseRegister();
    auto &defOpnd = static_cast<RegOperand&>(prevInsn.GetOperand(kInsnFirstOpnd));
    if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
        return;
    }
    auto *newBase = static_cast<RegOperand*>(&prevInsn.GetOperand(kInsnSecondOpnd));
    auto *newIndex = static_cast<RegOperand*>(&prevInsn.GetOperand(kInsnThirdOpnd));

    memOpnd.SetBaseRegister(*newBase);
    memOpnd.SetIndexRegister(*newIndex);
    memOpnd.SetAddrMode(MemOperand::kAddrModeBOrX);
    /* the insn before the add may extend/shift the index; try folding it too */
    auto *prevShiftExtendInsn = prevInsn.GetPreviousMachineInsn();
    if (prevShiftExtendInsn != nullptr) {
        OptimizeAddrBOrXShiftExtend(insn, memOpnd, *prevShiftExtendInsn);
    }
    prevInsn.GetBB()->RemoveInsn(prevInsn);
}
2497
/* Fold "add base, rn, rm, lsl #s ; ldr/str reg, [base]" (zero, non-vary
 * offset only) into "ldr/str reg, [rn, rm, lsl #s]" and delete the add,
 * provided the add defines exactly the memory base and that register dies at
 * insn.  Only the LSL shift form is accepted. */
void EnhanceStrLdrAArch64::OptimizeWithAddrrrs(Insn &insn, MemOperand &memOpnd, Insn &addInsn)
{
    if (memOpnd.GetOffsetOperand()->GetValue() != 0 || memOpnd.GetOffsetOperand()->GetVary() != kNotVary) {
        return;
    }
    auto *oriBase = memOpnd.GetBaseRegister();
    auto &defOpnd = static_cast<RegOperand&>(addInsn.GetOperand(kInsnFirstOpnd));
    if (defOpnd.GetRegisterNumber() != oriBase->GetRegisterNumber() || !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
        return;
    }
    auto &newBase = static_cast<RegOperand&>(addInsn.GetOperand(kInsnSecondOpnd));
    auto &newIndex = static_cast<RegOperand&>(addInsn.GetOperand(kInsnThirdOpnd));
    auto &shift = static_cast<BitShiftOperand&>(addInsn.GetOperand(kInsnFourthOpnd));
    if (shift.GetShiftOp() != BitShiftOperand::kLSL) {
        return;
    }
    auto *newMemOpnd = static_cast<AArch64CGFunc&>(cgFunc).CreateMemOperand(MemOperand::kAddrModeBOrX,
        memOpnd.GetSize(), newBase, newIndex, shift.GetShiftAmount());
    insn.SetOperand(kInsnSecondOpnd, *newMemOpnd);
    addInsn.GetBB()->RemoveInsn(addInsn);
}
2519
/* Entry point: for a ldr/str with a plain [base, #imm] operand, first strip
 * any chain of "mov xN, xM" copies feeding the base (rewriting the base and
 * deleting each dead mov), then try to fold the remaining defining add/sub
 * into the addressing mode via the OptimizeAddr* helpers. */
void EnhanceStrLdrAArch64::Run(BB &bb, Insn &insn)
{
    Operand &opnd = insn.GetOperand(kInsnSecondOpnd);
    CHECK_FATAL(opnd.IsMemoryAccessOperand(), "Unexpected operand in EnhanceStrLdrAArch64");
    auto &memOpnd = static_cast<MemOperand&>(opnd);
    if (memOpnd.GetAddrMode() != MemOperand::kAddrModeBOi || !memOpnd.GetOffsetImmediate()->IsImmOffset()) {
        return;
    }

    auto *prev = insn.GetPreviousMachineInsn();
    while (prev != nullptr) {
        if (prev->GetMachineOpcode() == MOP_xmovrr) {
            auto &defOpnd = static_cast<RegOperand&>(prev->GetOperand(kInsnFirstOpnd));
            /* the mov must define our base and that def must die at insn */
            if (defOpnd.GetRegisterNumber() != memOpnd.GetBaseRegister()->GetRegisterNumber() ||
                !CheckOperandIsDeadFromInsn(defOpnd, insn)) {
                return;
            }
            /* rebase onto the mov's source, drop the dead mov, keep walking back */
            memOpnd.SetBaseRegister(static_cast<RegOperand&>(prev->GetOperand(kInsnSecondOpnd)));
            auto *tmpInsn = prev;
            prev = prev->GetPreviousMachineInsn();
            tmpInsn->GetBB()->RemoveInsn(*tmpInsn);
            continue;
        }
        break;
    }
    if (prev == nullptr) {
        return;
    }
    /* dispatch on the form of the base-defining arithmetic insn */
    auto prevMop = prev->GetMachineOpcode();
    if (prevMop == MOP_xaddrri12 || prevMop == MOP_xsubrri12 || prevMop == MOP_xaddrri24 || prevMop == MOP_xsubrri24) {
        OptimizeAddrBOI(insn, memOpnd, *prev);
    } else if (prevMop == MOP_xaddrrr) {
        OptimizeAddrBOrX(insn, memOpnd, *prev);
    } else if (prevMop == MOP_xaddrrrs) {
        OptimizeWithAddrrrs(insn, memOpnd, *prev);
    }
}
2557
IsSameRegisterOperation(const RegOperand & desMovOpnd,const RegOperand & uxtDestOpnd,const RegOperand & uxtFromOpnd)2558 bool IsSameRegisterOperation(const RegOperand &desMovOpnd, const RegOperand &uxtDestOpnd, const RegOperand &uxtFromOpnd)
2559 {
2560 return ((desMovOpnd.GetRegisterNumber() == uxtDestOpnd.GetRegisterNumber()) &&
2561 (uxtDestOpnd.GetRegisterNumber() == uxtFromOpnd.GetRegisterNumber()));
2562 }
2563
/* Conservative alias/use check while scanning backwards for a combinable
 * load/store: returns true when `insn` may use or conflict with register
 * `regNO` (the candidate's mem base) in a way that forbids combining the
 * candidate access (`isStore`, at base offset `baseOfst`) past `insn`.
 * False means `insn` is provably safe to scan past. */
bool CombineContiLoadAndStorePattern::IsRegNotSameMemUseInInsn(const Insn &insn, regno_t regNO, bool isStore,
                                                               int64 baseOfst) const
{
    uint32 opndNum = insn.GetOperandSize();
    bool sameMemAccess = false; /* both store or load */
    if (insn.IsStore() == isStore) {
        sameMemAccess = true;
    }
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = insn.GetOperand(i);
        if (opnd.IsList()) {
            /* call parameter/def lists: any mention of regNO is a conflict */
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            for (auto listElem : listOpnd.GetOperands()) {
                RegOperand *regOpnd = static_cast<RegOperand *>(listElem);
                DEBUG_ASSERT(regOpnd != nullptr, "parameter operand must be RegOperand");
                if (regNO == regOpnd->GetRegisterNumber()) {
                    return true;
                }
            }
        } else if (opnd.IsMemoryAccessOperand()) {
            auto &memOperand = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOperand.GetBaseRegister();
            /* need check offset as well */
            regno_t stackBaseRegNO = cgFunc->UseFP() ? R29 : RSP;
            if (!sameMemAccess && base != nullptr) {
                /* opposite access kind (load vs store): only safe when it is
                 * provably the same base with a non-overlapping BOi offset */
                regno_t curBaseRegNO = base->GetRegisterNumber();
                int64 memBarrierRange = static_cast<int64>(insn.IsLoadStorePair() ? k16BitSize : k8BitSize);
                if (!(curBaseRegNO == regNO && memOperand.GetAddrMode() == MemOperand::kAddrModeBOi &&
                      memOperand.GetOffsetImmediate() != nullptr &&
                      (memOperand.GetOffsetImmediate()->GetOffsetValue() <= (baseOfst - memBarrierRange) ||
                       memOperand.GetOffsetImmediate()->GetOffsetValue() >= (baseOfst + memBarrierRange)))) {
                    return true;
                }
            }
            /* do not trust the following situation :
             * str x1, [x9]
             * str x6, [x2]
             * str x3, [x9, #8]
             */
            if (isStore && regNO != stackBaseRegNO && base != nullptr && base->GetRegisterNumber() != stackBaseRegNO &&
                base->GetRegisterNumber() != regNO) {
                return true;
            }
            if (isStore && base != nullptr && base->GetRegisterNumber() == regNO) {
                if (memOperand.GetAddrMode() == MemOperand::kAddrModeBOi &&
                    memOperand.GetOffsetImmediate() != nullptr) {
                    /* same-base store: conflict when the offset ranges overlap
                     * (range width depends on access size and pairing) */
                    int64 curOffset = memOperand.GetOffsetImmediate()->GetOffsetValue();
                    if (memOperand.GetSize() == k64BitSize) {
                        uint32 memBarrierRange = insn.IsLoadStorePair() ? k16BitSize : k8BitSize;
                        if (curOffset < baseOfst + memBarrierRange && curOffset > baseOfst - memBarrierRange) {
                            return true;
                        }
                    } else if (memOperand.GetSize() == k32BitSize) {
                        uint32 memBarrierRange = insn.IsLoadStorePair() ? k8BitSize : k4BitSize;
                        if (curOffset < baseOfst + memBarrierRange && curOffset > baseOfst - memBarrierRange) {
                            return true;
                        }
                    }
                }
            }
        } else if (opnd.IsConditionCode()) {
            /* treat the flags register like any other register use */
            Operand &rflagOpnd = cgFunc->GetOrCreateRflag();
            RegOperand &rflagReg = static_cast<RegOperand &>(rflagOpnd);
            if (rflagReg.GetRegisterNumber() == regNO) {
                return true;
            }
        } else if (opnd.IsRegister()) {
            /* for loads, a plain register use of regNO blocks the scan */
            if (!isStore && static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO) {
                return true;
            }
        }
    }
    return false;
}
2638
FindPrevStrLdr(Insn & insn,regno_t destRegNO,regno_t memBaseRegNO,int64 baseOfst)2639 std::vector<Insn *> CombineContiLoadAndStorePattern::FindPrevStrLdr(Insn &insn, regno_t destRegNO, regno_t memBaseRegNO,
2640 int64 baseOfst)
2641 {
2642 std::vector<Insn *> prevContiInsns;
2643 bool isStr = insn.IsStore();
2644 for (Insn *curInsn = insn.GetPrev(); curInsn != nullptr; curInsn = curInsn->GetPrev()) {
2645 if (!curInsn->IsMachineInstruction()) {
2646 continue;
2647 }
2648 if (curInsn->IsRegDefined(memBaseRegNO)) {
2649 return prevContiInsns;
2650 }
2651 if (IsRegNotSameMemUseInInsn(*curInsn, memBaseRegNO, insn.IsStore(), static_cast<int32>(baseOfst))) {
2652 return prevContiInsns;
2653 }
2654 /* return continuous STD/LDR insn */
2655 if (((isStr && curInsn->IsStore()) || (!isStr && curInsn->IsLoad())) && !curInsn->IsLoadStorePair()) {
2656 auto *memOperand = static_cast<MemOperand *>(curInsn->GetMemOpnd());
2657 /* do not combine ldr r0, label */
2658 if (memOperand != nullptr) {
2659 auto *BaseRegOpnd = static_cast<RegOperand *>(memOperand->GetBaseRegister());
2660 DEBUG_ASSERT(BaseRegOpnd == nullptr || !BaseRegOpnd->IsVirtualRegister(),
2661 "physical register has not been allocated?");
2662 if (memOperand->GetAddrMode() == MemOperand::kAddrModeBOi &&
2663 BaseRegOpnd->GetRegisterNumber() == memBaseRegNO) {
2664 prevContiInsns.emplace_back(curInsn);
2665 }
2666 }
2667 }
2668 /* check insn that changes the data flow */
2669 regno_t stackBaseRegNO = cgFunc->UseFP() ? R29 : RSP;
2670 /* ldr x8, [x21, #8]
2671 * call foo()
2672 * ldr x9, [x21, #16]
2673 * although x21 is a calleeSave register, there is no guarantee data in memory [x21] is not changed
2674 */
2675 if (curInsn->IsCall() &&
2676 (!AArch64Abi::IsCalleeSavedReg(static_cast<AArch64reg>(destRegNO)) || memBaseRegNO != stackBaseRegNO)) {
2677 return prevContiInsns;
2678 }
2679 /* store opt should not cross call due to stack args */
2680 if (curInsn->IsCall() && isStr) {
2681 return prevContiInsns;
2682 }
2683 if (curInsn->GetMachineOpcode() == MOP_asm) {
2684 return prevContiInsns;
2685 }
2686 if (curInsn->ScanReg(destRegNO)) {
2687 return prevContiInsns;
2688 }
2689 }
2690 return prevContiInsns;
2691 }
2692
/* Look backwards from curInsn for the add-on-R16 insn(s) that were emitted to
 * split a large memory offset (R16 is the reserved scratch register here).
 * Either a single "add R16, base, #imm12" or the two-insn form shown below
 * qualifies. Returns the (second) add insn, or nullptr when no trusted
 * split-add is found before a call or a redefinition of the base register. */
Insn *CombineContiLoadAndStorePattern::FindValidSplitAddInsn(Insn &curInsn, RegOperand &baseOpnd) const
{
    Insn *splitAdd = nullptr;
    for (Insn *cursor = curInsn.GetPrev(); cursor != nullptr; cursor = cursor->GetPrev()) {
        if (!cursor->IsMachineInstruction()) {
            continue;
        }
        /* a call may clobber R16; stop searching */
        if (cursor->IsCall()) {
            break;
        }
        /* the base register is redefined, so earlier adds no longer describe it */
        if (cursor->IsRegDefined(baseOpnd.GetRegisterNumber())) {
            break;
        }
        MOperator mOp = cursor->GetMachineOpcode();
        if (mOp != MOP_xaddrri12 && mOp != MOP_waddrri12) {
            continue;
        }
        /* only an add whose destination is R16 with the base's width qualifies */
        auto &destOpnd = static_cast<RegOperand &>(cursor->GetOperand(kInsnFirstOpnd));
        if (destOpnd.GetRegisterNumber() != R16 || destOpnd.GetSize() != baseOpnd.GetSize()) {
            continue;
        }
        auto &useOpnd = static_cast<RegOperand &>(cursor->GetOperand(kInsnSecondOpnd));
        /*
         * split add as following:
         * add R16, R0, #2, LSL #12
         * add R16, R16, #1536
         */
        if (useOpnd.GetRegisterNumber() != baseOpnd.GetRegisterNumber()) {
            if (useOpnd.GetRegisterNumber() == R16) {
                /* two-insn split: the insn just before must be the shifted add on the base */
                Insn *defInsn = cursor->GetPrev();
                CHECK_FATAL(defInsn, "invalid defInsn");
                CHECK_FATAL(defInsn->GetMachineOpcode() == MOP_xaddrri24 ||
                            defInsn->GetMachineOpcode() == MOP_waddrri24,
                            "split with wrong add");
                auto &opnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
                if (opnd.GetRegisterNumber() == baseOpnd.GetRegisterNumber()) {
                    splitAdd = cursor;
                }
            }
            break;
        } else {
            splitAdd = cursor;
            break;
        }
    }
    return splitAdd;
}
2740
/* Create and place an add on scratch register R16 so that the combined
 * ldp/stp's offset fits the pair-immediate range, then rewrite combineInsn's
 * memory operand accordingly. Scans forward for the largest same-base offset
 * that is also out of range, so that one shared add can serve several
 * accesses. Returns false when no valid memory operand can be formed. */
bool CombineContiLoadAndStorePattern::PlaceSplitAddInsn(const Insn &curInsn, Insn &combineInsn,
                                                        const MemOperand &memOperand, RegOperand &baseOpnd,
                                                        uint32 bitLen) const
{
    Insn *cursor = nullptr;
    MemOperand *maxOfstMem = nullptr;
    int64 maxOfstVal = 0;
    MOperator mop = curInsn.GetMachineOpcode();
    OfstOperand *ofstOpnd = memOperand.GetOffsetImmediate();
    int64 ofstVal = ofstOpnd->GetOffsetValue();
    auto &aarFunc = static_cast<AArch64CGFunc &>(*cgFunc);
    /* find the furthest same-base access within pair-imm range whose offset is
     * also invalid for combineInsn; the shared add should cover it too.
     * Stop once the base register or R16 is redefined. */
    for (cursor = curInsn.GetNext(); cursor != nullptr; cursor = cursor->GetNext()) {
        if (!cursor->IsMachineInstruction()) {
            continue;
        }
        if (cursor->GetMachineOpcode() == mop && (cursor->IsLoad() || cursor->IsStore())) {
            auto &curMemOpnd = static_cast<MemOperand &>(cursor->GetOperand(kInsnSecondOpnd));
            RegOperand *curBaseOpnd = curMemOpnd.GetBaseRegister();
            if (curMemOpnd.GetAddrMode() == MemOperand::kAddrModeBOi && RegOperand::IsSameReg(baseOpnd, *curBaseOpnd)) {
                OfstOperand *curOfstOpnd = curMemOpnd.GetOffsetImmediate();
                CHECK_FATAL(curOfstOpnd, "invalid OfstOperand");
                if (curOfstOpnd->GetOffsetValue() > ofstVal &&
                    (curOfstOpnd->GetOffsetValue() - ofstVal) < MemOperand::GetMaxPairPIMM(bitLen) &&
                    !aarFunc.IsOperandImmValid(combineInsn.GetMachineOpcode(), &curMemOpnd, kInsnThirdOpnd)) {
                    maxOfstMem = &curMemOpnd;
                    maxOfstVal = curOfstOpnd->GetOffsetValue();
                }
            }
        }
        if (cursor->IsRegDefined(baseOpnd.GetRegisterNumber())) {
            break;
        }
        if (cursor->IsRegDefined(R16)) {
            break;
        }
    }
    MemOperand *newMemOpnd = nullptr;
    if (maxOfstMem == nullptr) {
        /* no other access to cover: split just for combineInsn */
        newMemOpnd = &aarFunc.SplitOffsetWithAddInstruction(memOperand, bitLen, static_cast<AArch64reg>(R16), false,
                                                            &combineInsn, true);
    } else {
        /* split based on the largest offset, then rebase combineInsn's operand on R16 */
        RegOperand *addResOpnd = aarFunc.GetBaseRegForSplit(R16);
        ImmOperand &immAddend =
            aarFunc.SplitAndGetRemained(*maxOfstMem, bitLen, addResOpnd, maxOfstVal, false, &combineInsn, true);
        newMemOpnd = &aarFunc.CreateReplacementMemOperand(bitLen, *addResOpnd, ofstVal - immAddend.GetValue());
        if (!(aarFunc.IsOperandImmValid(combineInsn.GetMachineOpcode(), newMemOpnd, kInsnThirdOpnd))) {
            /* rebased offset still invalid: fall back to a dedicated split */
            newMemOpnd = &aarFunc.SplitOffsetWithAddInstruction(memOperand, bitLen, static_cast<AArch64reg>(R16), false,
                                                                &combineInsn, true);
        } else {
            aarFunc.SelectAddAfterInsn(*addResOpnd, baseOpnd, immAddend, PTY_i64, false, combineInsn);
        }
    }
    if (!(aarFunc.IsOperandImmValid(combineInsn.GetMachineOpcode(), newMemOpnd, kInsnThirdOpnd))) {
        return false;
    }
    combineInsn.SetOperand(kInsnThirdOpnd, *newMemOpnd);
    return true;
}
2799
SplitOfstWithAddToCombine(const Insn & curInsn,Insn & combineInsn,const MemOperand & memOperand) const2800 bool CombineContiLoadAndStorePattern::SplitOfstWithAddToCombine(const Insn &curInsn, Insn &combineInsn,
2801 const MemOperand &memOperand) const
2802 {
2803 auto *baseRegOpnd = static_cast<RegOperand *>(memOperand.GetBaseRegister());
2804 auto *ofstOpnd = static_cast<OfstOperand *>(memOperand.GetOffsetImmediate());
2805 DEBUG_ASSERT(baseRegOpnd && ofstOpnd, "get baseOpnd and ofstOpnd failed");
2806 CHECK_FATAL(combineInsn.GetOperand(kInsnFirstOpnd).GetSize() == combineInsn.GetOperand(kInsnSecondOpnd).GetSize(),
2807 "the size must equal");
2808 if (baseRegOpnd->GetRegisterNumber() == R16) {
2809 return false;
2810 }
2811 Insn *splitAdd = FindValidSplitAddInsn(combineInsn, *baseRegOpnd);
2812 const InsnDesc *md = &AArch64CG::kMd[combineInsn.GetMachineOpcode()];
2813 auto *opndProp = md->opndMD[kInsnFirstOpnd];
2814 auto &aarFunc = static_cast<AArch64CGFunc &>(*cgFunc);
2815 if (splitAdd == nullptr) {
2816 if (combineInsn.IsLoadStorePair()) {
2817 if (ofstOpnd->GetOffsetValue() < 0) {
2818 return false; /* do not split */
2819 }
2820 }
2821 /* create and place addInsn */
2822 return PlaceSplitAddInsn(curInsn, combineInsn, memOperand, *baseRegOpnd, opndProp->GetSize());
2823 } else {
2824 auto &newBaseReg = static_cast<RegOperand &>(splitAdd->GetOperand(kInsnFirstOpnd));
2825 auto &addImmOpnd = static_cast<ImmOperand &>(splitAdd->GetOperand(kInsnThirdOpnd));
2826 int64 addVal = 0;
2827 if (static_cast<RegOperand &>(splitAdd->GetOperand(kInsnSecondOpnd)).GetRegisterNumber() == R16) {
2828 Insn *defInsn = splitAdd->GetPrev();
2829 CHECK_FATAL(defInsn->GetMachineOpcode() == MOP_xaddrri24 || defInsn->GetMachineOpcode() == MOP_waddrri24,
2830 "split with wrong add");
2831 auto &immOpnd = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
2832 auto &shiftOpnd = static_cast<BitShiftOperand &>(defInsn->GetOperand(kInsnFourthOpnd));
2833 addVal = (immOpnd.GetValue() << shiftOpnd.GetShiftAmount()) + addImmOpnd.GetValue();
2834 } else {
2835 addVal = addImmOpnd.GetValue();
2836 }
2837 auto *newOfstOpnd =
2838 &aarFunc.CreateOfstOpnd(static_cast<uint64>(ofstOpnd->GetOffsetValue() - addVal), ofstOpnd->GetSize());
2839 auto *newMemOpnd = aarFunc.CreateMemOperand(MemOperand::kAddrModeBOi, opndProp->GetSize(), newBaseReg, nullptr,
2840 newOfstOpnd, memOperand.GetSymbol());
2841 if (!(static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(combineInsn.GetMachineOpcode(), newMemOpnd,
2842 kInsnThirdOpnd))) {
2843 return PlaceSplitAddInsn(curInsn, combineInsn, memOperand, *baseRegOpnd, opndProp->GetSize());
2844 }
2845 combineInsn.SetOperand(kInsnThirdOpnd, *newMemOpnd);
2846 return true;
2847 }
2848 }
2849
CheckCondition(Insn & insn)2850 bool CombineContiLoadAndStorePattern::CheckCondition(Insn &insn)
2851 {
2852 memOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
2853 DEBUG_ASSERT(memOpnd != nullptr, "get mem operand failed");
2854 if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
2855 return false;
2856 }
2857 if (!doAggressiveCombine) {
2858 return false;
2859 }
2860 return true;
2861 }
2862
/* Combining 2 STRs into 1 stp or 2 LDRs into 1 ldp.
 * Also merges two consecutive byte/half-word accesses of XZR/WZR into a single
 * wider zero store/load. Candidate partner insns come from FindPrevStrLdr. */
void CombineContiLoadAndStorePattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    MOperator thisMop = insn.GetMachineOpcode();
    DEBUG_ASSERT(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "unexpect operand");
    auto &destOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto *baseRegOpnd = static_cast<RegOperand *>(memOpnd->GetBaseRegister());
    OfstOperand *offsetOpnd = memOpnd->GetOffsetImmediate();
    CHECK_FATAL(offsetOpnd != nullptr, "offset opnd lost");
    DEBUG_ASSERT(baseRegOpnd == nullptr || !baseRegOpnd->IsVirtualRegister(),
                 "physical register has not been allocated?");
    std::vector<Insn *> prevContiInsnVec = FindPrevStrLdr(
        insn, destOpnd.GetRegisterNumber(), baseRegOpnd->GetRegisterNumber(), offsetOpnd->GetOffsetValue());
    for (auto prevContiInsn : prevContiInsnVec) {
        DEBUG_ASSERT(prevContiInsn != nullptr, "get previous consecutive instructions failed");
        auto *prevMemOpnd = static_cast<MemOperand *>(prevContiInsn->GetMemOpnd());
        if (memOpnd->GetIndexOpt() != prevMemOpnd->GetIndexOpt()) {
            continue;
        }
        OfstOperand *prevOffsetOpnd = prevMemOpnd->GetOffsetImmediate();
        CHECK_FATAL(offsetOpnd != nullptr && prevOffsetOpnd != nullptr, "both conti str/ldr have no offset");
        auto &prevDestOpnd = static_cast<RegOperand &>(prevContiInsn->GetOperand(kInsnFirstOpnd));
        uint32 memSize = insn.GetMemoryByteSize();
        uint32 prevMemSize = prevContiInsn->GetMemoryByteSize();
        if (prevDestOpnd.GetRegisterType() != destOpnd.GetRegisterType()) {
            continue;
        }
        int64 offsetVal = offsetOpnd->GetOffsetValue();
        int64 prevOffsetVal = prevOffsetOpnd->GetOffsetValue();
        auto diffVal = std::abs(offsetVal - prevOffsetVal);
        regno_t destRegNO = destOpnd.GetRegisterNumber();
        regno_t prevDestRegNO = prevDestOpnd.GetRegisterNumber();
        /* special case: two stack-argument stores 8 bytes apart can always be
         * widened to 64-bit registers and combined into one stp */
        if (insn.IsStore() && memOpnd->IsStackArgMem() && prevMemOpnd->IsStackArgMem() &&
            (memSize == k4ByteSize || memSize == k8ByteSize) && diffVal == k8BitSize &&
            (prevMemSize == k4ByteSize || prevMemSize == k8ByteSize) &&
            (destOpnd.GetValidBitsNum() == memSize * k8BitSize) &&
            (prevDestOpnd.GetValidBitsNum() == prevMemSize * k8BitSize)) {
            RegOperand &newDest = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
                static_cast<AArch64reg>(destRegNO), k64BitSize, destOpnd.GetRegisterType());
            RegOperand &newPrevDest = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
                static_cast<AArch64reg>(prevDestRegNO), k64BitSize, prevDestOpnd.GetRegisterType());
            /* the pair's memory operand is the one with the lower offset */
            MemOperand *combineMemOpnd = (offsetVal < prevOffsetVal) ? memOpnd : prevMemOpnd;
            MOperator mopPair = (destOpnd.GetRegisterType() == kRegTyInt) ? MOP_xstp : MOP_dstp;
            if ((static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(mopPair, combineMemOpnd, kInsnThirdOpnd))) {
                Insn &combineInsn =
                    (offsetVal < prevOffsetVal)
                        ? cgFunc->GetInsnBuilder()->BuildInsn(mopPair, newDest, newPrevDest, *combineMemOpnd)
                        : cgFunc->GetInsnBuilder()->BuildInsn(mopPair, newPrevDest, newDest, *combineMemOpnd);
                bb.InsertInsnAfter(*prevContiInsn, combineInsn);
                RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
                return;
            }
        }
        /* general case requires identical opcode and operand width */
        if (memSize != prevMemSize || thisMop != prevContiInsn->GetMachineOpcode() ||
            prevDestOpnd.GetSize() != destOpnd.GetSize()) {
            continue;
        }
        /* do combination str/ldr -> stp/ldp
         * (loads into the same register cannot pair, except for the zero register) */
        if ((insn.IsStore() || destRegNO != prevDestRegNO) || (destRegNO == RZR && prevDestRegNO == RZR)) {
            if ((memSize == k8ByteSize && diffVal == k8BitSize) || (memSize == k4ByteSize && diffVal == k4BitSize) ||
                (memSize == k16ByteSize && diffVal == k16BitSize)) {
                MOperator mopPair = GetMopPair(thisMop);
                MemOperand *combineMemOpnd = (offsetVal < prevOffsetVal) ? memOpnd : prevMemOpnd;
                Insn &combineInsn =
                    (offsetVal < prevOffsetVal)
                        ? cgFunc->GetInsnBuilder()->BuildInsn(mopPair, destOpnd, prevDestOpnd, *combineMemOpnd)
                        : cgFunc->GetInsnBuilder()->BuildInsn(mopPair, prevDestOpnd, destOpnd, *combineMemOpnd);
                bb.InsertInsnAfter(*prevContiInsn, combineInsn);
                /* an out-of-range pair offset is split via an add on R16; if that
                 * fails too, undo the insertion */
                if (!(static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(mopPair, combineMemOpnd,
                                                                              kInsnThirdOpnd)) &&
                    !SplitOfstWithAddToCombine(insn, combineInsn, *combineMemOpnd)) {
                    bb.RemoveInsn(combineInsn);
                    return;
                }
                RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
                return;
            }
        }
        /* do combination strb/ldrb -> strh/ldrh -> str/ldr (zero register only) */
        if (destRegNO == prevDestRegNO && destRegNO == RZR && prevDestRegNO == RZR) {
            if ((memSize == k1ByteSize && diffVal == k1ByteSize) || (memSize == k2ByteSize && diffVal == k2ByteSize)) {
                MOperator mopPair = GetMopHigherByte(thisMop);
                if (offsetVal < prevOffsetVal) {
                    if (static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(mopPair, memOpnd, kInsnSecondOpnd)) {
                        Insn &combineInsn = cgFunc->GetInsnBuilder()->BuildInsn(mopPair, destOpnd, *memOpnd);
                        bb.InsertInsnAfter(*prevContiInsn, combineInsn);
                        RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
                        return;
                    }
                } else {
                    if (static_cast<AArch64CGFunc &>(*cgFunc).IsOperandImmValid(mopPair, prevMemOpnd,
                                                                                kInsnSecondOpnd)) {
                        Insn &combineInsn = cgFunc->GetInsnBuilder()->BuildInsn(mopPair, prevDestOpnd, *prevMemOpnd);
                        bb.InsertInsnAfter(*prevContiInsn, combineInsn);
                        RemoveInsnAndKeepComment(bb, insn, *prevContiInsn);
                        return;
                    }
                }
            }
        }
    }
}
2968
GetMopHigherByte(MOperator mop) const2969 MOperator CombineContiLoadAndStorePattern::GetMopHigherByte(MOperator mop) const
2970 {
2971 switch (mop) {
2972 case MOP_wldrb:
2973 return MOP_wldrh;
2974 case MOP_wstrb:
2975 return MOP_wstrh;
2976 case MOP_wldrh:
2977 return MOP_wldr;
2978 case MOP_wstrh:
2979 return MOP_wstr;
2980 default:
2981 DEBUG_ASSERT(false, "should not run here");
2982 return MOP_undef;
2983 }
2984 }
2985
RemoveInsnAndKeepComment(BB & bb,Insn & insn,Insn & prevInsn) const2986 void CombineContiLoadAndStorePattern::RemoveInsnAndKeepComment(BB &bb, Insn &insn, Insn &prevInsn) const
2987 {
2988 /* keep the comment */
2989 Insn *nn = prevInsn.GetNextMachineInsn();
2990 std::string newComment = "";
2991 MapleString comment = insn.GetComment();
2992 if (comment.c_str() != nullptr && strlen(comment.c_str()) > 0) {
2993 newComment += comment.c_str();
2994 }
2995 comment = prevInsn.GetComment();
2996 if (comment.c_str() != nullptr && strlen(comment.c_str()) > 0) {
2997 newComment = newComment + " " + comment.c_str();
2998 }
2999 if (newComment.c_str() != nullptr && strlen(newComment.c_str()) > 0) {
3000 DEBUG_ASSERT(nn != nullptr, "nn should not be nullptr");
3001 nn->SetComment(newComment);
3002 }
3003 bb.RemoveInsn(insn);
3004 bb.RemoveInsn(prevInsn);
3005 }
3006
/* Remove a sign-extension insn (sxtb/sxth/sxtw, in-place on one register) when
 * the preceding insn already yields a correctly sign-extended value: a
 * mov-immediate whose value fits the target width, or a signed load (ldrsb/
 * ldrsh) of the matching width. For the 64-bit variants the mov may instead be
 * widened to a 64-bit mov and the extension dropped. */
void EliminateSpecifcSXTAArch64::Run(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    Insn *prevInsn = insn.GetPrev();
    /* skip non-machine insns (comments, directives) to find the real predecessor */
    while (prevInsn != nullptr && !prevInsn->GetMachineOpcode()) {
        prevInsn = prevInsn->GetPrev();
    }
    if (prevInsn == nullptr) {
        return;
    }
    auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    /* only handle in-place extensions: sxt Rd, Rd */
    if (&insn != bb.GetFirstInsn() && regOpnd0.GetRegisterNumber() == regOpnd1.GetRegisterNumber() &&
        prevInsn->IsMachineInstruction()) {
        if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
            auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
                return;
            }
            Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
            if (opnd.IsIntImmediate()) {
                auto &immOpnd = static_cast<ImmOperand &>(opnd);
                int64 value = immOpnd.GetValue();
                if (thisMop == MOP_xsxtb32) {
                    /* value should be in range [-128, 127]: already a valid signed byte */
                    if (value >= static_cast<int64>(0xFFFFFFFFFFFFFF80) && value <= 0x7F &&
                        immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
                        bb.RemoveInsn(insn);
                    }
                } else if (thisMop == MOP_xsxth32) {
                    /* value should be in range [-32768, 32767]: already a valid signed half-word */
                    if (value >= static_cast<int64>(0xFFFFFFFFFFFF8000) && value <= 0x7FFF &&
                        immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
                        bb.RemoveInsn(insn);
                    }
                } else {
                    uint64 flag = 0xFFFFFFFFFFFFFF80; /* initialize the flag with fifty-seven 1s at top */
                    if (thisMop == MOP_xsxth64) {
                        flag = 0xFFFFFFFFFFFF8000; /* specify the flag with forty-nine 1s at top in this case */
                    } else if (thisMop == MOP_xsxtw64) {
                        flag = 0xFFFFFFFF80000000; /* specify the flag with thirty-three 1s at top in this case */
                    }
                    /* value is non-negative and fits: widen mov to 64-bit and drop the sxt */
                    if (!(static_cast<uint64>(value) & flag) &&
                        immOpnd.IsSingleInstructionMovable(regOpnd0.GetSize())) {
                        auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
                        RegOperand &dstOpnd = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
                            static_cast<AArch64reg>(dstMovOpnd.GetRegisterNumber()), k64BitSize,
                            dstMovOpnd.GetRegisterType());
                        prevInsn->SetOperand(kInsnFirstOpnd, dstOpnd);
                        prevInsn->SetMOP(AArch64CG::kMd[MOP_xmovri64]);
                        bb.RemoveInsn(insn);
                    }
                }
            }
        } else if (prevInsn->GetMachineOpcode() == MOP_wldrsb) {
            /* ldrsb already sign-extends the loaded byte, so a following sxtb is redundant */
            auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
                return;
            }
            if (thisMop == MOP_xsxtb32) {
                bb.RemoveInsn(insn);
            }
        } else if (prevInsn->GetMachineOpcode() == MOP_wldrsh) {
            /* ldrsh already sign-extends the loaded half-word, so a following sxth is redundant */
            auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (dstMovOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
                return;
            }
            if (thisMop == MOP_xsxth32) {
                bb.RemoveInsn(insn);
            }
        }
    }
}
3080
/* Remove a zero-extension insn (uxtb/uxth/uxtw, in-place on one register) when
 * the preceding insn already guarantees the upper bits are zero: an unsigned
 * call return of sufficient width, a small-enough mov-immediate, or an
 * unsigned/32-bit load (which zero-extends by definition on AArch64). */
void EliminateSpecifcUXTAArch64::Run(BB &bb, Insn &insn)
{
    MOperator thisMop = insn.GetMachineOpcode();
    Insn *prevInsn = insn.GetPreviousMachineInsn();
    if (prevInsn == nullptr) {
        return;
    }
    auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    /* a call with unsigned return already zero-extends the value in R0/V0 */
    if (prevInsn->IsCall() && prevInsn->GetIsCallReturnUnsigned() &&
        regOpnd0.GetRegisterNumber() == regOpnd1.GetRegisterNumber() &&
        (regOpnd1.GetRegisterNumber() == R0 || regOpnd1.GetRegisterNumber() == V0)) {
        uint32 retSize = prevInsn->GetRetSize();
        /* safe to drop only if the extension width covers the returned size */
        if (retSize > 0 &&
            ((thisMop == MOP_xuxtb32 && retSize <= k1ByteSize) || (thisMop == MOP_xuxth32 && retSize <= k2ByteSize) ||
             (thisMop == MOP_xuxtw64 && retSize <= k4ByteSize))) {
            bb.RemoveInsn(insn);
        }
        return;
    }
    /* remaining cases only handle in-place extensions: uxt Rd, Rd */
    if (&insn == bb.GetFirstInsn() || regOpnd0.GetRegisterNumber() != regOpnd1.GetRegisterNumber() ||
        !prevInsn->IsMachineInstruction()) {
        return;
    }
    /* for C, a signed call return may have its top bits set; the uxt is needed */
    if (cgFunc.GetMirModule().GetSrcLang() == kSrcLangC && prevInsn->IsCall() && prevInsn->GetIsCallReturnSigned()) {
        return;
    }
    if (thisMop == MOP_xuxtb32) {
        if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
            auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
                return;
            }
            Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
            if (opnd.IsIntImmediate()) {
                auto &immOpnd = static_cast<ImmOperand &>(opnd);
                int64 value = immOpnd.GetValue();
                /* check the top 56 bits of value: already a valid unsigned byte */
                if (!(static_cast<uint64>(value) & 0xFFFFFFFFFFFFFF00)) {
                    bb.RemoveInsn(insn);
                }
            }
        } else if (prevInsn->GetMachineOpcode() == MOP_wldrb) {
            /* ldrb zero-extends the loaded byte, so a following uxtb is redundant */
            auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
                return;
            }
            bb.RemoveInsn(insn);
        }
    } else if (thisMop == MOP_xuxth32) {
        if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_xmovri64) {
            auto &dstMovOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (!IsSameRegisterOperation(dstMovOpnd, regOpnd1, regOpnd0)) {
                return;
            }
            Operand &opnd = prevInsn->GetOperand(kInsnSecondOpnd);
            if (opnd.IsIntImmediate()) {
                auto &immOpnd = static_cast<ImmOperand &>(opnd);
                int64 value = immOpnd.GetValue();
                /* top 48 bits clear: already a valid unsigned half-word */
                if (!(static_cast<uint64>(value) & 0xFFFFFFFFFFFF0000)) {
                    bb.RemoveInsn(insn);
                }
            }
        } else if (prevInsn->GetMachineOpcode() == MOP_wldrh) {
            /* ldrh zero-extends the loaded half-word, so a following uxth is redundant */
            auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (dstOpnd.GetRegisterNumber() != regOpnd1.GetRegisterNumber()) {
                return;
            }
            bb.RemoveInsn(insn);
        }
    } else {
        /* this_mop == MOP_xuxtw64 */
        if (prevInsn->GetMachineOpcode() == MOP_wmovri32 || prevInsn->GetMachineOpcode() == MOP_wldrsb ||
            prevInsn->GetMachineOpcode() == MOP_wldrb || prevInsn->GetMachineOpcode() == MOP_wldrsh ||
            prevInsn->GetMachineOpcode() == MOP_wldrh || prevInsn->GetMachineOpcode() == MOP_wldr) {
            auto &dstOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
            if (!IsSameRegisterOperation(dstOpnd, regOpnd1, regOpnd0)) {
                return;
            }
            /* 32-bit ldr does zero-extension by default, so this conversion can be skipped */
            bb.RemoveInsn(insn);
        }
    }
}
3165
CheckCondition(Insn & insn)3166 bool FmovRegPattern::CheckCondition(Insn &insn)
3167 {
3168 nextInsn = insn.GetNextMachineInsn();
3169 if (nextInsn == nullptr) {
3170 return false;
3171 }
3172 if (&insn == insn.GetBB()->GetFirstInsn()) {
3173 return false;
3174 }
3175 prevInsn = insn.GetPrev();
3176 auto &curSrcRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
3177 auto &prevSrcRegOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
3178 /* same src freg */
3179 if (curSrcRegOpnd.GetRegisterNumber() != prevSrcRegOpnd.GetRegisterNumber()) {
3180 return false;
3181 }
3182 return true;
3183 }
3184
Run(BB & bb,Insn & insn)3185 void FmovRegPattern::Run(BB &bb, Insn &insn)
3186 {
3187 if (!CheckCondition(insn)) {
3188 return;
3189 }
3190 MOperator thisMop = insn.GetMachineOpcode();
3191 MOperator prevMop = prevInsn->GetMachineOpcode();
3192 MOperator newMop;
3193 uint32 doOpt = 0;
3194 if (prevMop == MOP_xvmovrv && thisMop == MOP_xvmovrv) {
3195 doOpt = k32BitSize;
3196 newMop = MOP_wmovrr;
3197 } else if (prevMop == MOP_xvmovrd && thisMop == MOP_xvmovrd) {
3198 doOpt = k64BitSize;
3199 newMop = MOP_xmovrr;
3200 }
3201 if (doOpt == 0) {
3202 return;
3203 }
3204 auto &curDstRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3205 regno_t curDstReg = curDstRegOpnd.GetRegisterNumber();
3206 /* optimize case 1 */
3207 auto &prevDstRegOpnd = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
3208 regno_t prevDstReg = prevDstRegOpnd.GetRegisterNumber();
3209 auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
3210 RegOperand &dst =
3211 aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(curDstReg), doOpt, kRegTyInt);
3212 RegOperand &src =
3213 aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(prevDstReg), doOpt, kRegTyInt);
3214 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, dst, src);
3215 bb.InsertInsnBefore(insn, newInsn);
3216 bb.RemoveInsn(insn);
3217 RegOperand &newOpnd =
3218 aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(prevDstReg), doOpt, kRegTyInt);
3219 uint32 opndNum = nextInsn->GetOperandSize();
3220 for (uint32 opndIdx = 0; opndIdx < opndNum; ++opndIdx) {
3221 Operand &opnd = nextInsn->GetOperand(opndIdx);
3222 if (opnd.IsMemoryAccessOperand()) {
3223 auto &memOpnd = static_cast<MemOperand &>(opnd);
3224 Operand *base = memOpnd.GetBaseRegister();
3225 if (base != nullptr) {
3226 if (base->IsRegister()) {
3227 auto *reg = static_cast<RegOperand *>(base);
3228 if (reg->GetRegisterNumber() == curDstReg) {
3229 memOpnd.SetBaseRegister(newOpnd);
3230 }
3231 }
3232 }
3233 Operand *offset = memOpnd.GetIndexRegister();
3234 if (offset != nullptr) {
3235 if (offset->IsRegister()) {
3236 auto *reg = static_cast<RegOperand *>(offset);
3237 if (reg->GetRegisterNumber() == curDstReg) {
3238 memOpnd.SetIndexRegister(newOpnd);
3239 }
3240 }
3241 }
3242 } else if (opnd.IsRegister()) {
3243 /* Check if it is a source operand. */
3244 auto *regProp = nextInsn->GetDesc()->opndMD[opndIdx];
3245 if (regProp->IsUse()) {
3246 auto ® = static_cast<RegOperand &>(opnd);
3247 if (reg.GetRegisterNumber() == curDstReg) {
3248 nextInsn->SetOperand(opndIdx, newOpnd);
3249 }
3250 }
3251 }
3252 }
3253 }
3254
CheckCondition(Insn & insn)3255 bool SbfxOptPattern::CheckCondition(Insn &insn)
3256 {
3257 nextInsn = insn.GetNextMachineInsn();
3258 if (nextInsn == nullptr) {
3259 return false;
3260 }
3261 auto &curDstRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3262 uint32 opndNum = nextInsn->GetOperandSize();
3263 const InsnDesc *md = insn.GetDesc();
3264 for (uint32 opndIdx = 0; opndIdx < opndNum; ++opndIdx) {
3265 Operand &opnd = nextInsn->GetOperand(opndIdx);
3266 /* Check if it is a source operand. */
3267 if (opnd.IsMemoryAccessOperand() || opnd.IsList()) {
3268 return false;
3269 } else if (opnd.IsRegister()) {
3270 auto ® = static_cast<RegOperand &>(opnd);
3271 auto *regProp = md->opndMD[opndIdx];
3272 if (reg.GetRegisterNumber() == curDstRegOpnd.GetRegisterNumber()) {
3273 if (reg.GetSize() != k32BitSize) {
3274 return false;
3275 }
3276 if (regProp->IsDef()) {
3277 toRemove = true;
3278 } else {
3279 (void)cands.emplace_back(opndIdx);
3280 }
3281 }
3282 }
3283 }
3284 return cands.size() != 0;
3285 }
3286
Run(BB & bb,Insn & insn)3287 void SbfxOptPattern::Run(BB &bb, Insn &insn)
3288 {
3289 if (!CheckCondition(insn)) {
3290 return;
3291 }
3292 auto &srcRegOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
3293 RegOperand &newReg = static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(
3294 static_cast<AArch64reg>(srcRegOpnd.GetRegisterNumber()), k32BitSize, srcRegOpnd.GetRegisterType());
3295 // replace use point of opnd in nextInsn
3296 for (auto i : cands) {
3297 nextInsn->SetOperand(i, newReg);
3298 }
3299 if (toRemove) {
3300 bb.RemoveInsn(insn);
3301 }
3302 }
3303
CheckCondition(Insn & insn)3304 bool CbnzToCbzPattern::CheckCondition(Insn &insn)
3305 {
3306 MOperator curMop = insn.GetMachineOpcode();
3307 if (curMop != MOP_wcbnz && curMop != MOP_xcbnz) {
3308 return false;
3309 }
3310 /* reg has to be R0, since return value is in R0 */
3311 auto ®Opnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3312 if (regOpnd0.GetRegisterNumber() != R0) {
3313 return false;
3314 }
3315 nextBB = insn.GetBB()->GetNext();
3316 /* Make sure nextBB can only be reached by bb */
3317 if (nextBB->GetPreds().size() > 1 || nextBB->GetEhPreds().empty()) {
3318 return false;
3319 }
3320 /* Next insn should be a mov R0 = 0 */
3321 movInsn = nextBB->GetFirstMachineInsn();
3322 if (movInsn == nullptr) {
3323 return false;
3324 }
3325 MOperator movInsnMop = movInsn->GetMachineOpcode();
3326 if (movInsnMop != MOP_wmovri32 && movInsnMop != MOP_xmovri64) {
3327 return false;
3328 }
3329 auto &movDest = static_cast<RegOperand &>(movInsn->GetOperand(kInsnFirstOpnd));
3330 if (movDest.GetRegisterNumber() != R0) {
3331 return false;
3332 }
3333 auto &movImm = static_cast<ImmOperand &>(movInsn->GetOperand(kInsnSecondOpnd));
3334 if (movImm.GetValue() != 0) {
3335 return false;
3336 }
3337 Insn *nextBrInsn = movInsn->GetNextMachineInsn();
3338 if (nextBrInsn == nullptr) {
3339 return false;
3340 }
3341 if (nextBrInsn->GetMachineOpcode() != MOP_xuncond) {
3342 return false;
3343 }
3344 /* Is nextBB branch to the return-bb? */
3345 if (nextBB->GetSuccs().size() != 1) {
3346 return false;
3347 }
3348 return true;
3349 }
3350
/* Turn "cbnz R0, target; nextBB: mov R0,#0; b return-bb" into
 * "cbz R0, return-bb" with nextBB falling through, then patch the CFG edges
 * accordingly. movInsn/brInsn/nextBB are expected to have been recorded by
 * CheckCondition. */
void CbnzToCbzPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    MOperator thisMop = insn.GetMachineOpcode();
    BB *targetBB = nullptr;
    auto it = bb.GetSuccsBegin();
    /* pick the successor that is not the fallthrough nextBB */
    if (*it == nextBB) {
        ++it;
    }
    targetBB = *it;
    /* Make sure when nextBB is empty, targetBB is fallthru of bb. */
    if (targetBB != nextBB->GetNext()) {
        return;
    }
    BB *nextBBTarget = *(nextBB->GetSuccsBegin());
    if (nextBBTarget->GetKind() != BB::kBBReturn) {
        return;
    }
    /* Control flow looks nice, instruction looks nice: retarget and invert the branch */
    Operand &brTarget = brInsn->GetOperand(kInsnFirstOpnd);
    insn.SetOperand(kInsnSecondOpnd, brTarget);
    if (thisMop == MOP_wcbnz) {
        insn.SetMOP(AArch64CG::kMd[MOP_wcbz]);
    } else {
        insn.SetMOP(AArch64CG::kMd[MOP_xcbz]);
    }
    nextBB->RemoveInsn(*movInsn);
    nextBB->RemoveInsn(*brInsn);
    /* nextBB is now a fallthru bb, not a goto bb */
    nextBB->SetKind(BB::kBBFallthru);
    /*
     * fix control flow, we have bb, nextBB, targetBB, nextBB_target
     * connect bb -> nextBB_target erase targetBB
     */
    it = bb.GetSuccsBegin();
    CHECK_FATAL(it != bb.GetSuccsEnd(), "succs is empty.");
    if (*it == targetBB) {
        bb.EraseSuccs(it);
        bb.PushFrontSuccs(*nextBBTarget);
    } else {
        ++it;
        bb.EraseSuccs(it);
        bb.PushBackSuccs(*nextBBTarget);
    }
    /* bb no longer jumps to targetBB */
    for (auto targetBBIt = targetBB->GetPredsBegin(); targetBBIt != targetBB->GetPredsEnd(); ++targetBBIt) {
        if (*targetBBIt == &bb) {
            targetBB->ErasePreds(targetBBIt);
            break;
        }
    }
    /* the edge nextBB -> nextBBTarget is replaced by bb -> nextBBTarget */
    for (auto nextIt = nextBBTarget->GetPredsBegin(); nextIt != nextBBTarget->GetPredsEnd(); ++nextIt) {
        if (*nextIt == nextBB) {
            nextBBTarget->ErasePreds(nextIt);
            break;
        }
    }
    nextBBTarget->PushBackPreds(bb);

    /* nextBB has no target, originally just branch target */
    nextBB->EraseSuccs(nextBB->GetSuccsBegin());
    DEBUG_ASSERT(nextBB->GetSuccs().empty(), "peep: branch target incorrect");
    /* Now make nextBB fallthru to targetBB */
    nextBB->PushFrontSuccs(*targetBB);
    targetBB->PushBackPreds(*nextBB);
}
3418
/*
 * Rewrites
 *   cset rX, <cond>
 *   cbz/cbnz rX, <label>
 * into a single conditional branch on the flags the cset consumed
 * ("b<cond or !cond> <label>"), provided rX is dead after the branch.
 */
void CsetCbzToBeqOptAArch64::Run(BB &bb, Insn &insn)
{
    Insn *insn1 = insn.GetPreviousMachineInsn();
    if (insn1 == nullptr) {
        return;
    }
    /* prevInsn must be "cset" insn */
    MOperator opCode1 = insn1->GetMachineOpcode();
    if (opCode1 != MOP_xcsetrc && opCode1 != MOP_wcsetrc) {
        return;
    }

    /* The cbz/cbnz must test the very register the cset defines. */
    auto &tmpRegOp1 = static_cast<RegOperand &>(insn1->GetOperand(kInsnFirstOpnd));
    regno_t baseRegNO1 = tmpRegOp1.GetRegisterNumber();
    auto &tmpRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    regno_t baseRegNO2 = tmpRegOp2.GetRegisterNumber();
    if (baseRegNO1 != baseRegNO2) {
        return;
    }
    /* If the reg will be used later, we shouldn't optimize the cset insn here */
    if (IfOperandIsLiveAfterInsn(tmpRegOp2, insn)) {
        return;
    }
    MOperator opCode = insn.GetMachineOpcode();
    /* cbz branches when the cset produced 0, i.e. when <cond> was false,
     * so it selects the negated branch opcode. */
    bool reverse = (opCode == MOP_xcbz || opCode == MOP_wcbz);
    Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
    auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
    auto &cond = static_cast<CondOperand &>(insn1->GetOperand(kInsnSecondOpnd));
    MOperator jmpOperator = SelectMOperator(cond.GetCode(), reverse);
    CHECK_FATAL((jmpOperator != MOP_undef), "unknown condition code");
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(jmpOperator, rflag, label);
    bb.RemoveInsn(*insn1);
    bb.ReplaceInsn(insn, newInsn);
}
3453
SelectMOperator(ConditionCode condCode,bool inverse) const3454 MOperator CsetCbzToBeqOptAArch64::SelectMOperator(ConditionCode condCode, bool inverse) const
3455 {
3456 switch (condCode) {
3457 case CC_NE:
3458 return inverse ? MOP_beq : MOP_bne;
3459 case CC_EQ:
3460 return inverse ? MOP_bne : MOP_beq;
3461 case CC_MI:
3462 return inverse ? MOP_bpl : MOP_bmi;
3463 case CC_PL:
3464 return inverse ? MOP_bmi : MOP_bpl;
3465 case CC_VS:
3466 return inverse ? MOP_bvc : MOP_bvs;
3467 case CC_VC:
3468 return inverse ? MOP_bvs : MOP_bvc;
3469 case CC_HI:
3470 return inverse ? MOP_bls : MOP_bhi;
3471 case CC_LS:
3472 return inverse ? MOP_bhi : MOP_bls;
3473 case CC_GE:
3474 return inverse ? MOP_blt : MOP_bge;
3475 case CC_LT:
3476 return inverse ? MOP_bge : MOP_blt;
3477 case CC_HS:
3478 return inverse ? MOP_blo : MOP_bhs;
3479 case CC_LO:
3480 return inverse ? MOP_bhs : MOP_blo;
3481 case CC_LE:
3482 return inverse ? MOP_bgt : MOP_ble;
3483 case CC_GT:
3484 return inverse ? MOP_ble : MOP_bgt;
3485 case CC_CS:
3486 return inverse ? MOP_bcc : MOP_bcs;
3487 default:
3488 return MOP_undef;
3489 }
3490 }
3491
CheckCondition(Insn & insn)3492 bool ContiLDRorSTRToSameMEMPattern::CheckCondition(Insn &insn)
3493 {
3494 prevInsn = insn.GetPrev();
3495 while (prevInsn != nullptr && !prevInsn->GetMachineOpcode() && prevInsn != insn.GetBB()->GetFirstInsn()) {
3496 prevInsn = prevInsn->GetPrev();
3497 }
3498 if (!insn.IsMachineInstruction() || prevInsn == nullptr) {
3499 return false;
3500 }
3501 MOperator thisMop = insn.GetMachineOpcode();
3502 MOperator prevMop = prevInsn->GetMachineOpcode();
3503 /*
3504 * store regB, RegC, offset
3505 * load regA, RegC, offset
3506 */
3507 if ((thisMop == MOP_xldr && prevMop == MOP_xstr) || (thisMop == MOP_wldr && prevMop == MOP_wstr) ||
3508 (thisMop == MOP_dldr && prevMop == MOP_dstr) || (thisMop == MOP_sldr && prevMop == MOP_sstr)) {
3509 loadAfterStore = true;
3510 }
3511 /*
3512 * load regA, RegC, offset
3513 * load regB, RegC, offset
3514 */
3515 if ((thisMop == MOP_xldr || thisMop == MOP_wldr || thisMop == MOP_dldr || thisMop == MOP_sldr) &&
3516 prevMop == thisMop) {
3517 loadAfterLoad = true;
3518 }
3519 if (!loadAfterStore && !loadAfterLoad) {
3520 return false;
3521 }
3522 DEBUG_ASSERT(insn.GetOperand(kInsnSecondOpnd).IsMemoryAccessOperand(), "expects mem operands");
3523 DEBUG_ASSERT(prevInsn->GetOperand(kInsnSecondOpnd).IsMemoryAccessOperand(), "expects mem operands");
3524 return true;
3525 }
3526
/*
 * Removes the second of two back-to-back accesses to the same [base, #imm]
 * address (validated by CheckCondition):
 *  - load after store to a different register: the load is replaced by a
 *    register move (or, for a spill-reload-respill sequence, the following
 *    store is retargeted and no move is needed);
 *  - load after load into the same register: the second load is deleted.
 */
void ContiLDRorSTRToSameMEMPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    MOperator thisMop = insn.GetMachineOpcode();
    auto &memOpnd1 = static_cast<MemOperand &>(insn.GetOperand(kInsnSecondOpnd));
    MemOperand::AArch64AddressingMode addrMode1 = memOpnd1.GetAddrMode();
    /* Only the simple base + immediate-offset, non-writeback form is handled. */
    if (addrMode1 != MemOperand::kAddrModeBOi || (!memOpnd1.IsIntactIndexed())) {
        return;
    }

    auto *base1 = static_cast<RegOperand *>(memOpnd1.GetBaseRegister());
    DEBUG_ASSERT(base1 == nullptr || !base1->IsVirtualRegister(), "physical register has not been allocated?");
    OfstOperand *offset1 = memOpnd1.GetOffsetImmediate();

    auto &memOpnd2 = static_cast<MemOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
    MemOperand::AArch64AddressingMode addrMode2 = memOpnd2.GetAddrMode();
    if (addrMode2 != MemOperand::kAddrModeBOi || (!memOpnd2.IsIntactIndexed())) {
        return;
    }

    auto *base2 = static_cast<RegOperand *>(memOpnd2.GetBaseRegister());
    DEBUG_ASSERT(base2 == nullptr || !base2->IsVirtualRegister(), "physical register has not been allocated?");
    OfstOperand *offset2 = memOpnd2.GetOffsetImmediate();

    if (base1 == nullptr || base2 == nullptr || offset1 == nullptr || offset2 == nullptr) {
        return;
    }

    /* Both insns must access the exact same address with same-typed,
     * same-width data registers. */
    auto &reg1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto &reg2 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
    int64 offsetVal1 = offset1->GetOffsetValue();
    int64 offsetVal2 = offset2->GetOffsetValue();
    if (base1->GetRegisterNumber() != base2->GetRegisterNumber() || reg1.GetRegisterType() != reg2.GetRegisterType() ||
        reg1.GetSize() != reg2.GetSize() || offsetVal1 != offsetVal2) {
        return;
    }
    if (loadAfterStore && reg1.GetRegisterNumber() != reg2.GetRegisterNumber()) {
        /* replace it with mov */
        MOperator newOp = MOP_wmovrr;
        if (reg1.GetRegisterType() == kRegTyInt) {
            newOp = (reg1.GetSize() <= k32BitSize) ? MOP_wmovrr : MOP_xmovrr;
        } else if (reg1.GetRegisterType() == kRegTyFloat) {
            newOp = (reg1.GetSize() <= k32BitSize) ? MOP_xvmovs : MOP_xvmovd;
        }
        Insn *nextInsn = insn.GetNext();
        /* Skip non-machine insns to find the next real instruction. */
        while (nextInsn != nullptr && !nextInsn->GetMachineOpcode() && nextInsn != bb.GetLastInsn()) {
            nextInsn = nextInsn->GetNext();
        }
        bool moveSameReg = false;
        /* Spill special case: if the reloaded value is immediately re-stored
         * and dies there, retarget that store to reg2 instead of emitting a
         * mov (saves both the load and the mov). */
        if (nextInsn && nextInsn->GetIsSpill() && !IfOperandIsLiveAfterInsn(reg1, *nextInsn)) {
            MOperator nextMop = nextInsn->GetMachineOpcode();
            if ((thisMop == MOP_xldr && nextMop == MOP_xstr) || (thisMop == MOP_wldr && nextMop == MOP_wstr) ||
                (thisMop == MOP_dldr && nextMop == MOP_dstr) || (thisMop == MOP_sldr && nextMop == MOP_sstr)) {
                nextInsn->Insn::SetOperand(kInsnFirstOpnd, reg2);
                moveSameReg = true;
            }
        }
        if (!moveSameReg) {
            bb.InsertInsnAfter(*prevInsn, cgFunc->GetInsnBuilder()->BuildInsn(newOp, reg1, reg2));
        }
        bb.RemoveInsn(insn);
    } else if (reg1.GetRegisterNumber() == reg2.GetRegisterNumber() &&
               base1->GetRegisterNumber() != reg2.GetRegisterNumber()) {
        /* Same destination register and the first access did not clobber the
         * base: the second load is fully redundant. */
        bb.RemoveInsn(insn);
    }
}
3595
CheckCondition(Insn & insn)3596 bool RemoveIncDecRefPattern::CheckCondition(Insn &insn)
3597 {
3598 if (insn.GetMachineOpcode() != MOP_xbl) {
3599 return false;
3600 }
3601 prevInsn = insn.GetPreviousMachineInsn();
3602 if (prevInsn == nullptr) {
3603 return false;
3604 }
3605 MOperator prevMop = prevInsn->GetMachineOpcode();
3606 if (prevMop != MOP_xmovrr) {
3607 return false;
3608 }
3609 auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
3610 if (target.GetName() != "MCC_IncDecRef_NaiveRCFast") {
3611 return false;
3612 }
3613 if (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R1 ||
3614 static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber() != R0) {
3615 return false;
3616 }
3617 return true;
3618 }
3619
Run(BB & bb,Insn & insn)3620 void RemoveIncDecRefPattern::Run(BB &bb, Insn &insn)
3621 {
3622 if (!CheckCondition(insn)) {
3623 return;
3624 }
3625 bb.RemoveInsn(*prevInsn);
3626 bb.RemoveInsn(insn);
3627 }
3628
3629 #ifdef USE_32BIT_REF
3630 constexpr uint32 kRefSize = 32;
3631 #else
3632 constexpr uint32 kRefSize = 64;
3633 #endif
3634
Run(BB & bb,Insn & insn)3635 void CselZeroOneToCsetOpt::Run(BB &bb, Insn &insn)
3636 {
3637 Operand &trueValueOp = insn.GetOperand(kInsnSecondOpnd);
3638 Operand &falseValueOp = insn.GetOperand(kInsnThirdOpnd);
3639 Operand *trueTempOp = nullptr;
3640 Operand *falseTempOp = nullptr;
3641
3642 /* find fixed value in BB */
3643 if (!trueValueOp.IsIntImmediate()) {
3644 trueMovInsn = FindFixedValue(trueValueOp, bb, trueTempOp, insn);
3645 }
3646 if (!falseValueOp.IsIntImmediate()) {
3647 falseMovInsn = FindFixedValue(falseValueOp, bb, falseTempOp, insn);
3648 }
3649
3650 DEBUG_ASSERT(trueTempOp != nullptr, "trueTempOp should not be nullptr");
3651 DEBUG_ASSERT(falseTempOp != nullptr, "falseTempOp should not be nullptr");
3652 /* csel to cset */
3653 if ((trueTempOp->IsIntImmediate() || IsZeroRegister(*trueTempOp)) &&
3654 (falseTempOp->IsIntImmediate() || IsZeroRegister(*falseTempOp))) {
3655 ImmOperand *imm1 = static_cast<ImmOperand *>(trueTempOp);
3656 ImmOperand *imm2 = static_cast<ImmOperand *>(falseTempOp);
3657 bool inverse = imm1->IsOne() && (imm2->IsZero() || IsZeroRegister(*imm2));
3658 if (inverse || ((imm1->IsZero() || IsZeroRegister(*imm1)) && imm2->IsOne())) {
3659 Operand ® = insn.GetOperand(kInsnFirstOpnd);
3660 CondOperand &condOperand = static_cast<CondOperand &>(insn.GetOperand(kInsnFourthOpnd));
3661 MOperator mopCode = (reg.GetSize() == k64BitSize) ? MOP_xcsetrc : MOP_wcsetrc;
3662 /* get new cond ccCode */
3663 ConditionCode ccCode = inverse ? condOperand.GetCode() : GetReverseCC(condOperand.GetCode());
3664 if (ccCode == kCcLast) {
3665 return;
3666 }
3667 AArch64CGFunc *func = static_cast<AArch64CGFunc *>(cgFunc);
3668 CondOperand &cond = func->GetCondOperand(ccCode);
3669 Operand &rflag = func->GetOrCreateRflag();
3670 Insn &csetInsn = func->GetInsnBuilder()->BuildInsn(mopCode, reg, cond, rflag);
3671 if (CGOptions::DoCGSSA() && CGOptions::GetInstance().GetOptimizeLevel() < CGOptions::kLevel0) {
3672 CHECK_FATAL(false, "check this case in ssa opt");
3673 }
3674 insn.GetBB()->ReplaceInsn(insn, csetInsn);
3675 }
3676 }
3677 }
3678
/*
 * Walks bb backwards to find the constant mov that defines 'opnd' and
 * reaches 'insn' (the csel). Only definitions at or before the csel in
 * program order are considered: in the reverse walk, defs are inspected
 * only after the csel itself has been encountered (matched by insn id).
 * On success, tempOp is redirected to the mov's immediate operand and the
 * defining insn is returned; if the reaching definition is anything other
 * than a wmovri32/xmovri64, or none is found, tempOp stays pointing at
 * opnd and nullptr is returned.
 */
Insn *CselZeroOneToCsetOpt::FindFixedValue(Operand &opnd, BB &bb, Operand *&tempOp, const Insn &insn) const
{
    tempOp = &opnd;
    bool alreadyFindCsel = false;
    bool isRegDefined = false;
    regno_t regno = static_cast<RegOperand &>(opnd).GetRegisterNumber();
    FOR_BB_INSNS_REV(defInsn, &bb) {
        if (!defInsn->IsMachineInstruction() || defInsn->IsBranch()) {
            continue;
        }
        /* find csel */
        if (defInsn->GetId() == insn.GetId()) {
            alreadyFindCsel = true;
        }
        /* find def defined */
        if (alreadyFindCsel) {
            isRegDefined = defInsn->IsRegDefined(regno);
        }
        /* if def defined is movi do this opt */
        if (isRegDefined) {
            MOperator thisMop = defInsn->GetMachineOpcode();
            if (thisMop == MOP_wmovri32 || thisMop == MOP_xmovri64) {
                /* Pointer identity check: the mov must write the very operand
                 * the csel reads. */
                if (&defInsn->GetOperand(kInsnFirstOpnd) == &opnd) {
                    tempOp = &(defInsn->GetOperand(kInsnSecondOpnd));
                    return defInsn;
                }
            } else {
                /* Defined by something other than a constant mov: give up. */
                return nullptr;
            }
        }
    }
    return nullptr;
}
3712
/*
 * Rewrites the five-instruction sequence (all on the same register wR)
 *   and  wR, wR, #1
 *   cmp  wR, #0
 *   cset wR, EQ
 *   eor  wR, wR, #1
 *   cbz  wR, <label>
 * — which branches to <label> exactly when bit 0 of wR is clear — into a
 * single "tbz wR, #0, <label>".
 */
void AndCmpCsetEorCbzOpt::Run(BB &bb, Insn &insn)
{
    if (insn.GetMachineOpcode() != MOP_wandrri12) {
        return;
    }
    /* and wR, wR, #1: source and destination must be the same register. */
    RegOperand &andInsnFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    RegOperand &andInsnSecondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    ImmOperand &andInsnThirdOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
    if (andInsnFirstOpnd.GetRegisterNumber() != andInsnSecondOpnd.GetRegisterNumber() ||
        andInsnThirdOpnd.GetValue() != 1) {
        return;
    }
    /* cmp wR, #0 */
    Insn *cmpInsn = insn.GetNextMachineInsn();
    if (cmpInsn == nullptr || cmpInsn->GetMachineOpcode() != MOP_wcmpri) {
        return;
    }
    RegOperand &cmpInsnSecondOpnd = static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd));
    ImmOperand &cmpInsnThirdOpnd = static_cast<ImmOperand&>(cmpInsn->GetOperand(kInsnThirdOpnd));
    if (cmpInsnSecondOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
        cmpInsnThirdOpnd.GetValue() != 0) {
        return;
    }
    /* cset wR, EQ */
    Insn *csetInsn = cmpInsn->GetNextMachineInsn();
    if (csetInsn == nullptr || csetInsn->GetMachineOpcode() != MOP_wcsetrc) {
        return;
    }
    RegOperand &csetInsnFirstOpnd = static_cast<RegOperand &>(csetInsn->GetOperand(kInsnFirstOpnd));
    CondOperand &csetSecondOpnd = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
    if (csetInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
        csetSecondOpnd.GetCode() != CC_EQ) {
        return;
    }
    /* eor wR, wR, #1 */
    Insn *eorInsn = csetInsn->GetNextMachineInsn();
    if (eorInsn == nullptr || eorInsn->GetMachineOpcode() != MOP_weorrri12) {
        return;
    }
    RegOperand &eorInsnFirstOpnd = static_cast<RegOperand &>(eorInsn->GetOperand(kInsnFirstOpnd));
    RegOperand &eorInsnSecondOpnd = static_cast<RegOperand &>(eorInsn->GetOperand(kInsnSecondOpnd));
    ImmOperand &eorInsnThirdOpnd = static_cast<ImmOperand &>(eorInsn->GetOperand(kInsnThirdOpnd));
    if (eorInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber() ||
        eorInsnFirstOpnd.GetRegisterNumber() != eorInsnSecondOpnd.GetRegisterNumber() ||
        eorInsnThirdOpnd.GetValue() != 1) {
        return;
    }
    /* cbz wR, <label> */
    Insn *cbzInsn = eorInsn->GetNextMachineInsn();
    if (cbzInsn == nullptr || cbzInsn->GetMachineOpcode() != MOP_wcbz) {
        return;
    }
    RegOperand &cbzInsnFirstOpnd = static_cast<RegOperand &>(cbzInsn->GetOperand(kInsnFirstOpnd));
    if (cbzInsnFirstOpnd.GetRegisterNumber() != andInsnFirstOpnd.GetRegisterNumber()) {
        return;
    }
    bb.RemoveInsn(*cmpInsn);
    bb.RemoveInsn(*csetInsn);
    bb.RemoveInsn(*eorInsn);
    bb.RemoveInsn(*cbzInsn);
    /* replace insn */
    /* NOTE(review): cbzInsn's label is read after RemoveInsn — presumably
     * RemoveInsn only unlinks (pool-allocated insns are not freed); confirm. */
    auto &label = static_cast<LabelOperand &>(cbzInsn->GetOperand(kInsnSecondOpnd));
    /* Bit position 0 for the tbz ("test bit and branch if zero"). */
    ImmOperand &oneHoleOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(0, k8BitSize, false);
    bb.ReplaceInsn(insn, cgFunc->GetInsnBuilder()->BuildInsn(MOP_wtbz, cbzInsnFirstOpnd, oneHoleOpnd, label));
}
3774
Run(BB & bb,Insn & insn)3775 void AddLdrOpt::Run(BB &bb, Insn &insn)
3776 {
3777 if (insn.GetMachineOpcode() != MOP_xaddrrr) {
3778 return;
3779 }
3780 Insn *nextInsn = insn.GetNextMachineInsn();
3781 if (nextInsn == nullptr) {
3782 return;
3783 }
3784 auto nextMop = nextInsn->GetMachineOpcode();
3785 if (nextMop != MOP_xldr && nextMop != MOP_wldr) {
3786 return;
3787 }
3788 RegOperand &insnFirstOpnd = static_cast<RegOperand&>(insn.GetOperand(kInsnFirstOpnd));
3789 RegOperand &insnSecondOpnd = static_cast<RegOperand&>(insn.GetOperand(kInsnSecondOpnd));
3790 if (insnFirstOpnd.GetRegisterNumber() != insnSecondOpnd.GetRegisterNumber()) {
3791 return;
3792 }
3793 RegOperand &ldrInsnFirstOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
3794 MemOperand &memOpnd = static_cast<MemOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
3795 if (memOpnd.GetAddrMode() != MemOperand::kAddrModeBOi ||
3796 memOpnd.GetBaseRegister()->GetRegisterNumber() != insnFirstOpnd.GetRegisterNumber() ||
3797 ldrInsnFirstOpnd.GetRegisterNumber() != insnFirstOpnd.GetRegisterNumber() ||
3798 memOpnd.GetOffsetImmediate()->GetOffsetValue() != 0) {
3799 return;
3800 }
3801 MemOperand &newMemOpnd = static_cast<AArch64CGFunc*>(cgFunc)->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX,
3802 memOpnd.GetSize(), &insnFirstOpnd, &static_cast<RegOperand&>(insn.GetOperand(kInsnThirdOpnd)), 0, false);
3803 nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
3804 bb.RemoveInsn(insn);
3805 }
3806
Run(BB & bb,Insn & insn)3807 void CsetEorOpt::Run(BB &bb, Insn &insn)
3808 {
3809 if (insn.GetMachineOpcode() != MOP_xcsetrc && insn.GetMachineOpcode() != MOP_wcsetrc) {
3810 return;
3811 }
3812 Insn *nextInsn = insn.GetNextMachineInsn();
3813 if (nextInsn == nullptr ||
3814 (nextInsn->GetMachineOpcode() != MOP_weorrri12 && nextInsn->GetMachineOpcode() != MOP_xeorrri13)) {
3815 return;
3816 }
3817 RegOperand &csetFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3818 RegOperand &eorFirstOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
3819 RegOperand &eorSecondOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
3820 ImmOperand &eorThirdOpnd = static_cast<ImmOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
3821 if (eorThirdOpnd.GetValue() != 1 ||
3822 eorFirstOpnd.GetRegisterNumber() != eorSecondOpnd.GetRegisterNumber() ||
3823 csetFirstOpnd.GetRegisterNumber() != eorFirstOpnd.GetRegisterNumber()) {
3824 return;
3825 }
3826 CondOperand &csetSecondOpnd = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
3827 ConditionCode inverseCondCode = GetReverseCC(csetSecondOpnd.GetCode());
3828 if (inverseCondCode == kCcLast) {
3829 return;
3830 }
3831 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
3832 CondOperand &inverseCondOpnd = aarFunc->GetCondOperand(inverseCondCode);
3833 insn.SetOperand(kInsnSecondOpnd, inverseCondOpnd);
3834 bb.RemoveInsn(*nextInsn);
3835 }
3836
Run(BB & bb,Insn & insn)3837 void MoveCmpOpt::Run(BB &bb, Insn &insn)
3838 {
3839 if (insn.GetMachineOpcode() != MOP_xmovri64 && insn.GetMachineOpcode() != MOP_wmovri32) {
3840 return;
3841 }
3842 ImmOperand &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnSecondOpnd));
3843 if (!immOpnd.IsInBitSize(kMaxImmVal12Bits, 0) && !immOpnd.IsInBitSize(kMaxImmVal12Bits, kMaxImmVal12Bits)) {
3844 return;
3845 }
3846 Insn *nextInsn = insn.GetNextMachineInsn();
3847 if (nextInsn == nullptr ||
3848 (nextInsn->GetMachineOpcode() != MOP_wcmprr && nextInsn->GetMachineOpcode() != MOP_xcmprr)) {
3849 return;
3850 }
3851 RegOperand &cmpSecondOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
3852 RegOperand &cmpThirdOpnd = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
3853 RegOperand &movFirstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
3854 if (cmpSecondOpnd.GetRegisterNumber() == cmpThirdOpnd.GetRegisterNumber()) {
3855 return;
3856 }
3857 if (cmpThirdOpnd.GetRegisterNumber() != movFirstOpnd.GetRegisterNumber()) {
3858 return;
3859 }
3860 MOperator cmpOpCode = (cmpThirdOpnd.GetSize() == k64BitSize) ? MOP_xcmpri : MOP_wcmpri;
3861 Insn &newCmpInsn = cgFunc->GetInsnBuilder()->BuildInsn(
3862 cmpOpCode, nextInsn->GetOperand(kInsnFirstOpnd), nextInsn->GetOperand(kInsnSecondOpnd), immOpnd);
3863 bb.ReplaceInsn(*nextInsn, newCmpInsn);
3864 if (!IfOperandIsLiveAfterInsn(movFirstOpnd, newCmpInsn)) {
3865 bb.RemoveInsn(insn);
3866 }
3867 }
3868
CheckCondition(Insn & insn)3869 bool InlineReadBarriersPattern::CheckCondition(Insn &insn)
3870 {
3871 /* Inline read barriers only enabled for GCONLY. */
3872 if (!CGOptions::IsGCOnly()) {
3873 return false;
3874 }
3875 return true;
3876 }
3877
/*
 * Inlines MCC_* read-barrier runtime calls: a dummy call is deleted, the
 * real ones are replaced by a direct load of the reference
 *   ldr  w/x0, [x0 or x1]   (x0 for static fields, x1 for instance fields)
 * For tail-call-optimized barrier calls, a ret is appended since the call
 * was the last insn of the function.
 */
void InlineReadBarriersPattern::Run(BB &bb, Insn &insn)
{
    if (!CheckCondition(insn)) {
        return;
    }
    /* Which MCC_ barrier helper (or the dummy) this call targets. */
    const std::string &barrierName = GetReadBarrierName(insn);
    if (barrierName == kMccDummy) {
        /* remove dummy call. */
        bb.RemoveInsn(insn);
    } else {
        /* replace barrier function call with load instruction. */
        bool isVolatile = (barrierName == kMccLoadRefV || barrierName == kMccLoadRefVS);
        bool isStatic = (barrierName == kMccLoadRefS || barrierName == kMccLoadRefVS);
        /* refSize is 32 if USE_32BIT_REF defined, otherwise 64. */
        const uint32 refSize = kRefSize;
        auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
        /* GetLoadOperator is defined elsewhere; presumably it picks an
         * acquire-load for volatile references — confirm at its definition. */
        MOperator loadOp = GetLoadOperator(refSize, isVolatile);
        /* Result goes in r0, matching the call's return register. */
        RegOperand &regOp = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(R0, refSize, kRegTyInt);
        /* Static loads take the address in r0, instance loads in r1. */
        AArch64reg addrReg = isStatic ? R0 : R1;
        MemOperand &addr = aarch64CGFunc->CreateMemOpnd(addrReg, 0, refSize);
        Insn &loadInsn = cgFunc->GetInsnBuilder()->BuildInsn(loadOp, regOp, addr);
        bb.ReplaceInsn(insn, loadInsn);
    }
    /* NOTE(review): insn is queried after being removed/replaced — presumably
     * safe because insns are pool-allocated and only unlinked; confirm. */
    bool isTailCall = (insn.GetMachineOpcode() == MOP_tail_call_opt_xbl);
    if (isTailCall) {
        /* add 'ret' instruction for tail call optimized load barrier. */
        Insn &retInsn = cgFunc->GetInsnBuilder()->BuildInsn<AArch64CG>(MOP_xret);
        bb.AppendInsn(retInsn);
        bb.SetKind(BB::kBBReturn);
    }
}
3909
CheckCondition(Insn & insn)3910 bool ReplaceDivToMultiPattern::CheckCondition(Insn &insn)
3911 {
3912 prevInsn = insn.GetPreviousMachineInsn();
3913 if (prevInsn == nullptr) {
3914 return false;
3915 }
3916 prePrevInsn = prevInsn->GetPreviousMachineInsn();
3917 auto &sdivOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
3918 auto &sdivOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
3919 if (sdivOpnd1.GetRegisterNumber() == sdivOpnd2.GetRegisterNumber() || sdivOpnd1.GetRegisterNumber() == R16 ||
3920 sdivOpnd2.GetRegisterNumber() == R16 || prePrevInsn == nullptr) {
3921 return false;
3922 }
3923 MOperator prevMop = prevInsn->GetMachineOpcode();
3924 MOperator prePrevMop = prePrevInsn->GetMachineOpcode();
3925 if (prevMop && (prevMop == MOP_wmovkri16) && prePrevMop && (prePrevMop == MOP_wmovri32)) {
3926 return true;
3927 }
3928 return false;
3929 }
3930
/*
 * Replaces a signed divide by the constant 100000 (0x186A0, built by
 * "mov wD,#0x86a0 ; movk wD,#1,LSL 16") with a multiply-by-reciprocal
 * sequence through scratch register x16:
 *   mov  w16, #0x588f          // low half of the fixed-point multiplier
 *   movk w16, #0x4f8b, LSL 16  // 0x4f8b588f — presumably the magic
 *                              // reciprocal for /100000; confirm derivation
 *   smull x16, wN, w16
 *   asr  x16, x16, #32
 *   add  x16, x16, wN, SXTW
 *   asr  x16, x16, #17
 *   add  xQ,  x16, xN, LSR #31 // round toward zero for negative dividends
 * The original mov/movk/sdiv are then removed (the divisor register is
 * kept if it is still live after the sdiv).
 */
void ReplaceDivToMultiPattern::Run(BB &bb, Insn &insn)
{
    if (CheckCondition(insn)) {
        auto &sdivOpnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        auto &sdivOpnd2 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
        /* Check if dest operand of insn is idential with register of prevInsn and prePrevInsn. */
        if ((&(prevInsn->GetOperand(kInsnFirstOpnd)) != &sdivOpnd2) ||
            (&(prePrevInsn->GetOperand(kInsnFirstOpnd)) != &sdivOpnd2)) {
            return;
        }
        /* The movk must place its halfword in bits [31:16]. */
        auto &prevLsl = static_cast<BitShiftOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
        if (prevLsl.GetShiftAmount() != k16BitSize) {
            return;
        }
        auto &prevImmOpnd = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
        auto &prePrevImmOpnd = static_cast<ImmOperand &>(prePrevInsn->GetOperand(kInsnSecondOpnd));
        /*
         * expect the immediate value of first mov is 0x086A0 which matches 0x186A0
         * because 0x10000 is ignored in 32 bits register
         */
        if ((prevImmOpnd.GetValue() != 1) || (prePrevImmOpnd.GetValue() != 0x86a0)) {
            return;
        }
        auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
        /* mov w16, #0x588f */
        RegOperand &tempOpnd =
            aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(R16), k64BitSize, kRegTyInt);
        /* create a immedate operand with this specific value */
        ImmOperand &multiplierLow = aarch64CGFunc->CreateImmOperand(0x588f, k32BitSize, false);
        Insn &multiplierLowInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_wmovri32, tempOpnd, multiplierLow);
        bb.InsertInsnBefore(*prePrevInsn, multiplierLowInsn);

        /*
         * movk w16, #0x4f8b, LSL #16
         * create a immedate operand with this specific value
         */
        ImmOperand &multiplierHigh = aarch64CGFunc->CreateImmOperand(0x4f8b, k32BitSize, false);
        BitShiftOperand *multiplierHighLsl = aarch64CGFunc->GetLogicalShiftLeftOperand(k16BitSize, true);
        Insn &multiplierHighInsn =
            cgFunc->GetInsnBuilder()->BuildInsn(MOP_wmovkri16, tempOpnd, multiplierHigh, *multiplierHighLsl);
        bb.InsertInsnBefore(*prePrevInsn, multiplierHighInsn);

        /* smull x16, w0, w16 */
        Insn &newSmullInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xsmullrrr, tempOpnd, sdivOpnd1, tempOpnd);
        bb.InsertInsnBefore(*prePrevInsn, newSmullInsn);

        /* asr x16, x16, #32 */
        ImmOperand &dstLsrImmHigh = aarch64CGFunc->CreateImmOperand(k32BitSize, k32BitSize, false);
        Insn &dstLsrInsnHigh = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xasrrri6, tempOpnd, tempOpnd, dstLsrImmHigh);
        bb.InsertInsnBefore(*prePrevInsn, dstLsrInsnHigh);

        /* add x16, x16, w0, SXTW */
        Operand &sxtw = aarch64CGFunc->CreateExtendShiftOperand(ExtendShiftOperand::kSXTW, 0, 3);
        Insn &addInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xxwaddrrre, tempOpnd, tempOpnd, sdivOpnd1, sxtw);
        bb.InsertInsnBefore(*prePrevInsn, addInsn);

        /* asr x16, x16, #17 */
        ImmOperand &dstLsrImmChange = aarch64CGFunc->CreateImmOperand(17, k32BitSize, false);
        Insn &dstLsrInsnChange = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xasrrri6, tempOpnd, tempOpnd, dstLsrImmChange);
        bb.InsertInsnBefore(*prePrevInsn, dstLsrInsnChange);

        /* add x2, x16, x0, LSR #31 */
        auto &sdivOpnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
        regno_t sdivOpnd0RegNO = sdivOpnd0.GetRegisterNumber();
        /* Reuse the sdiv's own registers, widened to 64 bits for the add. */
        RegOperand &extSdivO0 = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
            static_cast<AArch64reg>(sdivOpnd0RegNO), k64BitSize, kRegTyInt);

        regno_t sdivOpnd1RegNum = sdivOpnd1.GetRegisterNumber();
        RegOperand &extSdivO1 = aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(
            static_cast<AArch64reg>(sdivOpnd1RegNum), k64BitSize, kRegTyInt);
        /* shift bit amount is thirty-one at this insn */
        BitShiftOperand &addLsrOpnd = aarch64CGFunc->CreateBitShiftOperand(BitShiftOperand::kLSR, 31, 6);
        Insn &addLsrInsn =
            cgFunc->GetInsnBuilder()->BuildInsn(MOP_xaddrrrs, extSdivO0, tempOpnd, extSdivO1, addLsrOpnd);
        bb.InsertInsnBefore(*prePrevInsn, addLsrInsn);

        /*
         * remove insns
         * Check if x1 is used after sdiv insn, and if it is in live-out.
         */
        if (sdivOpnd2.GetRegisterNumber() != sdivOpnd0.GetRegisterNumber()) {
            if (IfOperandIsLiveAfterInsn(sdivOpnd2, insn)) {
                /* Only remove div instruction. */
                bb.RemoveInsn(insn);
                return;
            }
        }

        bb.RemoveInsn(*prePrevInsn);
        bb.RemoveInsn(*prevInsn);
        bb.RemoveInsn(insn);
    }
}
4024
FindPreviousCmp(Insn & insn) const4025 Insn *AndCmpBranchesToCsetAArch64::FindPreviousCmp(Insn &insn) const
4026 {
4027 regno_t defRegNO = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
4028 for (Insn *curInsn = insn.GetPrev(); curInsn != nullptr; curInsn = curInsn->GetPrev()) {
4029 if (!curInsn->IsMachineInstruction()) {
4030 continue;
4031 }
4032 if (curInsn->GetMachineOpcode() == MOP_wcmpri || curInsn->GetMachineOpcode() == MOP_xcmpri) {
4033 return curInsn;
4034 }
4035 /*
4036 * if any def/use of CC or insn defReg between insn and curInsn, stop searching and return nullptr.
4037 */
4038 if (curInsn->ScanReg(defRegNO) || curInsn->ScanReg(kRFLAG)) {
4039 return nullptr;
4040 }
4041 }
4042 return nullptr;
4043 }
4044
/*
 * Simplifies the bit-test idiom
 *   and  rM, rN, #mask
 *   cmp  rM, #imm
 *   cset rD, EQ/NE
 * into either the and alone (mask == 1 — the and already yields 0/1) or a
 * single-bit extract "ubfx rD, rN, #log2(mask), #1" (mask a power of two),
 * provided the flags and intermediate register are dead afterwards.
 */
void AndCmpBranchesToCsetAArch64::Run(BB &bb, Insn &insn)
{
    /* prevInsn must be "cmp" insn */
    Insn *prevInsn = FindPreviousCmp(insn);
    if (prevInsn == nullptr) {
        return;
    }
    /* prevPrevInsn must be "and" insn */
    Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
    if (prevPrevInsn == nullptr ||
        (prevPrevInsn->GetMachineOpcode() != MOP_wandrri12 && prevPrevInsn->GetMachineOpcode() != MOP_xandrri13)) {
        return;
    }

    auto &csetCond = static_cast<CondOperand &>(insn.GetOperand(kInsnSecondOpnd));
    auto &cmpImm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
    int64 cmpImmVal = cmpImm.GetValue();
    auto &andImm = static_cast<ImmOperand &>(prevPrevInsn->GetOperand(kInsnThirdOpnd));
    int64 andImmVal = andImm.GetValue();
    /* Either "EQ against the full mask" or "NE against zero": both mean
     * "the masked bits are (all) set", matching the and's own 0/nonzero result. */
    if ((csetCond.GetCode() == CC_EQ && cmpImmVal == andImmVal) || (csetCond.GetCode() == CC_NE && cmpImmVal == 0)) {
        /* if flag_reg of "cmp" is live later, we can't remove cmp insn. */
        auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
        if (IfOperandIsLiveAfterInsn(flagReg, insn)) {
            return;
        }

        /* If the and writes a different register than the cset and that
         * register is live later, we'd clobber it: bail out. */
        auto &csetReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
        auto &prevInsnSecondReg = prevInsn->GetOperand(kInsnSecondOpnd);
        bool isRegDiff = !RegOperand::IsSameRegNO(csetReg, prevInsnSecondReg);
        if (isRegDiff && IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(prevInsnSecondReg), insn)) {
            return;
        }
        if (andImmVal == 1) {
            if (!RegOperand::IsSameRegNO(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnFirstOpnd))) {
                return;
            }
            /* save the "and" insn only. */
            bb.RemoveInsn(insn);
            bb.RemoveInsn(*prevInsn);
            if (isRegDiff) {
                /* Retarget the and at the cset's destination. */
                prevPrevInsn->Insn::SetOperand(kInsnFirstOpnd, csetReg);
            }
        } else {
            if (!RegOperand::IsSameReg(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnFirstOpnd)) ||
                !RegOperand::IsSameReg(prevInsnSecondReg, prevPrevInsn->GetOperand(kInsnSecondOpnd))) {
                return;
            }

            /* andImmVal is n power of 2 */
            int n = logValueAtBase2(andImmVal);
            if (n < 0) {
                return;
            }

            /* create ubfx insn */
            MOperator ubfxOp = (csetReg.GetSize() <= k32BitSize) ? MOP_wubfxrri5i5 : MOP_xubfxrri6i6;
            /* Bit position must be encodable for the 32-bit form. */
            if (ubfxOp == MOP_wubfxrri5i5 && static_cast<uint32>(n) >= k32BitSize) {
                return;
            }
            auto &dstReg = static_cast<RegOperand &>(csetReg);
            auto &srcReg = static_cast<RegOperand &>(prevInsnSecondReg);
            auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
            ImmOperand &bitPos = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
            ImmOperand &bitSize = aarch64CGFunc->CreateImmOperand(1, k8BitSize, false);
            Insn &ubfxInsn = cgFunc.GetInsnBuilder()->BuildInsn(ubfxOp, dstReg, srcReg, bitPos, bitSize);
            bb.InsertInsnBefore(*prevPrevInsn, ubfxInsn);
            bb.RemoveInsn(insn);
            bb.RemoveInsn(*prevInsn);
            bb.RemoveInsn(*prevPrevInsn);
        }
    }
}
4117
/*
 * Rewrites
 *   and rD, rN, <imm|reg>
 *   cmp rD, #0
 *   beq/bne <label>
 * into
 *   tst rN, <imm|reg>
 *   beq/bne <label>
 * removing the intermediate register, provided rD is dead after the cmp.
 */
void AndCmpBranchesToTstAArch64::Run(BB &bb, Insn &insn)
{
    /* nextInsn must be "cmp" insn */
    Insn *nextInsn = insn.GetNextMachineInsn();
    if (nextInsn == nullptr ||
        (nextInsn->GetMachineOpcode() != MOP_wcmpri && nextInsn->GetMachineOpcode() != MOP_xcmpri)) {
        return;
    }
    /* nextNextInsn must be "beq" or "bne" insn */
    Insn *nextNextInsn = nextInsn->GetNextMachineInsn();
    if (nextNextInsn == nullptr ||
        (nextNextInsn->GetMachineOpcode() != MOP_beq && nextNextInsn->GetMachineOpcode() != MOP_bne)) {
        return;
    }
    /* The cmp must test the and's destination register. */
    auto &andRegOp = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    regno_t andRegNO1 = andRegOp.GetRegisterNumber();
    auto &cmpRegOp2 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
    regno_t cmpRegNO2 = cmpRegOp2.GetRegisterNumber();
    if (andRegNO1 != cmpRegNO2) {
        return;
    }
    /* If the reg will be used later, we shouldn't optimize the and insn here */
    if (IfOperandIsLiveAfterInsn(andRegOp, *nextInsn)) {
        return;
    }
    /* Only "cmp rD, #0" qualifies (tst sets flags from rN & opnd vs zero). */
    Operand &immOpnd = nextInsn->GetOperand(kInsnThirdOpnd);
    DEBUG_ASSERT(immOpnd.IsIntImmediate(), "expects ImmOperand");
    auto &defConst = static_cast<ImmOperand &>(immOpnd);
    int64 defConstValue = defConst.GetValue();
    if (defConstValue != 0) {
        return;
    }
    /* build tst insn */
    /* Pick register or immediate form at the width of the and's source. */
    Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
    auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    MOperator newOp = MOP_undef;
    if (andOpnd3.IsRegister()) {
        newOp = (andRegOp2.GetSize() <= k32BitSize) ? MOP_wtstrr : MOP_xtstrr;
    } else {
        newOp = (andRegOp2.GetSize() <= k32BitSize) ? MOP_wtstri32 : MOP_xtstri64;
    }
    Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newOp, rflag, andRegOp2, andOpnd3);
    if (CGOptions::DoCGSSA() && CGOptions::GetInstance().GetOptimizeLevel() < CGOptions::kLevel0) {
        CHECK_FATAL(false, "check this case in ssa opt");
    }
    /* Insert tst after the cmp, then delete both original insns; the
     * beq/bne is left in place and now consumes the tst's flags. */
    bb.InsertInsnAfter(*nextInsn, newInsn);
    bb.RemoveInsn(insn);
    bb.RemoveInsn(*nextInsn);
}
4168
Run(BB & bb,Insn & insn)4169 void AndCbzBranchesToTstAArch64::Run(BB &bb, Insn &insn)
4170 {
4171 /* nextInsn must be "cbz" or "cbnz" insn */
4172 Insn *nextInsn = insn.GetNextMachineInsn();
4173 if (nextInsn == nullptr || (nextInsn->GetMachineOpcode() != MOP_wcbz && nextInsn->GetMachineOpcode() != MOP_xcbz)) {
4174 return;
4175 }
4176 auto &andRegOp = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4177 regno_t andRegNO1 = andRegOp.GetRegisterNumber();
4178 auto &cbzRegOp2 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
4179 regno_t cbzRegNO2 = cbzRegOp2.GetRegisterNumber();
4180 if (andRegNO1 != cbzRegNO2) {
4181 return;
4182 }
4183 /* If the reg will be used later, we shouldn't optimize the and insn here */
4184 if (IfOperandIsLiveAfterInsn(andRegOp, *nextInsn)) {
4185 return;
4186 }
4187 /* build tst insn */
4188 Operand &andOpnd3 = insn.GetOperand(kInsnThirdOpnd);
4189 auto &andRegOp2 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
4190 auto &andRegOp3 = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
4191 MOperator newTstOp = MOP_undef;
4192 if (andOpnd3.IsRegister()) {
4193 newTstOp = (andRegOp2.GetSize() <= k32BitSize && andRegOp3.GetSize() <= k32BitSize) ? MOP_wtstrr : MOP_xtstrr;
4194 } else {
4195 newTstOp =
4196 (andRegOp2.GetSize() <= k32BitSize && andRegOp3.GetSize() <= k32BitSize) ? MOP_wtstri32 : MOP_xtstri64;
4197 }
4198 Operand &rflag = static_cast<AArch64CGFunc *>(&cgFunc)->GetOrCreateRflag();
4199 Insn &newInsnTst = cgFunc.GetInsnBuilder()->BuildInsn(newTstOp, rflag, andRegOp2, andOpnd3);
4200 if (andOpnd3.IsImmediate()) {
4201 if (!static_cast<ImmOperand &>(andOpnd3).IsBitmaskImmediate(andRegOp2.GetSize())) {
4202 return;
4203 }
4204 }
4205 /* build beq insn */
4206 MOperator opCode = nextInsn->GetMachineOpcode();
4207 bool reverse = (opCode == MOP_xcbz || opCode == MOP_wcbz);
4208 auto &label = static_cast<LabelOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
4209 MOperator jmpOperator = reverse ? MOP_beq : MOP_bne;
4210 Insn &newInsnJmp = cgFunc.GetInsnBuilder()->BuildInsn(jmpOperator, rflag, label);
4211 bb.ReplaceInsn(insn, newInsnTst);
4212 bb.ReplaceInsn(*nextInsn, newInsnJmp);
4213 }
4214
/*
 * Fold a compare-against-zero (immediate 0 or wzr/xzr) followed by a signed
 * conditional branch into a single tbz/tbnz on the sign bit of the tested register.
 * Only the condition codes that reduce to a pure sign-bit test when the other
 * operand is zero are handled (bge/blt, and ble/bgt with a zero register).
 */
Run(BB & bb,Insn & insn)4215 void ZeroCmpBranchesAArch64::Run(BB &bb, Insn &insn)
4216 {
4217 Insn *prevInsn = insn.GetPreviousMachineInsn();
/* insn must be a conditional branch carrying a label, preceded by the compare */
4218 if (!insn.IsBranch() || insn.GetOperandSize() <= kInsnSecondOpnd || prevInsn == nullptr) {
4219 return;
4220 }
4221 if (!insn.GetOperand(kInsnSecondOpnd).IsLabel()) {
4222 return;
4223 }
4224 LabelOperand *label = &static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
4225 RegOperand *regOpnd = nullptr;
4226 RegOperand *reg0 = nullptr;
4227 RegOperand *reg1 = nullptr;
4228 MOperator newOp = MOP_undef;
4229 ImmOperand *imm = nullptr;
/* only compares against zero qualify: "cmp reg, #0" or "cmp" with wzr/xzr */
4230 switch (prevInsn->GetMachineOpcode()) {
4231 case MOP_wcmpri:
4232 case MOP_xcmpri: {
4233 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
4234 imm = &static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
4235 if (imm->GetValue() != 0) {
4236 return;
4237 }
/* cmp reg, #0: bge taken iff sign bit clear (tbz), blt taken iff sign bit set (tbnz) */
4238 if (insn.GetMachineOpcode() == MOP_bge) {
4239 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
4240 } else if (insn.GetMachineOpcode() == MOP_blt) {
4241 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
4242 } else {
4243 return;
4244 }
4245 break;
4246 }
4247 case MOP_wcmprr:
4248 case MOP_xcmprr: {
4249 reg0 = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
4250 reg1 = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
4251 if (!IsZeroRegister(*reg0) && !IsZeroRegister(*reg1)) {
4252 return;
4253 }
/* pick the non-zero operand actually being tested; each condition only reduces to a
 * sign-bit test when the zero register sits on the matching side of the compare */
4254 switch (insn.GetMachineOpcode()) {
4255 case MOP_bge:
4256 if (IsZeroRegister(*reg1)) {
4257 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
4258 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
4259 } else {
4260 return;
4261 }
4262 break;
4263 case MOP_ble:
4264 if (IsZeroRegister(*reg0)) {
4265 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
4266 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbz : MOP_xtbz;
4267 } else {
4268 return;
4269 }
4270 break;
4271 case MOP_blt:
4272 if (IsZeroRegister(*reg1)) {
4273 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
4274 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
4275 } else {
4276 return;
4277 }
4278 break;
4279 case MOP_bgt:
4280 if (IsZeroRegister(*reg0)) {
4281 regOpnd = &static_cast<RegOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
4282 newOp = (regOpnd->GetSize() <= k32BitSize) ? MOP_wtbnz : MOP_xtbnz;
4283 } else {
4284 return;
4285 }
4286 break;
4287 default:
4288 return;
4289 }
4290 break;
4291 }
4292 default:
4293 return;
4294 }
4295 auto aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
/* test the sign bit: bit 31 for a 32-bit register, bit 63 for a 64-bit register */
4296 ImmOperand &bitp = aarch64CGFunc->CreateImmOperand(
4297 (regOpnd->GetSize() <= k32BitSize) ? (k32BitSize - 1) : (k64BitSize - 1), k8BitSize, false);
4298 bb.InsertInsnAfter(insn,
4299 cgFunc.GetInsnBuilder()->BuildInsn(newOp, *static_cast<RegOperand *>(regOpnd), bitp, *label));
4300 bb.RemoveInsn(insn);
4301 bb.RemoveInsn(*prevInsn);
4302 }
4303
/*
 * Eliminate a sign/zero extension whose source register was just written by another
 * extension: the second extension is rewritten into a plain mov.
 * "index" is the current mop's entry in the relevant extension table; the rewrite
 * fires when the previous insn's mop matches any table entry from that index onward.
 * NOTE(review): entries at or after "index" are extensions of equal or wider source
 * width than the current one -- confirm this is the intended redundancy direction.
 */
Run(BB & bb,Insn & insn)4304 void ElimDuplicateExtensionAArch64::Run(BB &bb, Insn &insn)
4305 {
4306 (void)bb;
4307 Insn *prevInsn = insn.GetPreviousMachineInsn();
4308 if (prevInsn == nullptr) {
4309 return;
4310 }
4311 uint32 index;
4312 uint32 upper;
4313 bool is32bits = false;
4314 MOperator *table = nullptr;
4315 MOperator thisMop = insn.GetMachineOpcode();
/* select the extension table (signed vs unsigned) and the starting entry */
4316 switch (thisMop) {
4317 case MOP_xsxtb32:
4318 is32bits = true;
4319 [[clang::fallthrough]];
4320 case MOP_xsxtb64:
4321 table = sextMopTable;
4322 index = 0; // 0 is index of MOP_xsxtb32 in table sextMopTable
4323 upper = kSizeOfSextMopTable;
4324 break;
4325 case MOP_xsxth32:
4326 is32bits = true;
4327 [[clang::fallthrough]];
4328 case MOP_xsxth64:
4329 table = sextMopTable;
4330 index = 2; // 2 is index of MOP_xsxth32 in table sextMopTable
4331 upper = kSizeOfSextMopTable;
4332 break;
4333 case MOP_xsxtw64:
4334 table = sextMopTable;
4335 index = 4; // 4 is index of MOP_xsxtw64 in table sextMopTable
4336 upper = kSizeOfSextMopTable;
4337 break;
4338 case MOP_xuxtb32:
4339 is32bits = true;
4340 table = uextMopTable;
4341 index = 0; // 0 is index of MOP_xuxtb32 in table uextMopTable
4342 upper = kSizeOfUextMopTable;
4343 break;
4344 case MOP_xuxth32:
4345 is32bits = true;
4346 table = uextMopTable;
4347 index = 1; // 1 is index of MOP_xuxth32 in table uextMopTable
4348 upper = kSizeOfUextMopTable;
4349 break;
4350 case MOP_xuxtw64:
4351 table = uextMopTable;
4352 index = 2; // 2 is index of MOP_xuxtw64 in table uextMopTable
4353 upper = kSizeOfUextMopTable;
4354 break;
4355 default:
4356 CHECK_FATAL(false, "Unexpected mop");
4357 }
4358 MOperator prevMop = prevInsn->GetMachineOpcode();
4359 for (uint32 i = index; i < upper; ++i) {
4360 if (prevMop == table[i]) {
/* the previous extension's destination must feed this extension's source */
4361 Operand &prevDestOpnd = prevInsn->GetOperand(kInsnFirstOpnd);
4362 regno_t dest = static_cast<RegOperand &>(prevDestOpnd).GetRegisterNumber();
4363 regno_t src = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
4364 if (dest == src) {
4365 insn.SetMOP(is32bits ? AArch64CG::kMd[MOP_wmovrr] : AArch64CG::kMd[MOP_xmovrr]);
/* when the two defs disagree on valid-bit width (sign-extension table only),
 * widen either the mov or the previous extension to keep the 64-bit view
 * consistent — presumably so later passes see well-defined upper bits */
4366 if (upper == kSizeOfSextMopTable &&
4367 static_cast<RegOperand &>(prevDestOpnd).GetValidBitsNum() !=
4368 static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetValidBitsNum()) {
4369 if (is32bits) {
4370 insn.GetOperand(kInsnFirstOpnd).SetSize(k64BitSize);
4371 insn.SetMOP(AArch64CG::kMd[MOP_xmovrr]);
4372 } else {
4373 prevDestOpnd.SetSize(k64BitSize);
4374 prevInsn->SetMOP(prevMop == MOP_xsxtb32 ? AArch64CG::kMd[MOP_xsxtb64]
4375 : AArch64CG::kMd[MOP_xsxth64]);
4376 }
4377 }
4378 }
4379 break;
4380 }
4381 }
4382 }
4383
4384 /*
4385 * if there is define point of checkInsn->GetOperand(opndIdx) between startInsn and firstInsn
4386 * return define insn. else return nullptr
4387 */
DefInsnOfOperandInBB(const Insn & startInsn,const Insn & checkInsn,int opndIdx) const4388 const Insn *CmpCsetAArch64::DefInsnOfOperandInBB(const Insn &startInsn, const Insn &checkInsn, int opndIdx) const
4389 {
4390 Insn *prevInsn = nullptr;
/* walk backwards from startInsn towards the top of the BB */
4391 for (const Insn *insn = &startInsn; insn != nullptr; insn = prevInsn) {
4392 prevInsn = insn->GetPreviousMachineInsn();
4393 if (!insn->IsMachineInstruction()) {
4394 continue;
4395 }
4396 /* checkInsn.GetOperand(opndIdx) is thought modified conservatively */
4397 if (insn->IsCall()) {
4398 return insn;
4399 }
4400 const InsnDesc *md = insn->GetDesc();
4401 uint32 opndNum = insn->GetOperandSize();
/* only def operands can establish a definition point */
4402 for (uint32 i = 0; i < opndNum; ++i) {
4403 Operand &opnd = insn->GetOperand(i);
4404 if (!md->opndMD[i]->IsDef()) {
4405 continue;
4406 }
4407 /* Operand is base reg of Memory, defined by str */
4408 if (opnd.IsMemoryAccessOperand()) {
4409 auto &memOpnd = static_cast<MemOperand &>(opnd);
4410 RegOperand *base = memOpnd.GetBaseRegister();
4411 DEBUG_ASSERT(base != nullptr, "nullptr check");
4412 DEBUG_ASSERT(base->IsRegister(), "expects RegOperand");
/* pre/post-indexed addressing writes the base register back */
4413 if (RegOperand::IsSameRegNO(*base, checkInsn.GetOperand(static_cast<uint32>(opndIdx))) &&
4414 memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
4415 (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
4416 return insn;
4417 }
4418 } else {
4419 DEBUG_ASSERT(opnd.IsRegister(), "expects RegOperand");
4420 if (RegOperand::IsSameRegNO(checkInsn.GetOperand(static_cast<uint32>(opndIdx)), opnd)) {
4421 return insn;
4422 }
4423 }
4424 }
4425 }
/* no definition of the operand between startInsn and the top of the BB */
4426 return nullptr;
4427 }
4428
OpndDefByOneValidBit(const Insn & defInsn) const4429 bool CmpCsetAArch64::OpndDefByOneValidBit(const Insn &defInsn) const
4430 {
4431 MOperator defMop = defInsn.GetMachineOpcode();
4432 switch (defMop) {
4433 case MOP_wcsetrc:
4434 case MOP_xcsetrc:
4435 return true;
4436 case MOP_wmovri32:
4437 case MOP_xmovri64: {
4438 Operand &defOpnd = defInsn.GetOperand(kInsnSecondOpnd);
4439 DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
4440 auto &defConst = static_cast<ImmOperand &>(defOpnd);
4441 int64 defConstValue = defConst.GetValue();
4442 return (defConstValue == 0 || defConstValue == 1);
4443 }
4444 case MOP_xmovrr:
4445 case MOP_wmovrr:
4446 return IsZeroRegister(defInsn.GetOperand(kInsnSecondOpnd));
4447 case MOP_wlsrrri5:
4448 case MOP_xlsrrri6: {
4449 Operand &opnd2 = defInsn.GetOperand(kInsnThirdOpnd);
4450 DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
4451 auto &opndImm = static_cast<ImmOperand &>(opnd2);
4452 int64 shiftBits = opndImm.GetValue();
4453 return ((defMop == MOP_wlsrrri5 && shiftBits == (k32BitSize - 1)) ||
4454 (defMop == MOP_xlsrrri6 && shiftBits == (k64BitSize - 1)));
4455 }
4456 default:
4457 return false;
4458 }
4459 }
4460
4461 /*
4462 * help function for cmpcset optimize
4463 * if all define points of used opnd in insn has only one valid bit,return true.
4464 * for cmp reg,#0(#1), that is checking for reg
4465 */
CheckOpndDefPoints(Insn & checkInsn,int opndIdx)4466 bool CmpCsetAArch64::CheckOpndDefPoints(Insn &checkInsn, int opndIdx)
4467 {
4468 if (checkInsn.GetBB()->GetPrev() == nullptr) {
4469 /* For 1st BB, be conservative for def of parameter registers */
4470 /* Since peep is light weight, do not want to insert pseudo defs */
4471 regno_t reg = static_cast<RegOperand &>(checkInsn.GetOperand(static_cast<uint32>(opndIdx))).GetRegisterNumber();
4472 if ((reg >= R0 && reg <= R7) || (reg >= D0 && reg <= D7)) {
4473 return false;
4474 }
4475 }
4476 /* check current BB */
4477 const Insn *defInsn = DefInsnOfOperandInBB(checkInsn, checkInsn, opndIdx);
4478 if (defInsn != nullptr) {
4479 return OpndDefByOneValidBit(*defInsn);
4480 }
4481 /* check pred */
4482 for (auto predBB : checkInsn.GetBB()->GetPreds()) {
4483 const Insn *tempInsn = nullptr;
4484 if (predBB->GetLastInsn() != nullptr) {
4485 tempInsn = DefInsnOfOperandInBB(*predBB->GetLastInsn(), checkInsn, opndIdx);
4486 }
4487 if (tempInsn == nullptr || !OpndDefByOneValidBit(*tempInsn)) {
4488 return false;
4489 }
4490 }
4491 return true;
4492 }
4493
4494 /* Check there is use point of rflag start from startInsn to current bb bottom */
FlagUsedLaterInCurBB(const BB & bb,Insn & startInsn) const4495 bool CmpCsetAArch64::FlagUsedLaterInCurBB(const BB &bb, Insn &startInsn) const
4496 {
4497 if (&bb != startInsn.GetBB()) {
4498 return false;
4499 }
4500 Insn *nextInsn = nullptr;
4501 for (Insn *insn = &startInsn; insn != nullptr; insn = nextInsn) {
4502 nextInsn = insn->GetNextMachineInsn();
4503 const InsnDesc *md = insn->GetDesc();
4504 uint32 opndNum = insn->GetOperandSize();
4505 for (uint32 i = 0; i < opndNum; ++i) {
4506 Operand &opnd = insn->GetOperand(i);
4507 /*
4508 * For condition operand, such as NE, EQ and so on, the register number should be
4509 * same with RFLAG, we only need check the property of use/def.
4510 */
4511 if (!opnd.IsConditionCode()) {
4512 continue;
4513 }
4514 if (md->opndMD[i]->IsUse()) {
4515 return true;
4516 } else {
4517 DEBUG_ASSERT(md->opndMD[i]->IsDef(), "register should be redefined.");
4518 return false;
4519 }
4520 }
4521 }
4522 return false;
4523 }
4524
/*
 * cmp reg, #0/#1 ; cset dst, EQ/NE  ==>  mov dst, reg  (or eor dst, reg, #1)
 * when reg is known to hold only 0 or 1:
 *   cmp reg,#0 + cset NE  or  cmp reg,#1 + cset EQ   -> copy reg (identity)
 *   cmp reg,#1 + cset NE  or  cmp reg,#0 + cset EQ   -> invert the low bit
 * Requires the flags produced by the cmp to be dead afterwards.
 */
Run(BB & bb,Insn & insn)4525 void CmpCsetAArch64::Run(BB &bb, Insn &insn)
4526 {
4527 Insn *nextInsn = insn.GetNextMachineInsn();
4528 if (nextInsn == nullptr) {
4529 return;
4530 }
4531 MOperator firstMop = insn.GetMachineOpcode();
4532 MOperator secondMop = nextInsn->GetMachineOpcode();
4533 if ((firstMop == MOP_wcmpri || firstMop == MOP_xcmpri) && (secondMop == MOP_wcsetrc || secondMop == MOP_xcsetrc)) {
4534 Operand &cmpFirstOpnd = insn.GetOperand(kInsnSecondOpnd);
4535 /* get ImmOperand, must be 0 or 1 */
4536 Operand &cmpSecondOpnd = insn.GetOperand(kInsnThirdOpnd);
4537 auto &cmpFlagReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
4538 DEBUG_ASSERT(cmpSecondOpnd.IsIntImmediate(), "expects ImmOperand");
4539 auto &cmpConst = static_cast<ImmOperand &>(cmpSecondOpnd);
4540 int64 cmpConstVal = cmpConst.GetValue();
4541 Operand &csetFirstOpnd = nextInsn->GetOperand(kInsnFirstOpnd);
/* bail out unless: immediate is 0/1, every def of the compared reg yields a single
 * valid bit, and the flags are dead both later in this BB and across BB exits */
4542 if ((cmpConstVal != 0 && cmpConstVal != 1) || !CheckOpndDefPoints(insn, 1) ||
4543 (nextInsn->GetNextMachineInsn() != nullptr && FlagUsedLaterInCurBB(bb, *nextInsn->GetNextMachineInsn())) ||
4544 FindRegLiveOut(cmpFlagReg, *insn.GetBB())) {
4545 return;
4546 }
4547
4548 Insn *csetInsn = nextInsn;
4549 nextInsn = nextInsn->GetNextMachineInsn();
4550 auto &cond = static_cast<CondOperand &>(csetInsn->GetOperand(kInsnSecondOpnd));
/* identity case: cset reproduces the compared value; copy (or drop) it */
4551 if ((cmpConstVal == 0 && cond.GetCode() == CC_NE) || (cmpConstVal == 1 && cond.GetCode() == CC_EQ)) {
4552 if (RegOperand::IsSameRegNO(cmpFirstOpnd, csetFirstOpnd)) {
4553 bb.RemoveInsn(insn);
4554 bb.RemoveInsn(*csetInsn);
4555 } else {
4556 if (cmpFirstOpnd.GetSize() != csetFirstOpnd.GetSize()) {
4557 return;
4558 }
4559 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
4560 Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd);
4561 bb.ReplaceInsn(insn, newInsn);
4562 bb.RemoveInsn(*csetInsn);
4563 }
/* inverted case: cset computes the logical negation; use eor with #1 */
4564 } else if ((cmpConstVal == 1 && cond.GetCode() == CC_NE) || (cmpConstVal == 0 && cond.GetCode() == CC_EQ)) {
4565 if (cmpFirstOpnd.GetSize() != csetFirstOpnd.GetSize()) {
4566 return;
4567 }
4568 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
4569 ImmOperand &one = static_cast<AArch64CGFunc *>(&cgFunc)->CreateImmOperand(1, k8BitSize, false);
4570 Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd, one);
4571 bb.ReplaceInsn(insn, newInsn);
4572 bb.RemoveInsn(*csetInsn);
4573 }
4574 }
4575 }
4576
4577 /*
4578 * help function for DeleteMovAfterCbzOrCbnz
4579 * input:
4580 * bb: the bb to be checked out
4581 * checkCbz: pass true to require that the BB ends with cbz, false to require cbnz
4582 * opnd: for MOV reg, #0, opnd indicate reg
4583 * return:
4584 * true if the BB's terminating compare-and-branch is of the requested kind (cbz/cbnz)
4585 * and its first operand is the same register as the input operand
4586 */
PredBBCheck(BB & bb,bool checkCbz,const Operand & opnd) const4587 bool DeleteMovAfterCbzOrCbnzAArch64::PredBBCheck(BB &bb, bool checkCbz, const Operand &opnd) const
4588 {
4589 if (bb.GetKind() != BB::kBBIf) {
4590 return false;
4591 }
4592
4593 Insn *condBr = cgcfg->FindLastCondBrInsn(bb);
4594 DEBUG_ASSERT(condBr != nullptr, "condBr must be found");
4595 if (!cgcfg->IsCompareAndBranchInsn(*condBr)) {
4596 return false;
4597 }
4598 MOperator mOp = condBr->GetMachineOpcode();
4599 if (checkCbz && mOp != MOP_wcbz && mOp != MOP_xcbz) {
4600 return false;
4601 }
4602 if (!checkCbz && mOp != MOP_xcbnz && mOp != MOP_wcbnz) {
4603 return false;
4604 }
4605 return RegOperand::IsSameRegNO(condBr->GetOperand(kInsnFirstOpnd), opnd);
4606 }
4607
OpndDefByMovZero(const Insn & insn) const4608 bool DeleteMovAfterCbzOrCbnzAArch64::OpndDefByMovZero(const Insn &insn) const
4609 {
4610 MOperator defMop = insn.GetMachineOpcode();
4611 switch (defMop) {
4612 case MOP_wmovri32:
4613 case MOP_xmovri64: {
4614 Operand &defOpnd = insn.GetOperand(kInsnSecondOpnd);
4615 DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
4616 auto &defConst = static_cast<ImmOperand &>(defOpnd);
4617 int64 defConstValue = defConst.GetValue();
4618 if (defConstValue == 0) {
4619 return true;
4620 }
4621 return false;
4622 }
4623 case MOP_xmovrr:
4624 case MOP_wmovrr: {
4625 Operand &secondOpnd = insn.GetOperand(kInsnSecondOpnd);
4626 DEBUG_ASSERT(secondOpnd.IsRegister(), "expects RegOperand here");
4627 auto ®Opnd = static_cast<RegOperand &>(secondOpnd);
4628 return IsZeroRegister(regOpnd);
4629 }
4630 default:
4631 return false;
4632 }
4633 }
4634
4635 /* check whether predefine insn of first operand of test_insn is exist in current BB */
/*
 * Returns true when no insn between the top of testInsn's BB and testInsn itself
 * (exclusive) defines testInsn's first operand. Any earlier def (register write,
 * pre/post-indexed writeback of a mem base, or a def inside a list operand)
 * invalidates the "register is still zero" assumption and yields false.
 */
NoPreDefine(Insn & testInsn) const4636 bool DeleteMovAfterCbzOrCbnzAArch64::NoPreDefine(Insn &testInsn) const
4637 {
4638 Insn *nextInsn = nullptr;
4639 for (Insn *insn = testInsn.GetBB()->GetFirstInsn(); insn != nullptr && insn != &testInsn; insn = nextInsn) {
4640 nextInsn = insn->GetNextMachineInsn();
4641 if (!insn->IsMachineInstruction()) {
4642 continue;
4643 }
4644 DEBUG_ASSERT(!insn->IsCall(), "CG internal error, call insn should not be at the middle of the BB.");
4645 const InsnDesc *md = insn->GetDesc();
4646 uint32 opndNum = insn->GetOperandSize();
4647 for (uint32 i = 0; i < opndNum; ++i) {
4648 Operand &opnd = insn->GetOperand(i);
4649 if (!md->opndMD[i]->IsDef()) {
4650 continue;
4651 }
/* pre/post-indexed addressing writes its base register back */
4652 if (opnd.IsMemoryAccessOperand()) {
4653 auto &memOpnd = static_cast<MemOperand &>(opnd);
4654 RegOperand *base = memOpnd.GetBaseRegister();
4655 DEBUG_ASSERT(base != nullptr, "nullptr check");
4656 DEBUG_ASSERT(base->IsRegister(), "expects RegOperand");
4657 if (RegOperand::IsSameRegNO(*base, testInsn.GetOperand(kInsnFirstOpnd)) &&
4658 memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi &&
4659 (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
4660 return false;
4661 }
/* a list operand may define several registers; check each one
 * NOTE(review): calls are asserted absent above, so these lists presumably
 * come from other multi-def insns -- verify which insns carry them here */
4662 } else if (opnd.IsList()) {
4663 for (auto operand : static_cast<ListOperand &>(opnd).GetOperands()) {
4664 if (RegOperand::IsSameRegNO(testInsn.GetOperand(kInsnFirstOpnd), *operand)) {
4665 return false;
4666 }
4667 }
4668 } else if (opnd.IsRegister()) {
4669 if (RegOperand::IsSameRegNO(testInsn.GetOperand(kInsnFirstOpnd), opnd)) {
4670 return false;
4671 }
4672 }
4673 }
4674 }
4675 return true;
4676 }
/*
 * Delete redundant "mov reg, #0" insns from processBB: on entry from bb (via the
 * cbz/cbnz in insn) the register is already known to be zero, and every other
 * predecessor must prove the same via its own compare-and-branch (PredBBCheck).
 */
ProcessBBHandle(BB * processBB,const BB & bb,const Insn & insn) const4677 void DeleteMovAfterCbzOrCbnzAArch64::ProcessBBHandle(BB *processBB, const BB &bb, const Insn &insn) const
4678 {
4679 FOR_BB_INSNS_SAFE(processInsn, processBB, nextProcessInsn) {
4680 nextProcessInsn = processInsn->GetNextMachineInsn();
4681 if (!processInsn->IsMachineInstruction()) {
4682 continue;
4683 }
4684 /* register may be a caller save register */
4685 if (processInsn->IsCall()) {
4686 break;
4687 }
/* candidate: a mov-zero of the branched-on register with no earlier redefinition */
4688 if (!OpndDefByMovZero(*processInsn) || !NoPreDefine(*processInsn) ||
4689 !RegOperand::IsSameRegNO(processInsn->GetOperand(kInsnFirstOpnd), insn.GetOperand(kInsnFirstOpnd))) {
4690 continue;
4691 }
4692 bool toDoOpt = true;
4693 MOperator condBrMop = insn.GetMachineOpcode();
4694 /* process elseBB, other preds must be cbz */
4695 if (condBrMop == MOP_wcbnz || condBrMop == MOP_xcbnz) {
4696 /* check out all preds of process_bb */
4697 for (auto *processBBPred : processBB->GetPreds()) {
4698 if (processBBPred == &bb) {
4699 continue;
4700 }
4701 if (!PredBBCheck(*processBBPred, true, processInsn->GetOperand(kInsnFirstOpnd))) {
4702 toDoOpt = false;
4703 break;
4704 }
4705 }
4706 } else {
4707 /* process ifBB, other preds can be cbz or cbnz(one at most) */
4708 for (auto processBBPred : processBB->GetPreds()) {
4709 if (processBBPred == &bb) {
4710 continue;
4711 }
4712 /* for cbnz pred, there is one at most */
4713 if (!PredBBCheck(*processBBPred, processBBPred != processBB->GetPrev(),
4714 processInsn->GetOperand(kInsnFirstOpnd))) {
4715 toDoOpt = false;
4716 break;
4717 }
4718 }
4719 }
4720 if (!toDoOpt) {
4721 continue;
4722 }
/* the register is zero on every path into processBB: the mov is redundant */
4723 processBB->RemoveInsn(*processInsn);
4724 }
4725 }
4726
4727 /* ldr wn, [x1, wn, SXTW]
4728 * add x2, wn, x2
4729 */
IsExpandBaseOpnd(const Insn & insn,const Insn & prevInsn) const4730 bool ComplexMemOperandAddAArch64::IsExpandBaseOpnd(const Insn &insn, const Insn &prevInsn) const
4731 {
4732 MOperator prevMop = prevInsn.GetMachineOpcode();
4733 if (prevMop >= MOP_wldrsb && prevMop <= MOP_xldr &&
4734 prevInsn.GetOperand(kInsnFirstOpnd).Equals(insn.GetOperand(kInsnSecondOpnd))) {
4735 return true;
4736 }
4737 return false;
4738 }
4739
/*
 * Fold an add of two registers into the addressing mode of the following memory
 * access:  add xt, xn, xm ; ldr/str ..., [xt]  ==>  ldr/str ..., [xn, xm]
 */
Run(BB & bb,Insn & insn)4740 void ComplexMemOperandAddAArch64::Run(BB &bb, Insn &insn)
4741 {
4742 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
4743 Insn *nextInsn = insn.GetNextMachineInsn();
4744 if (nextInsn == nullptr) {
4745 return;
4746 }
4747 Insn *prevInsn = insn.GetPreviousMachineInsn();
4748 MOperator thisMop = insn.GetMachineOpcode();
4749 if (thisMop != MOP_xaddrrr && thisMop != MOP_waddrrr) {
4750 return;
4751 }
/* the next insn must be a load or store whose mem operand can absorb the add */
4752 MOperator nextMop = nextInsn->GetMachineOpcode();
4753 if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
4754 if (!IsMemOperandOptPattern(insn, *nextInsn)) {
4755 return;
4756 }
4757 MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
4758 auto newBaseOpnd = static_cast<RegOperand *>(&insn.GetOperand(kInsnSecondOpnd));
4759 auto newIndexOpnd = static_cast<RegOperand *>(&insn.GetOperand(kInsnThirdOpnd));
4760 regno_t memBaseOpndRegNO = newBaseOpnd->GetRegisterNumber();
/* the base was just produced by a load into a 32-bit view: switch to the 64-bit
 * view of the same physical register so it can serve as an address base */
4761 if (newBaseOpnd->GetSize() <= k32BitSize && prevInsn != nullptr && IsExpandBaseOpnd(insn, *prevInsn)) {
4762 newBaseOpnd = &aarch64CGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(memBaseOpndRegNO),
4763 k64BitSize, kRegTyInt);
4764 }
/* an address base must be a full 64-bit register */
4765 if (newBaseOpnd->GetSize() != k64BitSize) {
4766 return;
4767 }
/* 32-bit index: use the extend-aware overload; 64-bit index: plain base + index
 * NOTE(review): the two GetOrCreateMemOpnd overloads differ in their trailing
 * arguments -- confirm the 32-bit form selects the intended extension */
4768 if (newIndexOpnd->GetSize() <= k32BitSize) {
4769 MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
4770 newBaseOpnd, newIndexOpnd, 0, false);
4771 nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
4772 } else {
4773 MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
4774 newBaseOpnd, newIndexOpnd, nullptr, nullptr);
4775 nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
4776 }
4777 bb.RemoveInsn(insn);
4778 }
4779 }
4780
Run(BB & bb,Insn & insn)4781 void DeleteMovAfterCbzOrCbnzAArch64::Run(BB &bb, Insn &insn)
4782 {
4783 if (bb.GetKind() != BB::kBBIf) {
4784 return;
4785 }
4786 if (&insn != cgcfg->FindLastCondBrInsn(bb)) {
4787 return;
4788 }
4789 if (!cgcfg->IsCompareAndBranchInsn(insn)) {
4790 return;
4791 }
4792 BB *processBB = nullptr;
4793 if (bb.GetNext() == maplebe::CGCFG::GetTargetSuc(bb)) {
4794 return;
4795 }
4796
4797 MOperator condBrMop = insn.GetMachineOpcode();
4798 if (condBrMop == MOP_wcbnz || condBrMop == MOP_xcbnz) {
4799 processBB = bb.GetNext();
4800 } else {
4801 processBB = maplebe::CGCFG::GetTargetSuc(bb);
4802 }
4803
4804 DEBUG_ASSERT(processBB != nullptr, "process_bb is null in DeleteMovAfterCbzOrCbnzAArch64::Run");
4805 ProcessBBHandle(processBB, bb, insn);
4806 }
4807
FindNewMop(const BB & bb,const Insn & insn) const4808 MOperator OneHoleBranchesPreAArch64::FindNewMop(const BB &bb, const Insn &insn) const
4809 {
4810 MOperator newOp = MOP_undef;
4811 if (&insn != bb.GetLastInsn()) {
4812 return newOp;
4813 }
4814 MOperator thisMop = insn.GetMachineOpcode();
4815 if (thisMop != MOP_wcbz && thisMop != MOP_wcbnz && thisMop != MOP_xcbz && thisMop != MOP_xcbnz) {
4816 return newOp;
4817 }
4818 switch (thisMop) {
4819 case MOP_wcbz:
4820 newOp = MOP_wtbnz;
4821 break;
4822 case MOP_wcbnz:
4823 newOp = MOP_wtbz;
4824 break;
4825 case MOP_xcbz:
4826 newOp = MOP_xtbnz;
4827 break;
4828 case MOP_xcbnz:
4829 newOp = MOP_xtbz;
4830 break;
4831 default:
4832 CHECK_FATAL(false, "can not touch here");
4833 break;
4834 }
4835 return newOp;
4836 }
4837
/*
 * Two rewrites for a cbz/cbnz preceded by single-bit producers (FindNewMop has
 * already verified insn is the terminating cbz/cbnz of bb):
 * 1) uxtb w0, w1 with <= 8 valid bits ; cb(n)z w0  ==>  cb(n)z w1  (drop the uxtb)
 * 2) uxtb w0, w1 (w1 has exactly 1 valid bit) ; eor w2, w0, #1 ; cb(n)z w2
 *      ==>  tb(n)z w1, #0  (the eor #1 inverts the single bit, hence the
 *      inverted-condition mop from FindNewMop)
 */
Run(BB & bb,Insn & insn)4838 void OneHoleBranchesPreAArch64::Run(BB &bb, Insn &insn)
4839 {
4840 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
4841 MOperator newOp = FindNewMop(bb, insn);
4842 if (newOp == MOP_undef) {
4843 return;
4844 }
4845 Insn *prevInsn = insn.GetPreviousMachineInsn();
4846 LabelOperand &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
/* pattern 1: the uxtb is a no-op on a value already narrower than 8 bits */
4847 if (prevInsn != nullptr && prevInsn->GetMachineOpcode() == MOP_xuxtb32 &&
4848 (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() <= k8BitSize ||
4849 static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetValidBitsNum() <= k8BitSize)) {
4850 if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
4851 return;
4852 }
4853 if (IfOperandIsLiveAfterInsn(static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)), insn)) {
4854 return;
4855 }
/* branch directly on the uxtb's source and drop the extension */
4856 insn.SetOperand(kInsnFirstOpnd, prevInsn->GetOperand(kInsnSecondOpnd));
4857 if (CGOptions::DoCGSSA()) {
4858 CHECK_FATAL(false, "check this case in ssa opt");
4859 }
4860 bb.RemoveInsn(*prevInsn);
4861 }
/* pattern 2: uxtb of a 1-valid-bit value followed by "eor ..., #1" (bit flip) */
4862 if (prevInsn != nullptr &&
4863 (prevInsn->GetMachineOpcode() == MOP_xeorrri13 || prevInsn->GetMachineOpcode() == MOP_weorrri12) &&
4864 static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd)).GetValue() == 1) {
4865 if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
4866 return;
4867 }
4868 Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
4869 if (prevPrevInsn == nullptr) {
4870 return;
4871 }
4872 if (prevPrevInsn->GetMachineOpcode() != MOP_xuxtb32 ||
4873 static_cast<RegOperand &>(prevPrevInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() != 1) {
4874 return;
4875 }
4876 if (&(prevPrevInsn->GetOperand(kInsnFirstOpnd)) != &(prevInsn->GetOperand(kInsnSecondOpnd))) {
4877 return;
4878 }
/* test bit 0 of the original (pre-uxtb, pre-eor) register */
4879 ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(0, k8BitSize, false);
4880 auto &regOperand = static_cast<RegOperand &>(prevPrevInsn->GetOperand(kInsnSecondOpnd));
4881 if (CGOptions::DoCGSSA()) {
4882 CHECK_FATAL(false, "check this case in ssa opt");
4883 }
4884 bb.InsertInsnAfter(insn, cgFunc.GetInsnBuilder()->BuildInsn(newOp, regOperand, oneHoleOpnd, label));
4885 bb.RemoveInsn(insn);
4886 bb.RemoveInsn(*prevInsn);
4887 bb.RemoveInsn(*prevPrevInsn);
4888 }
4889 }
4890
FindLoadFloatPoint(Insn & insn)4891 bool LoadFloatPointPattern::FindLoadFloatPoint(Insn &insn)
4892 {
4893 MOperator mOp = insn.GetMachineOpcode();
4894 optInsn.clear();
4895 if (mOp != MOP_xmovzri16) {
4896 return false;
4897 }
4898 optInsn.emplace_back(&insn);
4899
4900 Insn *insnMov2 = insn.GetNextMachineInsn();
4901 if (insnMov2 == nullptr) {
4902 return false;
4903 }
4904 if (insnMov2->GetMachineOpcode() != MOP_xmovkri16) {
4905 return false;
4906 }
4907 optInsn.emplace_back(insnMov2);
4908
4909 Insn *insnMov3 = insnMov2->GetNextMachineInsn();
4910 if (insnMov3 == nullptr) {
4911 return false;
4912 }
4913 if (insnMov3->GetMachineOpcode() != MOP_xmovkri16) {
4914 return false;
4915 }
4916 optInsn.emplace_back(insnMov3);
4917
4918 Insn *insnMov4 = insnMov3->GetNextMachineInsn();
4919 if (insnMov4 == nullptr) {
4920 return false;
4921 }
4922 if (insnMov4->GetMachineOpcode() != MOP_xmovkri16) {
4923 return false;
4924 }
4925 optInsn.emplace_back(insnMov4);
4926 return true;
4927 }
4928
IsPatternMatch()4929 bool LoadFloatPointPattern::IsPatternMatch()
4930 {
4931 int insnNum = 0;
4932 Insn *insn1 = optInsn[insnNum];
4933 Insn *insn2 = optInsn[++insnNum];
4934 Insn *insn3 = optInsn[++insnNum];
4935 Insn *insn4 = optInsn[++insnNum];
4936 if ((static_cast<RegOperand &>(insn1->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
4937 static_cast<RegOperand &>(insn2->GetOperand(kInsnFirstOpnd)).GetRegisterNumber()) ||
4938 (static_cast<RegOperand &>(insn2->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
4939 static_cast<RegOperand &>(insn3->GetOperand(kInsnFirstOpnd)).GetRegisterNumber()) ||
4940 (static_cast<RegOperand &>(insn3->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() !=
4941 static_cast<RegOperand &>(insn4->GetOperand(kInsnFirstOpnd)).GetRegisterNumber())) {
4942 return false;
4943 }
4944 if ((static_cast<BitShiftOperand &>(insn1->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != 0) ||
4945 (static_cast<BitShiftOperand &>(insn2->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != k16BitSize) ||
4946 (static_cast<BitShiftOperand &>(insn3->GetOperand(kInsnThirdOpnd)).GetShiftAmount() != k32BitSize) ||
4947 (static_cast<BitShiftOperand &>(insn4->GetOperand(kInsnThirdOpnd)).GetShiftAmount() !=
4948 (k16BitSize + k32BitSize))) {
4949 return false;
4950 }
4951 return true;
4952 }
4953
CheckCondition(Insn & insn)4954 bool LoadFloatPointPattern::CheckCondition(Insn &insn)
4955 {
4956 if (FindLoadFloatPoint(insn) && IsPatternMatch()) {
4957 return true;
4958 }
4959 return false;
4960 }
4961
/*
 * Rewrite a movz + 3*movk 64-bit constant materialization into a single literal
 * load (ldr from a label), moving the constant out of the insn stream.
 */
Run(BB & bb,Insn & insn)4962 void LoadFloatPointPattern::Run(BB &bb, Insn &insn)
4963 {
4964 /* logical shift left values in three optimized pattern */
4965 if (CheckCondition(insn)) {
4966 int insnNum = 0;
4967 Insn *insn1 = optInsn[insnNum];
4968 Insn *insn2 = optInsn[++insnNum];
4969 Insn *insn3 = optInsn[++insnNum];
4970 Insn *insn4 = optInsn[++insnNum];
4971 auto &movConst1 = static_cast<ImmOperand &>(insn1->GetOperand(kInsnSecondOpnd));
4972 auto &movConst2 = static_cast<ImmOperand &>(insn2->GetOperand(kInsnSecondOpnd));
4973 auto &movConst3 = static_cast<ImmOperand &>(insn3->GetOperand(kInsnSecondOpnd));
4974 auto &movConst4 = static_cast<ImmOperand &>(insn4->GetOperand(kInsnSecondOpnd));
4975 /* movk/movz's immOpnd is 16-bit unsigned immediate */
/* reassemble the full 64-bit constant from the four 16-bit chunks */
4976 uint64 value = static_cast<uint64>(movConst1.GetValue()) +
4977 (static_cast<uint64>(movConst2.GetValue()) << k16BitSize) +
4978 (static_cast<uint64>(movConst3.GetValue()) << k32BitSize) +
4979 (static_cast<uint64>(movConst4.GetValue()) << (k16BitSize + k32BitSize));
4980
/* create a fresh label, record the constant under it, and load from the label */
4981 LabelIdx lableIdx = cgFunc->CreateLabel();
4982 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);
4983 LabelOperand &target = aarch64CGFunc->GetOrCreateLabelOperand(lableIdx);
4984 cgFunc->InsertLabelMap(lableIdx, value);
4985 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xldli, insn4->GetOperand(kInsnFirstOpnd), target);
4986 bb.InsertInsnAfter(*insn4, newInsn);
4987 bb.RemoveInsn(*insn1);
4988 bb.RemoveInsn(*insn2);
4989 bb.RemoveInsn(*insn3);
4990 bb.RemoveInsn(*insn4);
4991 }
4992 }
4993
Run(BB & bb,Insn & insn)4994 void ReplaceOrrToMovAArch64::Run(BB &bb, Insn &insn)
4995 {
4996 Operand *opndOfOrr = nullptr;
4997 ImmOperand *immOpnd = nullptr;
4998 RegOperand *reg1 = nullptr;
4999 RegOperand *reg2 = nullptr;
5000 MOperator thisMop = insn.GetMachineOpcode();
5001 MOperator newMop = MOP_undef;
5002 switch (thisMop) {
5003 case MOP_wiorrri12: { /* opnd1 is reg32 and opnd3 is immediate. */
5004 opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
5005 reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5006 newMop = MOP_wmovrr;
5007 break;
5008 }
5009 case MOP_xiorrri13: { /* opnd1 is reg64 and opnd3 is immediate. */
5010 opndOfOrr = &(insn.GetOperand(kInsnThirdOpnd));
5011 reg2 = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5012 newMop = MOP_xmovrr;
5013 break;
5014 }
5015 default:
5016 break;
5017 }
5018 DEBUG_ASSERT(opndOfOrr->IsIntImmediate(), "expects immediate operand");
5019 immOpnd = static_cast<ImmOperand *>(opndOfOrr);
5020 if (immOpnd->GetValue() == 0) {
5021 reg1 = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5022 if (CGOptions::DoCGSSA()) {
5023 CHECK_FATAL(false, "check this case in ssa opt");
5024 }
5025 bb.ReplaceInsn(insn, cgFunc.GetInsnBuilder()->BuildInsn(newMop, *reg1, *reg2));
5026 }
5027 }
5028
Run(BB & bb,Insn & insn)5029 void ReplaceCmpToCmnAArch64::Run(BB &bb, Insn &insn)
5030 {
5031 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
5032 MOperator thisMop = insn.GetMachineOpcode();
5033 MOperator nextMop = MOP_undef;
5034 MOperator newMop = MOP_undef;
5035 uint64 negOne = UINT64_MAX;
5036 switch (thisMop) {
5037 case MOP_wmovri32: {
5038 nextMop = MOP_wcmprr;
5039 newMop = MOP_wcmnri;
5040 negOne = UINT32_MAX;
5041 break;
5042 }
5043 case MOP_xmovri64: {
5044 nextMop = MOP_xcmprr;
5045 newMop = MOP_xcmnri;
5046 break;
5047 }
5048 default:
5049 break;
5050 }
5051 Operand *opnd1OfMov = &(insn.GetOperand(kInsnFirstOpnd));
5052 Operand *opnd2OfMov = &(insn.GetOperand(kInsnSecondOpnd));
5053 if (opnd2OfMov->IsIntImmediate()) {
5054 ImmOperand *immOpnd = static_cast<ImmOperand *>(opnd2OfMov);
5055 int64 iVal = immOpnd->GetValue();
5056 if ((kNegativeImmLowerLimit <= iVal && iVal < 0) || static_cast<uint64>(iVal) == negOne) {
5057 Insn *nextInsn = insn.GetNextMachineInsn(); /* get the next insn to judge if it is a cmp instruction. */
5058 if (nextInsn != nullptr) {
5059 if (nextInsn->GetMachineOpcode() == nextMop) {
5060 Operand *opndCmp2 = &(nextInsn->GetOperand(kInsnSecondOpnd));
5061 Operand *opndCmp3 = &(nextInsn->GetOperand(kInsnThirdOpnd)); /* get the third operand of cmp */
5062 /* if the first operand of mov equals the third operand of cmp, match the pattern. */
5063 if (opnd1OfMov == opndCmp3) {
5064 if (static_cast<uint64>(iVal) == negOne) {
5065 iVal = -1;
5066 }
5067 ImmOperand &newOpnd = aarch64CGFunc->CreateImmOperand(iVal * (-1), immOpnd->GetSize(), false);
5068 Operand ®Flag = nextInsn->GetOperand(kInsnFirstOpnd);
5069 bb.ReplaceInsn(*nextInsn,
5070 cgFunc.GetInsnBuilder()->BuildInsn(newMop, regFlag, *opndCmp2, newOpnd));
5071 }
5072 }
5073 }
5074 }
5075 }
5076 }
5077
CheckCondition(Insn & insn)5078 bool RemoveIncRefPattern::CheckCondition(Insn &insn)
5079 {
5080 MOperator mOp = insn.GetMachineOpcode();
5081 if (mOp != MOP_xbl) {
5082 return false;
5083 }
5084 auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
5085 if (target.GetName() != "MCC_IncDecRef_NaiveRCFast") {
5086 return false;
5087 }
5088 insnMov2 = insn.GetPreviousMachineInsn();
5089 if (insnMov2 == nullptr) {
5090 return false;
5091 }
5092 MOperator mopMov2 = insnMov2->GetMachineOpcode();
5093 if (mopMov2 != MOP_xmovrr) {
5094 return false;
5095 }
5096 insnMov1 = insnMov2->GetPreviousMachineInsn();
5097 if (insnMov1 == nullptr) {
5098 return false;
5099 }
5100 MOperator mopMov1 = insnMov1->GetMachineOpcode();
5101 if (mopMov1 != MOP_xmovrr) {
5102 return false;
5103 }
5104 if (static_cast<RegOperand &>(insnMov1->GetOperand(kInsnSecondOpnd)).GetRegisterNumber() !=
5105 static_cast<RegOperand &>(insnMov2->GetOperand(kInsnSecondOpnd)).GetRegisterNumber()) {
5106 return false;
5107 }
5108 auto &mov2Dest = static_cast<RegOperand &>(insnMov2->GetOperand(kInsnFirstOpnd));
5109 auto &mov1Dest = static_cast<RegOperand &>(insnMov1->GetOperand(kInsnFirstOpnd));
5110 if (mov1Dest.IsVirtualRegister() || mov2Dest.IsVirtualRegister() || mov1Dest.GetRegisterNumber() != R0 ||
5111 mov2Dest.GetRegisterNumber() != R1) {
5112 return false;
5113 }
5114 return true;
5115 }
5116
Run(BB & bb,Insn & insn)5117 void RemoveIncRefPattern::Run(BB &bb, Insn &insn)
5118 {
5119 if (!CheckCondition(insn)) {
5120 return;
5121 }
5122 bb.RemoveInsn(insn);
5123 bb.RemoveInsn(*insnMov2);
5124 bb.RemoveInsn(*insnMov1);
5125 }
5126
FindLondIntCmpWithZ(Insn & insn)5127 bool LongIntCompareWithZPattern::FindLondIntCmpWithZ(Insn &insn)
5128 {
5129 MOperator thisMop = insn.GetMachineOpcode();
5130 optInsn.clear();
5131 /* forth */
5132 if (thisMop != MOP_wcmpri) {
5133 return false;
5134 }
5135 (void)optInsn.emplace_back(&insn);
5136
5137 /* third */
5138 Insn *preInsn1 = insn.GetPreviousMachineInsn();
5139 if (preInsn1 == nullptr) {
5140 return false;
5141 }
5142 MOperator preMop1 = preInsn1->GetMachineOpcode();
5143 if (preMop1 != MOP_wcsincrrrc) {
5144 return false;
5145 }
5146 (void)optInsn.emplace_back(preInsn1);
5147
5148 /* second */
5149 Insn *preInsn2 = preInsn1->GetPreviousMachineInsn();
5150 if (preInsn2 == nullptr) {
5151 return false;
5152 }
5153 MOperator preMop2 = preInsn2->GetMachineOpcode();
5154 if (preMop2 != MOP_wcsinvrrrc) {
5155 return false;
5156 }
5157 (void)optInsn.emplace_back(preInsn2);
5158
5159 /* first */
5160 Insn *preInsn3 = preInsn2->GetPreviousMachineInsn();
5161 if (preInsn3 == nullptr) {
5162 return false;
5163 }
5164 MOperator preMop3 = preInsn3->GetMachineOpcode();
5165 if (preMop3 != MOP_xcmpri) {
5166 return false;
5167 }
5168 (void)optInsn.emplace_back(preInsn3);
5169 return true;
5170 }
5171
/*
 * Check the operand constraints of the sequence collected by
 * FindLondIntCmpWithZ. optInsn is filled last-to-first, so:
 *   insn1 = wcmpri (final), insn2 = wcsincrrrc, insn3 = wcsinvrrrc,
 *   insn4 = xcmpri (initial).
 */
bool LongIntCompareWithZPattern::IsPatternMatch()
{
    constexpr int insnLen = 4;
    if (optInsn.size() != insnLen) {
        return false;
    }
    int insnNum = 0;
    Insn *insn1 = optInsn[insnNum];
    Insn *insn2 = optInsn[++insnNum];
    Insn *insn3 = optInsn[++insnNum];
    Insn *insn4 = optInsn[++insnNum];
    /* csinv must select between zero registers under GE; csinc must update its
     * own result under LE; both compares must be against immediate 0 */
    if (IsZeroRegister(insn3->GetOperand(kInsnSecondOpnd)) && IsZeroRegister(insn3->GetOperand(kInsnThirdOpnd)) &&
        IsZeroRegister(insn2->GetOperand(kInsnThirdOpnd)) &&
        &(insn2->GetOperand(kInsnFirstOpnd)) == &(insn2->GetOperand(kInsnSecondOpnd)) &&
        static_cast<CondOperand &>(insn3->GetOperand(kInsnFourthOpnd)).GetCode() == CC_GE &&
        static_cast<CondOperand &>(insn2->GetOperand(kInsnFourthOpnd)).GetCode() == CC_LE &&
        static_cast<ImmOperand &>(insn1->GetOperand(kInsnThirdOpnd)).GetValue() == 0 &&
        static_cast<ImmOperand &>(insn4->GetOperand(kInsnThirdOpnd)).GetValue() == 0) {
        return true;
    }
    return false;
}
5194
CheckCondition(Insn & insn)5195 bool LongIntCompareWithZPattern::CheckCondition(Insn &insn)
5196 {
5197 if (FindLondIntCmpWithZ(insn) && IsPatternMatch()) {
5198 return true;
5199 }
5200 return false;
5201 }
5202
void LongIntCompareWithZPattern::Run(BB &bb, Insn &insn)
{
    /* found pattern */
    if (CheckCondition(insn)) {
        /* rebuild the initial 64-bit compare (optInsn[3] is the xcmpri) */
        Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(
            optInsn[3]->GetMachineOpcode(), optInsn[3]->GetOperand(kInsnFirstOpnd),
            optInsn[3]->GetOperand(kInsnSecondOpnd), optInsn[3]->GetOperand(kInsnThirdOpnd));
        /* replace the final wcmpri (optInsn[0]) with that compare */
        bb.ReplaceInsn(*optInsn[0], newInsn);
        optInsn.clear();
    }
}
5215
Run(BB & bb,Insn & insn)5216 void ComplexMemOperandAArch64::Run(BB &bb, Insn &insn)
5217 {
5218 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
5219 Insn *nextInsn = insn.GetNextMachineInsn();
5220 if (nextInsn == nullptr) {
5221 return;
5222 }
5223 MOperator thisMop = insn.GetMachineOpcode();
5224 if (thisMop != MOP_xadrpl12) {
5225 return;
5226 }
5227
5228 MOperator nextMop = nextInsn->GetMachineOpcode();
5229 if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldp) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstp))) {
5230 /* Check if base register of nextInsn and the dest operand of insn are identical. */
5231 MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
5232 DEBUG_ASSERT(memOpnd != nullptr, "memOpnd is null in AArch64Peep::ComplexMemOperandAArch64");
5233
5234 /* Only for AddrMode_B_OI addressing mode. */
5235 if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
5236 return;
5237 }
5238
5239 /* Only for intact memory addressing. */
5240 if (!memOpnd->IsIntactIndexed()) {
5241 return;
5242 }
5243
5244 auto ®Opnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5245
5246 /* Avoid linking issues when object is not 16byte aligned */
5247 if (memOpnd->GetSize() == k128BitSize) {
5248 return;
5249 }
5250
5251 /* Check if dest operand of insn is idential with base register of nextInsn. */
5252 if (memOpnd->GetBaseRegister() != ®Opnd) {
5253 return;
5254 }
5255
5256 /* Check if x0 is used after ldr insn, and if it is in live-out. */
5257 if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
5258 return;
5259 }
5260
5261 /* load store pairs cannot have relocation */
5262 if (nextInsn->IsLoadStorePair() && insn.GetOperand(kInsnThirdOpnd).IsStImmediate()) {
5263 return;
5264 }
5265
5266 auto &stImmOpnd = static_cast<StImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
5267 OfstOperand &offOpnd = aarch64CGFunc->GetOrCreateOfstOpnd(
5268 stImmOpnd.GetOffset() + memOpnd->GetOffsetImmediate()->GetOffsetValue(), k32BitSize);
5269
5270 /* do not guarantee rodata alignment at Os */
5271 if (CGOptions::OptimizeForSize() && stImmOpnd.GetSymbol()->IsReadOnly()) {
5272 return;
5273 }
5274
5275 /* avoid relocation */
5276 if ((offOpnd.GetValue() % static_cast<int8>(kBitsPerByte)) != 0) {
5277 return;
5278 }
5279
5280 if (cgFunc.GetMirModule().IsCModule()) {
5281 Insn *prevInsn = insn.GetPrev();
5282 MOperator prevMop = prevInsn->GetMachineOpcode();
5283 if (prevMop != MOP_xadrp) {
5284 return;
5285 } else {
5286 auto &prevStImmOpnd = static_cast<StImmOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5287 prevStImmOpnd.SetOffset(offOpnd.GetValue());
5288 }
5289 }
5290 auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
5291 MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(
5292 MemOperand::kAddrModeLo12Li, memOpnd->GetSize(), &newBaseOpnd, nullptr, &offOpnd, stImmOpnd.GetSymbol());
5293
5294 nextInsn->SetMemOpnd(&newMemOpnd);
5295 bb.RemoveInsn(insn);
5296 CHECK_FATAL(!CGOptions::IsLazyBinding() || cgFunc.GetCG()->IsLibcore(),
5297 "this pattern can't be found in this phase");
5298 }
5299 }
5300
/*
 * Fold "add xN, xBase, xIndex" into the addressing mode of the following
 * load/store, turning [xN, ...] into the register-offset form
 * [xBase, xIndex] (kAddrModeBOrX). Operand checks are delegated to
 * IsMemOperandOptPattern.
 */
void ComplexMemOperandPreAddAArch64::Run(BB &bb, Insn &insn)
{
    AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
    Insn *nextInsn = insn.GetNextMachineInsn();
    if (nextInsn == nullptr) {
        return;
    }
    MOperator thisMop = insn.GetMachineOpcode();
    if (thisMop != MOP_xaddrrr && thisMop != MOP_waddrrr) {
        return;
    }
    MOperator nextMop = nextInsn->GetMachineOpcode();
    /* only single loads/stores (ranges exclude the pair opcodes) qualify */
    if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
        if (!IsMemOperandOptPattern(insn, *nextInsn)) {
            return;
        }
        MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
        auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        /* the base must be a full 64-bit address register */
        if (newBaseOpnd.GetSize() != k64BitSize) {
            return;
        }
        auto &newIndexOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
        if (newIndexOpnd.GetSize() <= k32BitSize) {
            /* 32-bit index: overload taking a (shift, signed) pair, shift 0 */
            MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(),
                                                                       &newBaseOpnd, &newIndexOpnd, 0, false);
            nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
        } else {
            /* 64-bit index: overload taking an explicit zero offset operand */
            auto *newOfstOpnd = &aarch64CGFunc->GetOrCreateOfstOpnd(0, k32BitSize);
            MemOperand &newMemOpnd = aarch64CGFunc->GetOrCreateMemOpnd(
                MemOperand::kAddrModeBOrX, memOpnd->GetSize(), &newBaseOpnd, &newIndexOpnd, newOfstOpnd, nullptr);
            nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
        }
        /* the add is now folded into the memory operand */
        bb.RemoveInsn(insn);
    }
}
5336
/*
 * Check that the LSL amount of an "add ..., LSL #s" can be folded into the
 * scaled register-offset addressing mode of the given load/store.
 */
bool ComplexMemOperandLSLAArch64::CheckShiftValid(const Insn &insn, const BitShiftOperand &lsl) const
{
    /* check if shift amount is valid */
    uint32 lslAmount = lsl.GetShiftAmount();
    constexpr uint8 twoShiftBits = 2;
    constexpr uint8 threeShiftBits = 3;
    uint32 memSize = insn.GetMemoryByteSize();
    /* a scaled offset must match the access size: lsl #2 for 4-byte and
     * lsl #3 for 8-byte accesses (lsl #0 is always acceptable) */
    if ((memSize == k4ByteSize && (lsl.GetShiftAmount() != 0 && lslAmount != twoShiftBits)) ||
        (memSize == k8ByteSize && (lsl.GetShiftAmount() != 0 && lslAmount != threeShiftBits))) {
        return false;
    }
    /* NOTE(review): comparing the byte size against (k5BitSize << lslAmount)
     * looks suspicious — a power-of-two relation (1 << lslAmount) would be
     * expected here; confirm the intended constant */
    if (memSize != (k5BitSize << lslAmount)) {
        return false;
    }
    return true;
}
5353
/*
 * Fold "add xN, xBase, xIndex, LSL #s" (MOP_xaddrrrs) into the following
 * load/store: [xN, #0] becomes the scaled register-offset form
 * [xBase, xIndex, LSL #s].
 */
void ComplexMemOperandLSLAArch64::Run(BB &bb, Insn &insn)
{
    AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
    Insn *nextInsn = insn.GetNextMachineInsn();
    if (nextInsn == nullptr) {
        return;
    }
    MOperator thisMop = insn.GetMachineOpcode();
    if (thisMop != MOP_xaddrrrs) {
        return;
    }
    MOperator nextMop = nextInsn->GetMachineOpcode();
    /* only single (non-pair) loads/stores can take a scaled register offset */
    if (nextMop && ((nextMop >= MOP_wldrsb && nextMop <= MOP_dldr) || (nextMop >= MOP_wstrb && nextMop <= MOP_dstr))) {
        /* Check if base register of nextInsn and the dest operand of insn are identical. */
        MemOperand *memOpnd = static_cast<MemOperand *>(nextInsn->GetMemOpnd());
        DEBUG_ASSERT(memOpnd != nullptr, "null ptr check");

        /* Only for AddrMode_B_OI addressing mode. */
        if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
            return;
        }

        /* Only for immediate is 0. */
        if (memOpnd->GetOffsetImmediate()->GetOffsetValue() != 0) {
            return;
        }

        /* Only for intact memory addressing. */
        if (!memOpnd->IsIntactIndexed()) {
            return;
        }

        auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));

        /* Check if dest operand of insn is idential with base register of nextInsn. */
        if (memOpnd->GetBaseRegister() != &regOpnd) {
            return;
        }

#ifdef USE_32BIT_REF
        if (nextInsn->IsAccessRefField() && nextInsn->GetOperand(kInsnFirstOpnd).GetSize() > k32BitSize) {
            return;
        }
#endif

        /* Check if x0 is used after ldr insn, and if it is in live-out. */
        if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
            return;
        }
        /* the shift amount must be compatible with the access size */
        auto &lsl = static_cast<BitShiftOperand &>(insn.GetOperand(kInsnFourthOpnd));
        if (!CheckShiftValid(*nextInsn, lsl)) {
            return;
        }
        auto &newBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        auto &newIndexOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd));
        MemOperand &newMemOpnd =
            aarch64CGFunc->GetOrCreateMemOpnd(MemOperand::kAddrModeBOrX, memOpnd->GetSize(), &newBaseOpnd,
                                              &newIndexOpnd, static_cast<int32>(lsl.GetShiftAmount()), false);
        nextInsn->SetOperand(kInsnSecondOpnd, newMemOpnd);
        bb.RemoveInsn(insn);
    }
}
5416
Run(BB & bb,Insn & insn)5417 void ComplexMemOperandLabelAArch64::Run(BB &bb, Insn &insn)
5418 {
5419 Insn *nextInsn = insn.GetNextMachineInsn();
5420 if (nextInsn == nullptr) {
5421 return;
5422 }
5423 MOperator thisMop = insn.GetMachineOpcode();
5424 if (thisMop != MOP_xldli) {
5425 return;
5426 }
5427 MOperator nextMop = nextInsn->GetMachineOpcode();
5428 if (nextMop != MOP_xvmovdr) {
5429 return;
5430 }
5431 auto ®Opnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5432 if (regOpnd.GetRegisterNumber() !=
5433 static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd)).GetRegisterNumber()) {
5434 return;
5435 }
5436
5437 /* Check if x0 is used after ldr insn, and if it is in live-out. */
5438 if (IfOperandIsLiveAfterInsn(regOpnd, *nextInsn)) {
5439 return;
5440 }
5441 if (CGOptions::DoCGSSA()) {
5442 /* same as CombineFmovLdrPattern in ssa */
5443 CHECK_FATAL(false, "check this case in ssa");
5444 }
5445 Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(MOP_dldli, nextInsn->GetOperand(kInsnFirstOpnd),
5446 insn.GetOperand(kInsnSecondOpnd));
5447 bb.InsertInsnAfter(*nextInsn, newInsn);
5448 bb.RemoveInsn(insn);
5449 bb.RemoveInsn(*nextInsn);
5450 }
5451
MayThrowBetweenInsn(const Insn & prevCallInsn,const Insn & currCallInsn)5452 static bool MayThrowBetweenInsn(const Insn &prevCallInsn, const Insn &currCallInsn)
5453 {
5454 for (Insn *insn = prevCallInsn.GetNext(); insn != nullptr && insn != &currCallInsn; insn = insn->GetNext()) {
5455 if (insn->MayThrow()) {
5456 return true;
5457 }
5458 }
5459 return false;
5460 }
5461
5462 /*
5463 * mov R0, vreg1 / R0 -> objDesignateInsn
5464 * add vreg2, vreg1, #imm -> fieldDesignateInsn
5465 * mov R1, vreg2 -> fieldParamDefInsn
5466 * mov R2, vreg3 -> fieldValueDefInsn
5467 */
/*
 * Match, walking backwards from the write-barrier call, its four
 * argument-setup insns:
 *   mov R0, vreg1          (object)
 *   add vreg2, vreg1, #imm (field address = object + offset)
 *   mov R1, vreg2          (field-address parameter)
 *   mov R2, vreg3          (field-value parameter)
 * On success the matched insns are appended to paramDefInsns and param is
 * filled with the object operand, field base register, offset and value.
 */
bool WriteFieldCallPattern::WriteFieldCallOptPatternMatch(const Insn &writeFieldCallInsn, WriteRefFieldParam &param)
{
    /* value parameter: "mov R2, vreg3" immediately before the call */
    Insn *fieldValueDefInsn = writeFieldCallInsn.GetPreviousMachineInsn();
    if (fieldValueDefInsn == nullptr || fieldValueDefInsn->GetMachineOpcode() != MOP_xmovrr) {
        return false;
    }
    Operand &fieldValueDefInsnDestOpnd = fieldValueDefInsn->GetOperand(kInsnFirstOpnd);
    auto &fieldValueDefInsnDestReg = static_cast<RegOperand &>(fieldValueDefInsnDestOpnd);
    if (fieldValueDefInsnDestReg.GetRegisterNumber() != R2) {
        return false;
    }
    paramDefInsns.emplace_back(fieldValueDefInsn);
    param.fieldValue = &(fieldValueDefInsn->GetOperand(kInsnSecondOpnd));
    /* field-address parameter: "mov R1, vreg2" */
    Insn *fieldParamDefInsn = fieldValueDefInsn->GetPreviousMachineInsn();
    if (fieldParamDefInsn == nullptr || fieldParamDefInsn->GetMachineOpcode() != MOP_xmovrr) {
        return false;
    }
    Operand &fieldParamDestOpnd = fieldParamDefInsn->GetOperand(kInsnFirstOpnd);
    auto &fieldParamDestReg = static_cast<RegOperand &>(fieldParamDestOpnd);
    if (fieldParamDestReg.GetRegisterNumber() != R1) {
        return false;
    }
    paramDefInsns.emplace_back(fieldParamDefInsn);
    /* field-address computation: "add vreg2, vreg1, #imm" feeding the mov */
    Insn *fieldDesignateInsn = fieldParamDefInsn->GetPreviousMachineInsn();
    if (fieldDesignateInsn == nullptr || fieldDesignateInsn->GetMachineOpcode() != MOP_xaddrri12) {
        return false;
    }
    Operand &fieldParamDefSrcOpnd = fieldParamDefInsn->GetOperand(kInsnSecondOpnd);
    Operand &fieldDesignateDestOpnd = fieldDesignateInsn->GetOperand(kInsnFirstOpnd);
    if (!RegOperand::IsSameReg(fieldParamDefSrcOpnd, fieldDesignateDestOpnd)) {
        return false;
    }
    Operand &fieldDesignateBaseOpnd = fieldDesignateInsn->GetOperand(kInsnSecondOpnd);
    param.fieldBaseOpnd = &(static_cast<RegOperand &>(fieldDesignateBaseOpnd));
    auto &immOpnd = static_cast<ImmOperand &>(fieldDesignateInsn->GetOperand(kInsnThirdOpnd));
    param.fieldOffset = immOpnd.GetValue();
    paramDefInsns.emplace_back(fieldDesignateInsn);
    /* object parameter: "mov R0, vreg1" */
    Insn *objDesignateInsn = fieldDesignateInsn->GetPreviousMachineInsn();
    if (objDesignateInsn == nullptr || objDesignateInsn->GetMachineOpcode() != MOP_xmovrr) {
        return false;
    }
    Operand &objDesignateDestOpnd = objDesignateInsn->GetOperand(kInsnFirstOpnd);
    auto &objDesignateDestReg = static_cast<RegOperand &>(objDesignateDestOpnd);
    if (objDesignateDestReg.GetRegisterNumber() != R0) {
        return false;
    }
    /* the mov's source must differ from R0 and must be the add's base reg */
    Operand &objDesignateSrcOpnd = objDesignateInsn->GetOperand(kInsnSecondOpnd);
    if (RegOperand::IsSameReg(objDesignateDestOpnd, objDesignateSrcOpnd) ||
        !RegOperand::IsSameReg(objDesignateSrcOpnd, fieldDesignateBaseOpnd)) {
        return false;
    }
    param.objOpnd = &(objDesignateInsn->GetOperand(kInsnSecondOpnd));
    paramDefInsns.emplace_back(objDesignateInsn);
    return true;
}
5523
IsWriteRefFieldCallInsn(const Insn & insn) const5524 bool WriteFieldCallPattern::IsWriteRefFieldCallInsn(const Insn &insn) const
5525 {
5526 if (!insn.IsCall() || insn.GetMachineOpcode() == MOP_xblr) {
5527 return false;
5528 }
5529 Operand *targetOpnd = insn.GetCallTargetOperand();
5530 DEBUG_ASSERT(targetOpnd != nullptr, "targetOpnd must not be nullptr");
5531 if (!targetOpnd->IsFuncNameOpnd()) {
5532 return false;
5533 }
5534 auto *target = static_cast<FuncNameOperand *>(targetOpnd);
5535 const MIRSymbol *funcSt = target->GetFunctionSymbol();
5536 DEBUG_ASSERT(funcSt->GetSKind() == kStFunc, "the kind of funcSt is unreasonable");
5537 const std::string &funcName = funcSt->GetName();
5538 return funcName == "MCC_WriteRefField" || funcName == "MCC_WriteVolatileField";
5539 }
5540
/*
 * Stateful check across the peephole walk: the first matching write-barrier
 * call is only recorded (prevCallInsn / hasWriteFieldCall) and this returns
 * false; a later matching call on the same object, with no possibly-throwing
 * insn in between, makes the pattern applicable.
 */
bool WriteFieldCallPattern::CheckCondition(Insn &insn)
{
    nextInsn = insn.GetNextMachineInsn();
    if (nextInsn == nullptr) {
        return false;
    }
    if (!IsWriteRefFieldCallInsn(insn)) {
        return false;
    }
    if (!hasWriteFieldCall) {
        /* first barrier call: remember it and keep scanning */
        if (!WriteFieldCallOptPatternMatch(insn, firstCallParam)) {
            return false;
        }
        prevCallInsn = &insn;
        hasWriteFieldCall = true;
        return false;
    }
    if (!WriteFieldCallOptPatternMatch(insn, currentCallParam)) {
        return false;
    }
    /* do not optimize across an insn that may throw */
    if (prevCallInsn == nullptr || MayThrowBetweenInsn(*prevCallInsn, insn)) {
        return false;
    }
    if (firstCallParam.objOpnd == nullptr || currentCallParam.objOpnd == nullptr ||
        currentCallParam.fieldBaseOpnd == nullptr) {
        return false;
    }
    /* both calls must write fields of the same object */
    if (!RegOperand::IsSameReg(*firstCallParam.objOpnd, *currentCallParam.objOpnd)) {
        return false;
    }
    return true;
}
5573
/*
 * Replace the matched write-barrier call with a plain "str" of the field
 * value to [fieldBase + offset], then delete the call and the argument-setup
 * insns collected during matching.
 */
void WriteFieldCallPattern::Run(BB &bb, Insn &insn)
{
    paramDefInsns.clear();
    if (!CheckCondition(insn)) {
        return;
    }
    auto *aarCGFunc = static_cast<AArch64CGFunc *>(cgFunc);
    MemOperand &addr =
        aarCGFunc->CreateMemOpnd(*currentCallParam.fieldBaseOpnd, currentCallParam.fieldOffset, k64BitSize);
    Insn &strInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_xstr, *currentCallParam.fieldValue, addr);
    strInsn.AppendComment("store reference field");
    strInsn.MarkAsAccessRefField(true);
    bb.InsertInsnAfter(insn, strInsn);
    for (Insn *paramDefInsn : paramDefInsns) {
        bb.RemoveInsn(*paramDefInsn);
    }
    bb.RemoveInsn(insn);
    /* the new store becomes the reference point for subsequent matches */
    prevCallInsn = &strInsn;
    nextInsn = strInsn.GetNextMachineInsn();
}
5594
CheckCondition(Insn & insn)5595 bool RemoveDecRefPattern::CheckCondition(Insn &insn)
5596 {
5597 if (insn.GetMachineOpcode() != MOP_xbl) {
5598 return false;
5599 }
5600 auto &target = static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
5601 if (target.GetName() != "MCC_DecRef_NaiveRCFast") {
5602 return false;
5603 }
5604 prevInsn = insn.GetPreviousMachineInsn();
5605 if (prevInsn == nullptr) {
5606 return false;
5607 }
5608 MOperator mopMov = prevInsn->GetMachineOpcode();
5609 if ((mopMov != MOP_xmovrr && mopMov != MOP_xmovri64) ||
5610 static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R0) {
5611 return false;
5612 }
5613 Operand &srcOpndOfMov = prevInsn->GetOperand(kInsnSecondOpnd);
5614 if (!IsZeroRegister(srcOpndOfMov) &&
5615 !(srcOpndOfMov.IsImmediate() && static_cast<ImmOperand &>(srcOpndOfMov).GetValue() == 0)) {
5616 return false;
5617 }
5618 return true;
5619 }
5620
Run(BB & bb,Insn & insn)5621 void RemoveDecRefPattern::Run(BB &bb, Insn &insn)
5622 {
5623 if (!CheckCondition(insn)) {
5624 return;
5625 }
5626 bb.RemoveInsn(*prevInsn);
5627 bb.RemoveInsn(insn);
5628 }
5629
5630 /*
5631 * We optimize the following pattern in this function:
5632 * and x1, x1, #imm (is n power of 2)
5633 * cbz/cbnz x1, .label
5634 * =>
5635 * and x1, x1, #imm (is n power of 2)
5636 * tbnz/tbz x1, #n, .label
5637 */
/* Rewrite "and rX, rS, #(1 << n) ; cbz/cbnz rX, .label" as a single-bit test
 * branch tbz/tbnz rS, #n, .label (see the pattern comment in the file). */
void OneHoleBranchesAArch64::Run(BB &bb, Insn &insn)
{
    AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
    /* only rewrite the BB-terminating branch */
    if (&insn != bb.GetLastInsn()) {
        return;
    }
    /* check cbz/cbnz insn */
    MOperator thisMop = insn.GetMachineOpcode();
    if (thisMop != MOP_wcbz && thisMop != MOP_wcbnz && thisMop != MOP_xcbz && thisMop != MOP_xcbnz) {
        return;
    }
    /* check and insn */
    Insn *prevInsn = insn.GetPreviousMachineInsn();
    if (prevInsn == nullptr) {
        return;
    }
    MOperator prevMop = prevInsn->GetMachineOpcode();
    if (prevMop != MOP_wandrri12 && prevMop != MOP_xandrri13) {
        return;
    }
    /* check opearnd of two insns: the branch must test the and's result */
    if (&(prevInsn->GetOperand(kInsnFirstOpnd)) != &(insn.GetOperand(kInsnFirstOpnd))) {
        return;
    }
    /* the mask must be a single bit; n is the index of that bit */
    auto &imm = static_cast<ImmOperand &>(prevInsn->GetOperand(kInsnThirdOpnd));
    int n = logValueAtBase2(imm.GetValue());
    if (n < 0) {
        return;
    }

    /* replace insn */
    auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
    MOperator newOp = MOP_undef;
    switch (thisMop) {
        case MOP_wcbz:
            newOp = MOP_wtbz;
            break;
        case MOP_wcbnz:
            newOp = MOP_wtbnz;
            break;
        case MOP_xcbz:
            newOp = MOP_xtbz;
            break;
        case MOP_xcbnz:
            newOp = MOP_xtbnz;
            break;
        default:
            CHECK_FATAL(false, "can not touch here");
            break;
    }
    /* branch on bit n of the and's source register */
    ImmOperand &oneHoleOpnd = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
    (void)bb.InsertInsnAfter(
        insn, cgFunc.GetInsnBuilder()->BuildInsn(newOp, prevInsn->GetOperand(kInsnSecondOpnd), oneHoleOpnd, label));
    bb.RemoveInsn(insn);
}
5693
CheckCondition(Insn & insn)5694 bool ReplaceIncDecWithIncPattern::CheckCondition(Insn &insn)
5695 {
5696 if (insn.GetMachineOpcode() != MOP_xbl) {
5697 return false;
5698 }
5699 target = &static_cast<FuncNameOperand &>(insn.GetOperand(kInsnFirstOpnd));
5700 if (target->GetName() != "MCC_IncDecRef_NaiveRCFast") {
5701 return false;
5702 }
5703 prevInsn = insn.GetPreviousMachineInsn();
5704 if (prevInsn == nullptr) {
5705 return false;
5706 }
5707 MOperator mopMov = prevInsn->GetMachineOpcode();
5708 if (mopMov != MOP_xmovrr) {
5709 return false;
5710 }
5711 if (static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd)).GetRegisterNumber() != R1 ||
5712 !IsZeroRegister(prevInsn->GetOperand(kInsnSecondOpnd))) {
5713 return false;
5714 }
5715 return true;
5716 }
5717
Run(BB & bb,Insn & insn)5718 void ReplaceIncDecWithIncPattern::Run(BB &bb, Insn &insn)
5719 {
5720 if (!CheckCondition(insn)) {
5721 return;
5722 }
5723 std::string funcName = "MCC_IncRef_NaiveRCFast";
5724 GStrIdx strIdx = GlobalTables::GetStrTable().GetStrIdxFromName(funcName);
5725 MIRSymbol *st = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx, true);
5726 if (st == nullptr) {
5727 LogInfo::MapleLogger() << "WARNING: Replace IncDec With Inc fail due to no MCC_IncRef_NaiveRCFast func\n";
5728 return;
5729 }
5730 bb.RemoveInsn(*prevInsn);
5731 target->SetFunctionSymbol(*st);
5732 }
5733
Run(BB & bb,Insn & insn)5734 void AndCmpBranchesToTbzAArch64::Run(BB &bb, Insn &insn)
5735 {
5736 AArch64CGFunc *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
5737 if (&insn != bb.GetLastInsn()) {
5738 return;
5739 }
5740 MOperator mopB = insn.GetMachineOpcode();
5741 if (mopB != MOP_beq && mopB != MOP_bne) {
5742 return;
5743 }
5744 auto &label = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
5745 /* get the instruction before bne/beq, expects its type is cmp. */
5746 Insn *prevInsn = insn.GetPreviousMachineInsn();
5747 if (prevInsn == nullptr) {
5748 return;
5749 }
5750 MOperator prevMop = prevInsn->GetMachineOpcode();
5751 if (prevMop != MOP_wcmpri && prevMop != MOP_xcmpri) {
5752 return;
5753 }
5754
5755 /* get the instruction before "cmp", expect its type is "and". */
5756 Insn *prevPrevInsn = prevInsn->GetPreviousMachineInsn();
5757 if (prevPrevInsn == nullptr) {
5758 return;
5759 }
5760 MOperator mopAnd = prevPrevInsn->GetMachineOpcode();
5761 if (mopAnd != MOP_wandrri12 && mopAnd != MOP_xandrri13) {
5762 return;
5763 }
5764
5765 /*
5766 * check operand
5767 *
5768 * the real register of "cmp" and "and" must be the same.
5769 */
5770 if (&(prevInsn->GetOperand(kInsnSecondOpnd)) != &(prevPrevInsn->GetOperand(kInsnFirstOpnd))) {
5771 return;
5772 }
5773
5774 uint32 opndIdx = 2;
5775 if (!prevPrevInsn->GetOperand(opndIdx).IsIntImmediate() || !prevInsn->GetOperand(opndIdx).IsIntImmediate()) {
5776 return;
5777 }
5778 auto &immAnd = static_cast<ImmOperand &>(prevPrevInsn->GetOperand(opndIdx));
5779 auto &immCmp = static_cast<ImmOperand &>(prevInsn->GetOperand(opndIdx));
5780 if (immCmp.GetValue() == 0) {
5781 int n = logValueAtBase2(immAnd.GetValue());
5782 if (n < 0) {
5783 return;
5784 }
5785 /* judge whether the flag_reg and "w0" is live later. */
5786 auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
5787 auto &cmpReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5788 if (FindRegLiveOut(flagReg, *prevInsn->GetBB()) || FindRegLiveOut(cmpReg, *prevInsn->GetBB())) {
5789 return;
5790 }
5791 MOperator mopNew = MOP_undef;
5792 switch (mopB) {
5793 case MOP_beq:
5794 if (mopAnd == MOP_wandrri12) {
5795 mopNew = MOP_wtbz;
5796 } else if (mopAnd == MOP_xandrri13) {
5797 mopNew = MOP_xtbz;
5798 }
5799 break;
5800 case MOP_bne:
5801 if (mopAnd == MOP_wandrri12) {
5802 mopNew = MOP_wtbnz;
5803 } else if (mopAnd == MOP_xandrri13) {
5804 mopNew = MOP_xtbnz;
5805 }
5806 break;
5807 default:
5808 CHECK_FATAL(false, "expects beq or bne insn");
5809 break;
5810 }
5811 ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
5812 (void)bb.InsertInsnAfter(
5813 insn, cgFunc.GetInsnBuilder()->BuildInsn(mopNew, prevPrevInsn->GetOperand(kInsnSecondOpnd), newImm, label));
5814 bb.RemoveInsn(insn);
5815 bb.RemoveInsn(*prevInsn);
5816 bb.RemoveInsn(*prevPrevInsn);
5817 } else {
5818 int n = logValueAtBase2(immAnd.GetValue());
5819 int m = logValueAtBase2(immCmp.GetValue());
5820 if (n < 0 || m < 0 || n != m) {
5821 return;
5822 }
5823 /* judge whether the flag_reg and "w0" is live later. */
5824 auto &flagReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
5825 auto &cmpReg = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5826 if (FindRegLiveOut(flagReg, *prevInsn->GetBB()) || FindRegLiveOut(cmpReg, *prevInsn->GetBB())) {
5827 return;
5828 }
5829 MOperator mopNew = MOP_undef;
5830 switch (mopB) {
5831 case MOP_beq:
5832 if (mopAnd == MOP_wandrri12) {
5833 mopNew = MOP_wtbnz;
5834 } else if (mopAnd == MOP_xandrri13) {
5835 mopNew = MOP_xtbnz;
5836 }
5837 break;
5838 case MOP_bne:
5839 if (mopAnd == MOP_wandrri12) {
5840 mopNew = MOP_wtbz;
5841 } else if (mopAnd == MOP_xandrri13) {
5842 mopNew = MOP_xtbz;
5843 }
5844 break;
5845 default:
5846 CHECK_FATAL(false, "expects beq or bne insn");
5847 break;
5848 }
5849 ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(n, k8BitSize, false);
5850 (void)bb.InsertInsnAfter(
5851 insn, cgFunc.GetInsnBuilder()->BuildInsn(mopNew, prevPrevInsn->GetOperand(kInsnSecondOpnd), newImm, label));
5852 bb.RemoveInsn(insn);
5853 bb.RemoveInsn(*prevInsn);
5854 bb.RemoveInsn(*prevPrevInsn);
5855 }
5856 }
5857
Run(BB & bb,Insn & insn)5858 void RemoveSxtBeforeStrAArch64::Run(BB &bb, Insn &insn)
5859 {
5860 MOperator mop = insn.GetMachineOpcode();
5861 Insn *prevInsn = insn.GetPreviousMachineInsn();
5862 if (prevInsn == nullptr) {
5863 return;
5864 }
5865 MOperator prevMop = prevInsn->GetMachineOpcode();
5866 if (!(mop == MOP_wstrh && prevMop == MOP_xsxth32) && !(mop == MOP_wstrb && prevMop == MOP_xsxtb32)) {
5867 return;
5868 }
5869 auto &prevOpnd0 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnFirstOpnd));
5870 if (IfOperandIsLiveAfterInsn(prevOpnd0, insn)) {
5871 return;
5872 }
5873 auto &prevOpnd1 = static_cast<RegOperand &>(prevInsn->GetOperand(kInsnSecondOpnd));
5874 regno_t prevRegNO0 = prevOpnd0.GetRegisterNumber();
5875 regno_t prevRegNO1 = prevOpnd1.GetRegisterNumber();
5876 regno_t regNO0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber();
5877 if (prevRegNO0 != prevRegNO1) {
5878 return;
5879 }
5880 if (prevRegNO0 == regNO0) {
5881 bb.RemoveInsn(*prevInsn);
5882 return;
5883 }
5884 insn.SetOperand(0, prevOpnd1);
5885 bb.RemoveInsn(*prevInsn);
5886 }
5887
Run(BB & bb,Insn & insn)5888 void UbfxToUxtwPattern::Run(BB &bb, Insn &insn)
5889 {
5890 if (!CheckCondition(insn)) {
5891 return;
5892 }
5893 Insn *newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(MOP_xuxtw64, insn.GetOperand(kInsnFirstOpnd),
5894 insn.GetOperand(kInsnSecondOpnd));
5895 bb.ReplaceInsn(insn, *newInsn);
5896 if (CG_PEEP_DUMP) {
5897 std::vector<Insn *> prevs;
5898 prevs.emplace_back(&insn);
5899 DumpAfterPattern(prevs, newInsn, nullptr);
5900 }
5901 }
5902
CheckCondition(Insn & insn)5903 bool UbfxToUxtwPattern::CheckCondition(Insn &insn)
5904 {
5905 ImmOperand &imm0 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
5906 ImmOperand &imm1 = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
5907 if ((imm0.GetValue() != 0) || (imm1.GetValue() != k32BitSize)) {
5908 return false;
5909 }
5910 return true;
5911 }
5912
Run(BB & bb,Insn & insn)5913 void UbfxAndCbzToTbzPattern::Run(BB &bb, Insn &insn)
5914 {
5915 Operand &opnd2 = static_cast<Operand &>(insn.GetOperand(kInsnSecondOpnd));
5916 ImmOperand &imm3 = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
5917 if (!CheckCondition(insn)) {
5918 return;
5919 }
5920 auto &label = static_cast<LabelOperand &>(useInsn->GetOperand(kInsnSecondOpnd));
5921 MOperator nextMop = useInsn->GetMachineOpcode();
5922 switch (nextMop) {
5923 case MOP_wcbz:
5924 newMop = MOP_wtbz;
5925 break;
5926 case MOP_xcbz:
5927 newMop = MOP_xtbz;
5928 break;
5929 case MOP_wcbnz:
5930 newMop = MOP_wtbnz;
5931 break;
5932 case MOP_xcbnz:
5933 newMop = MOP_xtbnz;
5934 break;
5935 default:
5936 return;
5937 }
5938 if (newMop == MOP_undef) {
5939 return;
5940 }
5941 Insn *newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, opnd2, imm3, label);
5942 BB *useInsnBB = useInsn->GetBB();
5943 useInsnBB->ReplaceInsn(*useInsn, *newInsn);
5944 /* update ssa info */
5945 ssaInfo->ReplaceInsn(*useInsn, *newInsn);
5946 optSuccess = true;
5947 if (CG_PEEP_DUMP) {
5948 std::vector<Insn *> prevs;
5949 (void)prevs.emplace_back(useInsn);
5950 DumpAfterPattern(prevs, newInsn, nullptr);
5951 }
5952 }
5953
CheckCondition(Insn & insn)5954 bool UbfxAndCbzToTbzPattern::CheckCondition(Insn &insn)
5955 {
5956 ImmOperand &imm4 = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
5957 RegOperand &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
5958 InsnSet useInsns = GetAllUseInsn(opnd1);
5959 if (useInsns.size() != 1) {
5960 return false;
5961 }
5962 useInsn = *useInsns.begin();
5963 if (useInsn == nullptr) {
5964 return false;
5965 }
5966 if (imm4.GetValue() == 1) {
5967 switch (useInsn->GetMachineOpcode()) {
5968 case MOP_wcbz:
5969 case MOP_xcbz:
5970 case MOP_wcbnz:
5971 case MOP_xcbnz:
5972 return true;
5973 default:
5974 break;
5975 }
5976 }
5977 return false;
5978 }
5979
IsExtendWordLslPattern(const Insn & insn) const5980 bool ComplexExtendWordLslAArch64::IsExtendWordLslPattern(const Insn &insn) const
5981 {
5982 Insn *nextInsn = insn.GetNext();
5983 if (nextInsn == nullptr) {
5984 return false;
5985 }
5986 MOperator nextMop = nextInsn->GetMachineOpcode();
5987 if (nextMop != MOP_xlslrri6) {
5988 return false;
5989 }
5990 return true;
5991 }
5992
/* Fold "sxtw/uxtw x{d}, w{s}" followed by "lsl x{d2}, x{d}, #shift" into a
 * single sbfiz/ubfiz "x{d2}, x{s}, #shift, #32", when the extend's result is
 * consumed only by the lsl.
 * NOTE(review): the sbfiz/ubfiz choice assumes this Run is invoked only for
 * MOP_xsxtw64 / MOP_xuxtw64 extend insns — confirm against the pattern's
 * registration. */
void ComplexExtendWordLslAArch64::Run(BB &bb, Insn &insn)
{
    if (!IsExtendWordLslPattern(insn)) {
        return;
    }
    MOperator mop = insn.GetMachineOpcode();
    Insn *nextInsn = insn.GetNext();
    auto &nextOpnd2 = static_cast<ImmOperand &>(nextInsn->GetOperand(kInsnThirdOpnd));
    /* shift must not exceed 32: lsb(shift) + width(32) has to fit in 64 bits */
    if (nextOpnd2.GetValue() > k32BitSize) {
        return;
    }
    auto &opnd0 = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    auto &nextOpnd1 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnSecondOpnd));
    regno_t regNO0 = opnd0.GetRegisterNumber();
    regno_t nextRegNO1 = nextOpnd1.GetRegisterNumber();
    /* the lsl must read the extend's destination, and that destination must be
     * dead after the lsl — otherwise removing the extend would be unsound */
    if (regNO0 != nextRegNO1 || IfOperandIsLiveAfterInsn(opnd0, *nextInsn)) {
        return;
    }
    auto &opnd1 = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    auto &nextOpnd0 = static_cast<RegOperand &>(nextInsn->GetOperand(kInsnFirstOpnd));
    regno_t regNO1 = opnd1.GetRegisterNumber();
    cgFunc.InsertExtendSet(regNO1);
    /* signed extend folds to sbfiz; otherwise ubfiz */
    MOperator mopNew = mop == MOP_xsxtw64 ? MOP_xsbfizrri6i6 : MOP_xubfizrri6i6;
    auto *aarch64CGFunc = static_cast<AArch64CGFunc *>(&cgFunc);
    RegOperand &reg1 = aarch64CGFunc->GetOrCreateVirtualRegisterOperand(regNO1);
    ImmOperand &newImm = aarch64CGFunc->CreateImmOperand(k32BitSize, k6BitSize, false);
    /* build "bfiz dest, src, #shift, #32" before touching the BB, since it
     * reuses nextInsn's operands */
    Insn &newInsnSbfiz = cgFunc.GetInsnBuilder()->BuildInsn(mopNew, nextOpnd0, reg1, nextOpnd2, newImm);
    /* remove the lsl first, then replace the extend with the combined insn */
    bb.RemoveInsn(*nextInsn);
    bb.ReplaceInsn(insn, newInsnSbfiz);
}
6023 } /* namespace maplebe */
6024