1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_validbit_opt.h"
17 #include "aarch64_cg.h"
18
19 namespace maplebe {
DoOpt(BB & bb,Insn & insn)20 void AArch64ValidBitOpt::DoOpt(BB &bb, Insn &insn)
21 {
22 MOperator curMop = insn.GetMachineOpcode();
23 switch (curMop) {
24 case MOP_wandrri12:
25 case MOP_xandrri13: {
26 Optimize<AndValidBitPattern>(bb, insn);
27 break;
28 }
29 case MOP_xuxtb32:
30 case MOP_xuxth32:
31 case MOP_wubfxrri5i5:
32 case MOP_xubfxrri6i6:
33 case MOP_wsbfxrri5i5:
34 case MOP_xsbfxrri6i6: {
35 Optimize<ExtValidBitPattern>(bb, insn);
36 break;
37 }
38 case MOP_wcsetrc:
39 case MOP_xcsetrc: {
40 Optimize<CmpCsetVBPattern>(bb, insn);
41 break;
42 }
43 case MOP_bge:
44 case MOP_blt: {
45 Optimize<CmpBranchesPattern>(bb, insn);
46 break;
47 }
48 default:
49 break;
50 }
51 }
52
void AArch64ValidBitOpt::SetValidBits(Insn &insn)
{
    // Recompute the valid-bit count of the insn's def operand from the insn's
    // semantics and the valid bits of its source operands. Opcodes not listed
    // here leave the def operand's valid bits untouched.
    MOperator mop = insn.GetMachineOpcode();
    switch (mop) {
        case MOP_wcsetrc:
        case MOP_xcsetrc: {
            // cset writes 0 or 1: exactly one significant bit.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            dstOpnd.SetValidBitsNum(k1BitSize);
            break;
        }
        case MOP_wmovri32:
        case MOP_xmovri64: {
            // move-immediate: valid bits are derived from the constant's value.
            Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
            DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "must be ImmOperand");
            auto &immOpnd = static_cast<ImmOperand &>(srcOpnd);
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            dstOpnd.SetValidBitsNum(GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize()));
            break;
        }
        case MOP_xmovrr:
        case MOP_wmovrr: {
            // register move: propagate the source's valid bits, but only when
            // src and dst widths match (a 32<->64 move implies ext/trunc).
            auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
            if (!srcOpnd.IsVirtualRegister()) {
                break;
            }
            if (srcOpnd.GetRegisterNumber() == RZR) {
                // zero register: value 0 needs a single significant bit
                srcOpnd.SetValidBitsNum(k1BitSize);
            }
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            if (!(dstOpnd.GetSize() == k64BitSize && srcOpnd.GetSize() == k32BitSize) &&
                !(dstOpnd.GetSize() == k32BitSize && srcOpnd.GetSize() == k64BitSize)) {
                dstOpnd.SetValidBitsNum(srcOpnd.GetValidBitsNum());
            }
            break;
        }
        case MOP_wlsrrri5:
        case MOP_xlsrrri6:
        case MOP_wasrrri5:
        case MOP_xasrrri6: {
            // right shift by constant: low bits are discarded, so the count
            // shrinks by the shift amount (never below one bit).
            Operand &opnd = insn.GetOperand(kInsnThirdOpnd);
            DEBUG_ASSERT(opnd.IsIntImmediate(), "must be ImmOperand");
            uint32 shiftBits = static_cast<uint32>(static_cast<ImmOperand &>(opnd).GetValue());
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
            // signed arithmetic avoids unsigned wrap when shiftBits >= validBits
            if ((static_cast<int64>(srcOpnd.GetValidBitsNum()) - shiftBits) <= 0) {
                dstOpnd.SetValidBitsNum(k1BitSize);
            } else {
                dstOpnd.SetValidBitsNum(srcOpnd.GetValidBitsNum() - shiftBits);
            }
            break;
        }
        case MOP_wlslrri5:
        case MOP_xlslrri6: {
            // left shift by constant: valid bits grow by the shift amount,
            // clamped at the register width.
            Operand &opnd = insn.GetOperand(kInsnThirdOpnd);
            DEBUG_ASSERT(opnd.IsIntImmediate(), "must be ImmOperand");
            uint32 shiftBits = static_cast<uint32>(static_cast<ImmOperand &>(opnd).GetValue());
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
            uint32 newVB = ((srcOpnd.GetValidBitsNum() + shiftBits) > srcOpnd.GetSize())
                               ? srcOpnd.GetSize()
                               : (srcOpnd.GetValidBitsNum() + shiftBits);
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_xuxtb32:
        case MOP_xuxth32: {
            // unsigned extend byte/halfword: capped at 8/16 bits, fewer if the
            // source already has fewer valid bits.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
            uint32 srcVB = srcOpnd.GetValidBitsNum();
            uint32 newVB = dstOpnd.GetValidBitsNum();
            newVB = (mop == MOP_xuxtb32) ? ((srcVB < k8BitSize) ? srcVB : k8BitSize) : newVB;
            newVB = (mop == MOP_xuxth32) ? ((srcVB < k16BitSize) ? srcVB : k16BitSize) : newVB;
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_wldrb:
        case MOP_wldrh: {
            // zero-extending byte/halfword load: 8 or 16 valid bits.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            uint32 newVB = (mop == MOP_wldrb) ? k8BitSize : k16BitSize;
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_wandrrr:
        case MOP_xandrrr: {
            // and: the result can't have more valid bits than either source.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
            uint32 src2VB = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValidBitsNum();
            uint32 newVB = (src1VB <= src2VB ? src1VB : src2VB);
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_wandrri12:
        case MOP_xandrri13: {
            // and with immediate: bounded by the immediate's valid bits too.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
            uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
            uint32 src2VB = GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize());
            uint32 newVB = (src1VB <= src2VB ? src1VB : src2VB);
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_wiorrrr:
        case MOP_xiorrrr: {
            // orr: the result needs as many bits as the wider source.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
            uint32 src2VB = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValidBitsNum();
            uint32 newVB = (src1VB >= src2VB ? src1VB : src2VB);
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        case MOP_wiorrri12:
        case MOP_xiorrri13: {
            // orr with immediate: max of register and immediate valid bits.
            auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
            auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
            uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
            uint32 src2VB = GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize());
            uint32 newVB = (src1VB >= src2VB ? src1VB : src2VB);
            dstOpnd.SetValidBitsNum(newVB);
            break;
        }
        default:
            break;
    }
}
177
SetPhiValidBits(Insn & insn)178 bool AArch64ValidBitOpt::SetPhiValidBits(Insn &insn)
179 {
180 Operand &defOpnd = insn.GetOperand(kInsnFirstOpnd);
181 DEBUG_ASSERT(defOpnd.IsRegister(), "expect register");
182 auto &defRegOpnd = static_cast<RegOperand &>(defOpnd);
183 Operand &phiOpnd = insn.GetOperand(kInsnSecondOpnd);
184 DEBUG_ASSERT(phiOpnd.IsPhi(), "expect phiList");
185 auto &phiList = static_cast<PhiOperand &>(phiOpnd);
186 int32 maxVB = -1;
187 for (auto phiOpndIt : phiList.GetOperands()) {
188 if (phiOpndIt.second != nullptr) {
189 maxVB = (maxVB < static_cast<int32>(phiOpndIt.second->GetValidBitsNum()))
190 ? static_cast<int32>(phiOpndIt.second->GetValidBitsNum())
191 : maxVB;
192 }
193 }
194 if (maxVB >= static_cast<int32>(k0BitSize) && static_cast<uint32>(maxVB) != defRegOpnd.GetValidBitsNum()) {
195 defRegOpnd.SetValidBitsNum(static_cast<uint32>(maxVB));
196 return true;
197 }
198 return false;
199 }
200
IsZeroRegister(const Operand & opnd)201 static bool IsZeroRegister(const Operand &opnd)
202 {
203 if (!opnd.IsRegister()) {
204 return false;
205 }
206 const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
207 return regOpnd->GetRegisterNumber() == RZR;
208 }
209
CheckImmValidBit(int64 andImm,uint32 andImmVB,int64 shiftImm) const210 bool AndValidBitPattern::CheckImmValidBit(int64 andImm, uint32 andImmVB, int64 shiftImm) const
211 {
212 if ((__builtin_ffs(static_cast<int>(andImm)) - 1 == shiftImm) &&
213 ((andImm >> shiftImm) == ((1 << (andImmVB - shiftImm)) - 1))) {
214 return true;
215 }
216 return false;
217 }
218
bool AndValidBitPattern::CheckCondition(Insn &insn)
{
    // Decide whether this and-with-immediate is redundant given the source's
    // valid bits, so it can be rewritten as a plain register move. Sets
    // newMop, desReg and srcReg as a side effect for Run().
    MOperator mOp = insn.GetMachineOpcode();
    if (mOp == MOP_wandrri12) {
        newMop = MOP_wmovrr;
    } else if (mOp == MOP_xandrri13) {
        newMop = MOP_xmovrr;
    }
    if (newMop == MOP_undef) {
        return false;
    }
    CHECK_FATAL(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "must be register!");
    CHECK_FATAL(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "must be register!");
    CHECK_FATAL(insn.GetOperand(kInsnThirdOpnd).IsImmediate(), "must be imm!");
    desReg = static_cast<RegOperand *>(&insn.GetOperand(kInsnFirstOpnd));
    srcReg = static_cast<RegOperand *>(&insn.GetOperand(kInsnSecondOpnd));
    auto &andImm = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
    int64 immVal = andImm.GetValue();
    uint32 validBit = srcReg->GetValidBitsNum();
    // Fast path: masking an 8/16-bit value with 0xFF/0xFFFF keeps it intact.
    if (validBit == k8BitSize && immVal == 0xFF) {
        return true;
    } else if (validBit == k16BitSize && immVal == 0xFFFF) {
        return true;
    }
    /* and R287[32], R286[64], #255 */
    // A narrowing def whose source has more valid bits than the def's width
    // cannot be turned into a move (the move would not truncate).
    if ((desReg->GetSize() < srcReg->GetSize()) && (srcReg->GetValidBitsNum() > desReg->GetSize())) {
        return false;
    }
    // Otherwise only allow the rewrite when the single use is a right shift
    // that discards exactly the bits the and would have cleared.
    InsnSet useInsns = GetAllUseInsn(*desReg);
    if (useInsns.size() == 1) {
        Insn *useInsn = *useInsns.begin();
        MOperator useMop = useInsn->GetMachineOpcode();
        if (useMop != MOP_wasrrri5 && useMop != MOP_xasrrri6 && useMop != MOP_wlsrrri5 && useMop != MOP_xlsrrri6) {
            return false;
        }
        Operand &shiftOpnd = useInsn->GetOperand(kInsnThirdOpnd);
        CHECK_FATAL(shiftOpnd.IsImmediate(), "must be immediate");
        int64 shiftImm = static_cast<ImmOperand &>(shiftOpnd).GetValue();
        uint32 andImmVB = ValidBitOpt::GetImmValidBit(andImm.GetValue(), desReg->GetSize());
        if ((srcReg->GetValidBitsNum() == andImmVB) && CheckImmValidBit(andImm.GetValue(), andImmVB, shiftImm)) {
            return true;
        }
    }
    return false;
}
264
Run(BB & bb,Insn & insn)265 void AndValidBitPattern::Run(BB &bb, Insn &insn)
266 {
267 if (!CheckCondition(insn)) {
268 return;
269 }
270 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *desReg, *srcReg);
271 bb.ReplaceInsn(insn, newInsn);
272 /* update ssa info */
273 ssaInfo->ReplaceInsn(insn, newInsn);
274 if (desReg->GetSize() < srcReg->GetSize()) {
275 ssaInfo->InsertSafePropInsn(newInsn.GetId());
276 }
277 /* dump pattern info */
278 if (CG_VALIDBIT_OPT_DUMP) {
279 std::vector<Insn *> prevs;
280 prevs.emplace_back(&insn);
281 DumpAfterPattern(prevs, &insn, &newInsn);
282 }
283 }
284
bool ExtValidBitPattern::CheckCondition(Insn &insn)
{
    // Decide whether the extension / bit-field extract is redundant because
    // the source's valid bits already fit in the extracted field, in which
    // case the insn can become a plain move. Sets newMop/newDstOpnd/newSrcOpnd.
    Operand &dstOpnd = insn.GetOperand(kInsnFirstOpnd);
    Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
    MOperator mOp = insn.GetMachineOpcode();
    switch (mOp) {
        case MOP_xuxtb32:
        case MOP_xuxth32: {
            // Redundant only if SetValidBits already clamped dst's valid bits
            // to the same count as src (i.e. the extend changed nothing).
            CHECK_FATAL(dstOpnd.IsRegister(), "must be register");
            CHECK_FATAL(srcOpnd.IsRegister(), "must be register");
            if (static_cast<RegOperand &>(dstOpnd).GetValidBitsNum() !=
                static_cast<RegOperand &>(srcOpnd).GetValidBitsNum()) {
                return false;
            }
            newMop = MOP_wmovrr;
            break;
        }
        case MOP_wubfxrri5i5:
        case MOP_xubfxrri6i6:
        case MOP_wsbfxrri5i5:
        case MOP_xsbfxrri6i6: {
            // ubfx/sbfx dst, src, #lsb, #width: redundant when it extracts
            // from bit 0 and the field is wide enough for all valid bits.
            Operand &immOpnd1 = insn.GetOperand(kInsnThirdOpnd);
            Operand &immOpnd2 = insn.GetOperand(kInsnFourthOpnd);
            CHECK_FATAL(immOpnd1.IsImmediate(), "must be immediate");
            CHECK_FATAL(immOpnd2.IsImmediate(), "must be immediate");
            int64 lsb = static_cast<ImmOperand &>(immOpnd1).GetValue();
            int64 width = static_cast<ImmOperand &>(immOpnd2).GetValue();
            if (lsb != 0 || static_cast<RegOperand &>(srcOpnd).GetValidBitsNum() > width) {
                return false;
            }
            // signed extract additionally requires a full-width field, since
            // a partial field would still sign-extend from within the value
            if ((mOp == MOP_wsbfxrri5i5 || mOp == MOP_xsbfxrri6i6) &&
                width != static_cast<RegOperand &>(srcOpnd).GetSize()) {
                return false;
            }
            if (mOp == MOP_wubfxrri5i5 || mOp == MOP_wsbfxrri5i5) {
                newMop = MOP_wmovrr;
            } else if (mOp == MOP_xubfxrri6i6 || mOp == MOP_xsbfxrri6i6) {
                newMop = MOP_xmovrr;
            }
            break;
        }
        default:
            return false;
    }
    newDstOpnd = &static_cast<RegOperand &>(dstOpnd);
    newSrcOpnd = &static_cast<RegOperand &>(srcOpnd);
    return true;
}
333
Run(BB & bb,Insn & insn)334 void ExtValidBitPattern::Run(BB &bb, Insn &insn)
335 {
336 if (!CheckCondition(insn)) {
337 return;
338 }
339 MOperator mOp = insn.GetMachineOpcode();
340 switch (mOp) {
341 case MOP_xuxtb32:
342 case MOP_xuxth32: {
343 insn.SetMOP(AArch64CG::kMd[newMop]);
344 break;
345 }
346 case MOP_wubfxrri5i5:
347 case MOP_xubfxrri6i6:
348 case MOP_wsbfxrri5i5:
349 case MOP_xsbfxrri6i6: {
350 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *newDstOpnd, *newSrcOpnd);
351 bb.ReplaceInsn(insn, newInsn);
352 /* update ssa info */
353 ssaInfo->ReplaceInsn(insn, newInsn);
354 /* dump pattern info */
355 if (CG_VALIDBIT_OPT_DUMP) {
356 std::vector<Insn *> prevs;
357 prevs.emplace_back(&insn);
358 DumpAfterPattern(prevs, &insn, &newInsn);
359 }
360 break;
361 }
362 default:
363 return;
364 }
365 }
366
IsContinuousCmpCset(const Insn & curInsn)367 bool CmpCsetVBPattern::IsContinuousCmpCset(const Insn &curInsn)
368 {
369 auto &csetDstReg = static_cast<RegOperand &>(curInsn.GetOperand(kInsnFirstOpnd));
370 CHECK_FATAL(csetDstReg.IsSSAForm(), "dstOpnd must be ssa form");
371 VRegVersion *dstVersion = ssaInfo->FindSSAVersion(csetDstReg.GetRegisterNumber());
372 DEBUG_ASSERT(dstVersion != nullptr, "find vRegVersion failed");
373 for (auto useDUInfoIt : dstVersion->GetAllUseInsns()) {
374 if (useDUInfoIt.second == nullptr) {
375 continue;
376 }
377 Insn *useInsn = useDUInfoIt.second->GetInsn();
378 if (useInsn == nullptr) {
379 continue;
380 }
381 MOperator useMop = useInsn->GetMachineOpcode();
382 if (useMop == MOP_wcmpri || useMop == MOP_xcmpri) {
383 auto &ccDstReg = static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd));
384 CHECK_FATAL(ccDstReg.IsSSAForm(), "dstOpnd must be ssa form");
385 VRegVersion *ccDstVersion = ssaInfo->FindSSAVersion(ccDstReg.GetRegisterNumber());
386 DEBUG_ASSERT(ccDstVersion != nullptr, "find vRegVersion failed");
387 for (auto ccUseDUInfoIt : ccDstVersion->GetAllUseInsns()) {
388 if (ccUseDUInfoIt.second == nullptr) {
389 continue;
390 }
391 Insn *ccUseInsn = ccUseDUInfoIt.second->GetInsn();
392 if (ccUseInsn == nullptr) {
393 continue;
394 }
395 MOperator ccUseMop = ccUseInsn->GetMachineOpcode();
396 if (ccUseMop == MOP_wcsetrc || ccUseMop == MOP_xcsetrc) {
397 return true;
398 }
399 }
400 }
401 }
402 return false;
403 }
404
OpndDefByOneValidBit(const Insn & defInsn)405 bool CmpCsetVBPattern::OpndDefByOneValidBit(const Insn &defInsn)
406 {
407 if (defInsn.IsPhi()) {
408 return (static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() == k1BitSize) ||
409 (static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() == k0BitSize);
410 }
411 MOperator defMop = defInsn.GetMachineOpcode();
412 switch (defMop) {
413 case MOP_wcsetrc:
414 case MOP_xcsetrc:
415 return true;
416 case MOP_wmovri32:
417 case MOP_xmovri64: {
418 Operand &defOpnd = defInsn.GetOperand(kInsnSecondOpnd);
419 DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
420 auto &defConst = static_cast<ImmOperand &>(defOpnd);
421 int64 defConstValue = defConst.GetValue();
422 return (defConstValue == 0 || defConstValue == 1);
423 }
424 case MOP_xmovrr:
425 case MOP_wmovrr:
426 return IsZeroRegister(defInsn.GetOperand(kInsnSecondOpnd));
427 case MOP_wlsrrri5:
428 case MOP_xlsrrri6: {
429 Operand &opnd2 = defInsn.GetOperand(kInsnThirdOpnd);
430 DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
431 auto &opndImm = static_cast<ImmOperand &>(opnd2);
432 int64 shiftBits = opndImm.GetValue();
433 return ((defMop == MOP_wlsrrri5 && shiftBits == (k32BitSize - 1)) ||
434 (defMop == MOP_xlsrrri6 && shiftBits == (k64BitSize - 1)));
435 }
436 default:
437 return false;
438 }
439 }
440
CheckCondition(Insn & csetInsn)441 bool CmpCsetVBPattern::CheckCondition(Insn &csetInsn)
442 {
443 MOperator curMop = csetInsn.GetMachineOpcode();
444 if (curMop != MOP_wcsetrc && curMop != MOP_xcsetrc) {
445 return false;
446 }
447 /* combine [continuous cmp & cset] first, to eliminate more insns */
448 if (IsContinuousCmpCset(csetInsn)) {
449 return false;
450 }
451 RegOperand &ccReg = static_cast<RegOperand &>(csetInsn.GetOperand(kInsnThirdOpnd));
452 regno_t ccRegNo = ccReg.GetRegisterNumber();
453 cmpInsn = GetDefInsn(ccReg);
454 CHECK_NULL_FATAL(cmpInsn);
455 MOperator mop = cmpInsn->GetMachineOpcode();
456 if ((mop != MOP_wcmpri) && (mop != MOP_xcmpri)) {
457 return false;
458 }
459 VRegVersion *ccRegVersion = ssaInfo->FindSSAVersion(ccRegNo);
460 if (ccRegVersion->GetAllUseInsns().size() > k1BitSize) {
461 return false;
462 }
463 Operand &cmpSecondOpnd = cmpInsn->GetOperand(kInsnThirdOpnd);
464 CHECK_FATAL(cmpSecondOpnd.IsIntImmediate(), "expects ImmOperand");
465 auto &cmpConst = static_cast<ImmOperand &>(cmpSecondOpnd);
466 cmpConstVal = cmpConst.GetValue();
467 /* get ImmOperand, must be 0 or 1 */
468 if ((cmpConstVal != 0) && (cmpConstVal != k1BitSize)) {
469 return false;
470 }
471 Operand &cmpFirstOpnd = cmpInsn->GetOperand(kInsnSecondOpnd);
472 CHECK_FATAL(cmpFirstOpnd.IsRegister(), "cmpFirstOpnd must be register!");
473 RegOperand &cmpReg = static_cast<RegOperand &>(cmpFirstOpnd);
474 Insn *defInsn = GetDefInsn(cmpReg);
475 if (defInsn == nullptr) {
476 return false;
477 }
478 if (defInsn->GetMachineOpcode() == MOP_wmovrr || defInsn->GetMachineOpcode() == MOP_xmovrr) {
479 auto &srcOpnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
480 if (!srcOpnd.IsVirtualRegister()) {
481 return false;
482 }
483 }
484 return ((cmpReg.GetValidBitsNum() == k1BitSize) || (cmpReg.GetValidBitsNum() == k0BitSize) ||
485 OpndDefByOneValidBit(*defInsn));
486 }
487
Run(BB & bb,Insn & csetInsn)488 void CmpCsetVBPattern::Run(BB &bb, Insn &csetInsn)
489 {
490 if (!CheckCondition(csetInsn)) {
491 return;
492 }
493 Operand &csetFirstOpnd = csetInsn.GetOperand(kInsnFirstOpnd);
494 Operand &cmpFirstOpnd = cmpInsn->GetOperand(kInsnSecondOpnd);
495 auto &cond = static_cast<CondOperand &>(csetInsn.GetOperand(kInsnSecondOpnd));
496 Insn *newInsn = nullptr;
497
498 /* cmpFirstOpnd == 1 */
499 if ((cmpConstVal == 0 && cond.GetCode() == CC_NE) || (cmpConstVal == 1 && cond.GetCode() == CC_EQ)) {
500 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
501 newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd);
502 } else if ((cmpConstVal == 1 && cond.GetCode() == CC_NE) || (cmpConstVal == 0 && cond.GetCode() == CC_EQ)) {
503 /* cmpFirstOpnd == 0 */
504 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
505 ImmOperand &one = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(1, k8BitSize, false);
506 newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd, one);
507 }
508 if (newInsn == nullptr) {
509 return;
510 }
511 bb.ReplaceInsn(csetInsn, *newInsn);
512 ssaInfo->ReplaceInsn(csetInsn, *newInsn);
513 if (CG_VALIDBIT_OPT_DUMP && (newInsn != nullptr)) {
514 std::vector<Insn *> prevInsns;
515 prevInsns.emplace_back(cmpInsn);
516 prevInsns.emplace_back(&csetInsn);
517 DumpAfterPattern(prevInsns, newInsn, nullptr);
518 }
519 }
520
SelectNewMop(MOperator mop)521 void CmpBranchesPattern::SelectNewMop(MOperator mop)
522 {
523 switch (mop) {
524 case MOP_bge: {
525 newMop = is64Bit ? MOP_xtbnz : MOP_wtbnz;
526 break;
527 }
528 case MOP_blt: {
529 newMop = is64Bit ? MOP_xtbz : MOP_wtbz;
530 break;
531 }
532 default:
533 break;
534 }
535 }
536
CheckCondition(Insn & insn)537 bool CmpBranchesPattern::CheckCondition(Insn &insn)
538 {
539 MOperator curMop = insn.GetMachineOpcode();
540 if (curMop != MOP_bge && curMop != MOP_blt) {
541 return false;
542 }
543 auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
544 prevCmpInsn = GetDefInsn(ccReg);
545 if (prevCmpInsn == nullptr) {
546 return false;
547 }
548 MOperator cmpMop = prevCmpInsn->GetMachineOpcode();
549 if (cmpMop != MOP_wcmpri && cmpMop != MOP_xcmpri) {
550 return false;
551 }
552 is64Bit = (cmpMop == MOP_xcmpri);
553 auto &cmpUseOpnd = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
554 auto &cmpImmOpnd = static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd));
555 int64 cmpImmVal = cmpImmOpnd.GetValue();
556 newImmVal = ValidBitOpt::GetLogValueAtBase2(cmpImmVal);
557 if (newImmVal < 0 || cmpUseOpnd.GetValidBitsNum() != (newImmVal + 1)) {
558 return false;
559 }
560 SelectNewMop(curMop);
561 if (newMop == MOP_undef) {
562 return false;
563 }
564 return true;
565 }
566
Run(BB & bb,Insn & insn)567 void CmpBranchesPattern::Run(BB &bb, Insn &insn)
568 {
569 if (!CheckCondition(insn)) {
570 return;
571 }
572 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
573 auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
574 ImmOperand &newImmOpnd = aarFunc->CreateImmOperand(newImmVal, k8BitSize, false);
575 Insn &newInsn =
576 cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevCmpInsn->GetOperand(kInsnSecondOpnd), newImmOpnd, labelOpnd);
577 bb.ReplaceInsn(insn, newInsn);
578 /* update ssa info */
579 ssaInfo->ReplaceInsn(insn, newInsn);
580 /* dump pattern info */
581 if (CG_VALIDBIT_OPT_DUMP) {
582 std::vector<Insn *> prevs;
583 prevs.emplace_back(prevCmpInsn);
584 DumpAfterPattern(prevs, &insn, &newInsn);
585 }
586 }
587 } /* namespace maplebe */
588