1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_validbit_opt.h"
17 #include "aarch64_cg.h"
18
19 namespace maplebe {
DoOpt(BB & bb,Insn & insn)20 void AArch64ValidBitOpt::DoOpt(BB &bb, Insn &insn)
21 {
22 MOperator curMop = insn.GetMachineOpcode();
23 switch (curMop) {
24 case MOP_wandrri12:
25 case MOP_xandrri13: {
26 Optimize<AndValidBitPattern>(bb, insn);
27 break;
28 }
29 case MOP_xuxtb32:
30 case MOP_xuxth32:
31 case MOP_wubfxrri5i5:
32 case MOP_xubfxrri6i6:
33 case MOP_wsbfxrri5i5:
34 case MOP_xsbfxrri6i6: {
35 Optimize<ExtValidBitPattern>(bb, insn);
36 break;
37 }
38 case MOP_wcsetrc:
39 case MOP_xcsetrc: {
40 Optimize<CmpCsetVBPattern>(bb, insn);
41 break;
42 }
43 case MOP_bge:
44 case MOP_blt: {
45 Optimize<CmpBranchesPattern>(bb, insn);
46 break;
47 }
48 default:
49 break;
50 }
51 }
52
SetValidBits(Insn & insn)53 void AArch64ValidBitOpt::SetValidBits(Insn &insn)
54 {
55 MOperator mop = insn.GetMachineOpcode();
56 switch (mop) {
57 case MOP_wcsetrc:
58 case MOP_xcsetrc: {
59 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
60 dstOpnd.SetValidBitsNum(k1BitSize);
61 break;
62 }
63 case MOP_wmovri32:
64 case MOP_xmovri64: {
65 Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
66 DEBUG_ASSERT(srcOpnd.IsIntImmediate(), "must be ImmOperand");
67 auto &immOpnd = static_cast<ImmOperand &>(srcOpnd);
68 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
69 dstOpnd.SetValidBitsNum(GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize()));
70 break;
71 }
72 case MOP_xmovrr:
73 case MOP_wmovrr: {
74 auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
75 if (!srcOpnd.IsVirtualRegister()) {
76 break;
77 }
78 if (srcOpnd.GetRegisterNumber() == RZR) {
79 srcOpnd.SetValidBitsNum(k1BitSize);
80 }
81 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
82 if (!(dstOpnd.GetSize() == k64BitSize && srcOpnd.GetSize() == k32BitSize) &&
83 !(dstOpnd.GetSize() == k32BitSize && srcOpnd.GetSize() == k64BitSize)) {
84 dstOpnd.SetValidBitsNum(srcOpnd.GetValidBitsNum());
85 }
86 break;
87 }
88 case MOP_wlsrrri5:
89 case MOP_xlsrrri6:
90 case MOP_wasrrri5:
91 case MOP_xasrrri6: {
92 Operand &opnd = insn.GetOperand(kInsnThirdOpnd);
93 DEBUG_ASSERT(opnd.IsIntImmediate(), "must be ImmOperand");
94 uint32 shiftBits = static_cast<uint32>(static_cast<ImmOperand &>(opnd).GetValue());
95 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
96 auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
97 if ((static_cast<uint32>(srcOpnd.GetValidBitsNum()) - shiftBits) <= 0) {
98 dstOpnd.SetValidBitsNum(k1BitSize);
99 } else {
100 dstOpnd.SetValidBitsNum(srcOpnd.GetValidBitsNum() - shiftBits);
101 }
102 break;
103 }
104 case MOP_wlslrri5:
105 case MOP_xlslrri6: {
106 Operand &opnd = insn.GetOperand(kInsnThirdOpnd);
107 DEBUG_ASSERT(opnd.IsIntImmediate(), "must be ImmOperand");
108 uint32 shiftBits = static_cast<uint32>(static_cast<ImmOperand &>(opnd).GetValue());
109 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
110 auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
111 uint32 newVB = ((srcOpnd.GetValidBitsNum() + shiftBits) > srcOpnd.GetSize())
112 ? srcOpnd.GetSize()
113 : (srcOpnd.GetValidBitsNum() + shiftBits);
114 dstOpnd.SetValidBitsNum(newVB);
115 break;
116 }
117 case MOP_xuxtb32:
118 case MOP_xuxth32: {
119 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
120 auto &srcOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
121 uint32 srcVB = srcOpnd.GetValidBitsNum();
122 uint32 newVB = dstOpnd.GetValidBitsNum();
123 newVB = (mop == MOP_xuxtb32) ? ((srcVB < k8BitSize) ? srcVB : k8BitSize) : newVB;
124 newVB = (mop == MOP_xuxth32) ? ((srcVB < k16BitSize) ? srcVB : k16BitSize) : newVB;
125 dstOpnd.SetValidBitsNum(newVB);
126 break;
127 }
128 case MOP_wldrb:
129 case MOP_wldrh: {
130 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
131 uint32 newVB = (mop == MOP_wldrb) ? k8BitSize : k16BitSize;
132 dstOpnd.SetValidBitsNum(newVB);
133 break;
134 }
135 case MOP_wandrrr:
136 case MOP_xandrrr: {
137 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
138 uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
139 uint32 src2VB = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValidBitsNum();
140 uint32 newVB = (src1VB <= src2VB ? src1VB : src2VB);
141 dstOpnd.SetValidBitsNum(newVB);
142 break;
143 }
144 case MOP_wandrri12:
145 case MOP_xandrri13: {
146 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
147 auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
148 uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
149 uint32 src2VB = GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize());
150 uint32 newVB = (src1VB <= src2VB ? src1VB : src2VB);
151 dstOpnd.SetValidBitsNum(newVB);
152 break;
153 }
154 case MOP_wiorrrr:
155 case MOP_xiorrrr: {
156 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
157 uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
158 uint32 src2VB = static_cast<RegOperand &>(insn.GetOperand(kInsnThirdOpnd)).GetValidBitsNum();
159 uint32 newVB = (src1VB >= src2VB ? src1VB : src2VB);
160 dstOpnd.SetValidBitsNum(newVB);
161 break;
162 }
163 case MOP_wiorrri12:
164 case MOP_xiorrri13: {
165 auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
166 auto &immOpnd = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
167 uint32 src1VB = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetValidBitsNum();
168 uint32 src2VB = GetImmValidBit(immOpnd.GetValue(), dstOpnd.GetSize());
169 uint32 newVB = (src1VB >= src2VB ? src1VB : src2VB);
170 dstOpnd.SetValidBitsNum(newVB);
171 break;
172 }
173 default:
174 break;
175 }
176 }
177
SetPhiValidBits(Insn & insn)178 bool AArch64ValidBitOpt::SetPhiValidBits(Insn &insn)
179 {
180 Operand &defOpnd = insn.GetOperand(kInsnFirstOpnd);
181 DEBUG_ASSERT(defOpnd.IsRegister(), "expect register");
182 auto &defRegOpnd = static_cast<RegOperand &>(defOpnd);
183 Operand &phiOpnd = insn.GetOperand(kInsnSecondOpnd);
184 DEBUG_ASSERT(phiOpnd.IsPhi(), "expect phiList");
185 auto &phiList = static_cast<PhiOperand &>(phiOpnd);
186 int32 maxVB = -1;
187 for (auto phiOpndIt : phiList.GetOperands()) {
188 if (phiOpndIt.second != nullptr) {
189 maxVB = (maxVB < static_cast<int32>(phiOpndIt.second->GetValidBitsNum()))
190 ? static_cast<int32>(phiOpndIt.second->GetValidBitsNum())
191 : maxVB;
192 }
193 }
194 if (maxVB >= static_cast<int32>(k0BitSize) && static_cast<uint32>(maxVB) != defRegOpnd.GetValidBitsNum()) {
195 defRegOpnd.SetValidBitsNum(static_cast<uint32>(maxVB));
196 return true;
197 }
198 return false;
199 }
200
IsZeroRegister(const Operand & opnd)201 static bool IsZeroRegister(const Operand &opnd)
202 {
203 if (!opnd.IsRegister()) {
204 return false;
205 }
206 const RegOperand *regOpnd = static_cast<const RegOperand *>(&opnd);
207 return regOpnd->GetRegisterNumber() == RZR;
208 }
209
CheckImmValidBit(int64 andImm,uint32 andImmVB,int64 shiftImm) const210 bool AndValidBitPattern::CheckImmValidBit(int64 andImm, uint32 andImmVB, int64 shiftImm) const
211 {
212 if ((__builtin_ffs(static_cast<int>(andImm)) - 1 == shiftImm) &&
213 ((andImm >> shiftImm) == ((1 << (andImmVB - shiftImm)) - 1))) {
214 return true;
215 }
216 return false;
217 }
218
/*
 * Decides whether an "and Rd, Rn, #imm" can be rewritten as a plain mov.
 * That is legal when the mask keeps every valid bit of Rn: either the mask
 * exactly covers Rn's valid bits (0xFF / 0xFFFF fast paths), or the single
 * user of Rd is a right shift that discards exactly the bits the mask zeroes.
 * Side effects: records newMop, desReg and srcReg for Run().
 */
bool AndValidBitPattern::CheckCondition(Insn &insn)
{
    MOperator mOp = insn.GetMachineOpcode();
    if (mOp == MOP_wandrri12) {
        newMop = MOP_wmovrr;
    } else if (mOp == MOP_xandrri13) {
        newMop = MOP_xmovrr;
    }
    /* any other opcode leaves newMop as MOP_undef: pattern does not apply */
    if (newMop == MOP_undef) {
        return false;
    }
    CHECK_FATAL(insn.GetOperand(kInsnFirstOpnd).IsRegister(), "must be register!");
    CHECK_FATAL(insn.GetOperand(kInsnSecondOpnd).IsRegister(), "must be register!");
    CHECK_FATAL(insn.GetOperand(kInsnThirdOpnd).IsImmediate(), "must be imm!");
    desReg = static_cast<RegOperand *>(&insn.GetOperand(kInsnFirstOpnd));
    srcReg = static_cast<RegOperand *>(&insn.GetOperand(kInsnSecondOpnd));
    auto &andImm = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
    int64 immVal = andImm.GetValue();
    uint32 validBit = srcReg->GetValidBitsNum();
    /* mask covers all valid bits of the source: the and is a no-op */
    if (validBit == k8BitSize && immVal == 0xFF) {
        return true;
    } else if (validBit == k16BitSize && immVal == 0xFFFF) {
        return true;
    }
    /* and R287[32], R286[64], #255 — narrowing with valid bits wider than the
     * destination cannot be proven safe; bail out */
    if ((desReg->GetSize() < srcReg->GetSize()) && (srcReg->GetValidBitsNum() > desReg->GetSize())) {
        return false;
    }
    InsnSet useInsns = GetAllUseInsn(*desReg);
    /* only safe when Rd has exactly one consumer we can reason about */
    if (useInsns.size() == 1) {
        Insn *useInsn = *useInsns.begin();
        MOperator useMop = useInsn->GetMachineOpcode();
        /* the single use must be an arithmetic/logical right shift */
        if (useMop != MOP_wasrrri5 && useMop != MOP_xasrrri6 && useMop != MOP_wlsrrri5 && useMop != MOP_xlsrrri6) {
            return false;
        }
        Operand &shiftOpnd = useInsn->GetOperand(kInsnThirdOpnd);
        CHECK_FATAL(shiftOpnd.IsImmediate(), "must be immediate");
        int64 shiftImm = static_cast<ImmOperand &>(shiftOpnd).GetValue();
        uint32 andImmVB = ValidBitOpt::GetImmValidBit(andImm.GetValue(), desReg->GetSize());
        /* the shift throws away exactly the low bits the mask cleared, and the
         * mask's top matches the source's valid bits — the and is redundant */
        if ((srcReg->GetValidBitsNum() == andImmVB) && CheckImmValidBit(andImm.GetValue(), andImmVB, shiftImm)) {
            return true;
        }
    }
    return false;
}
264
Run(BB & bb,Insn & insn)265 void AndValidBitPattern::Run(BB &bb, Insn &insn)
266 {
267 if (!CheckCondition(insn)) {
268 return;
269 }
270 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *desReg, *srcReg);
271 bb.ReplaceInsn(insn, newInsn);
272 /* update ssa info */
273 ssaInfo->ReplaceInsn(insn, newInsn);
274 if (desReg->GetSize() < srcReg->GetSize()) {
275 ssaInfo->InsertSafePropInsn(newInsn.GetId());
276 }
277 /* dump pattern info */
278 if (CG_VALIDBIT_OPT_DUMP) {
279 std::vector<Insn *> prevs;
280 prevs.emplace_back(&insn);
281 DumpAfterPattern(prevs, &insn, &newInsn);
282 }
283 }
284
/*
 * Decides whether a zero/sign extension or bit-field extract is redundant
 * (the source's valid bits already fit the extracted field) and can be
 * replaced with a mov. Side effects: records newMop, newDstOpnd, newSrcOpnd.
 */
bool ExtValidBitPattern::CheckCondition(Insn &insn)
{
    Operand &dstOpnd = insn.GetOperand(kInsnFirstOpnd);
    Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
    MOperator mOp = insn.GetMachineOpcode();
    switch (mOp) {
        case MOP_xuxtb32:
        case MOP_xuxth32: {
            CHECK_FATAL(dstOpnd.IsRegister(), "must be register");
            CHECK_FATAL(srcOpnd.IsRegister(), "must be register");
            /* extension only redundant when it provably changes no valid bit */
            if (static_cast<RegOperand &>(dstOpnd).GetValidBitsNum() !=
                static_cast<RegOperand &>(srcOpnd).GetValidBitsNum()) {
                return false;
            }
            newMop = MOP_wmovrr;
            break;
        }
        case MOP_wubfxrri5i5:
        case MOP_xubfxrri6i6:
        case MOP_wsbfxrri5i5:
        case MOP_xsbfxrri6i6: {
            Operand &immOpnd1 = insn.GetOperand(kInsnThirdOpnd);
            Operand &immOpnd2 = insn.GetOperand(kInsnFourthOpnd);
            CHECK_FATAL(immOpnd1.IsImmediate(), "must be immediate");
            CHECK_FATAL(immOpnd2.IsImmediate(), "must be immediate");
            int64 lsb = static_cast<ImmOperand &>(immOpnd1).GetValue();
            int64 width = static_cast<ImmOperand &>(immOpnd2).GetValue();
            /* the field must start at bit 0 and cover every valid source bit */
            if (lsb != 0 || static_cast<RegOperand &>(srcOpnd).GetValidBitsNum() > width) {
                return false;
            }
            /* signed extract: only a full-width field preserves the sign bit */
            if ((mOp == MOP_wsbfxrri5i5 || mOp == MOP_xsbfxrri6i6) &&
                width != static_cast<RegOperand &>(srcOpnd).GetSize()) {
                return false;
            }
            if (mOp == MOP_wubfxrri5i5 || mOp == MOP_wsbfxrri5i5) {
                newMop = MOP_wmovrr;
            } else if (mOp == MOP_xubfxrri6i6 || mOp == MOP_xsbfxrri6i6) {
                newMop = MOP_xmovrr;
            }
            break;
        }
        default:
            return false;
    }
    newDstOpnd = &static_cast<RegOperand &>(dstOpnd);
    newSrcOpnd = &static_cast<RegOperand &>(srcOpnd);
    return true;
}
333
Run(BB & bb,Insn & insn)334 void ExtValidBitPattern::Run(BB &bb, Insn &insn)
335 {
336 if (!CheckCondition(insn)) {
337 return;
338 }
339 MOperator mOp = insn.GetMachineOpcode();
340 switch (mOp) {
341 case MOP_xuxtb32:
342 case MOP_xuxth32: {
343 insn.SetMOP(AArch64CG::kMd[newMop]);
344 break;
345 }
346 case MOP_wubfxrri5i5:
347 case MOP_xubfxrri6i6:
348 case MOP_wsbfxrri5i5:
349 case MOP_xsbfxrri6i6: {
350 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, *newDstOpnd, *newSrcOpnd);
351 bb.ReplaceInsn(insn, newInsn);
352 /* update ssa info */
353 ssaInfo->ReplaceInsn(insn, newInsn);
354 /* dump pattern info */
355 if (CG_VALIDBIT_OPT_DUMP) {
356 std::vector<Insn *> prevs;
357 prevs.emplace_back(&insn);
358 DumpAfterPattern(prevs, &insn, &newInsn);
359 }
360 break;
361 }
362 default:
363 return;
364 }
365 }
366
/*
 * Detects the chain cset -> cmp #imm -> cset by walking the def-use web of
 * @curInsn's destination. Such chains are combined by a different pattern
 * first, so CmpCsetVBPattern must stand aside when one is present.
 */
bool CmpCsetVBPattern::IsContinuousCmpCset(const Insn &curInsn)
{
    auto &csetDstReg = static_cast<RegOperand &>(curInsn.GetOperand(kInsnFirstOpnd));
    CHECK_FATAL(csetDstReg.IsSSAForm(), "dstOpnd must be ssa form");
    VRegVersion *dstVersion = ssaInfo->FindSSAVersion(csetDstReg.GetRegisterNumber());
    DEBUG_ASSERT(dstVersion != nullptr, "find vRegVersion failed");
    /* every use of the cset result ... */
    for (auto useDUInfoIt : dstVersion->GetAllUseInsns()) {
        if (useDUInfoIt.second == nullptr) {
            continue;
        }
        Insn *useInsn = useDUInfoIt.second->GetInsn();
        if (useInsn == nullptr) {
            continue;
        }
        MOperator useMop = useInsn->GetMachineOpcode();
        /* ... that feeds a compare-with-immediate ... */
        if (useMop == MOP_wcmpri || useMop == MOP_xcmpri) {
            auto &ccDstReg = static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd));
            CHECK_FATAL(ccDstReg.IsSSAForm(), "dstOpnd must be ssa form");
            VRegVersion *ccDstVersion = ssaInfo->FindSSAVersion(ccDstReg.GetRegisterNumber());
            DEBUG_ASSERT(ccDstVersion != nullptr, "find vRegVersion failed");
            for (auto ccUseDUInfoIt : ccDstVersion->GetAllUseInsns()) {
                if (ccUseDUInfoIt.second == nullptr) {
                    continue;
                }
                Insn *ccUseInsn = ccUseDUInfoIt.second->GetInsn();
                if (ccUseInsn == nullptr) {
                    continue;
                }
                MOperator ccUseMop = ccUseInsn->GetMachineOpcode();
                /* ... whose condition flags are consumed by another cset */
                if (ccUseMop == MOP_wcsetrc || ccUseMop == MOP_xcsetrc) {
                    return true;
                }
            }
        }
    }
    return false;
}
404
/*
 * True when @defInsn provably produces a value with at most one valid bit
 * (i.e. the value is 0 or 1), so a following cmp #0/#1 + cset can be folded.
 */
bool CmpCsetVBPattern::OpndDefByOneValidBit(const Insn &defInsn)
{
    if (defInsn.IsPhi()) {
        /* for a phi def, fall back to the valid-bit count recorded on the
         * cmp's first operand (the phi result) */
        return (static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() == k1BitSize) ||
               (static_cast<RegOperand &>(cmpInsn->GetOperand(kInsnSecondOpnd)).GetValidBitsNum() == k0BitSize);
    }
    MOperator defMop = defInsn.GetMachineOpcode();
    switch (defMop) {
        case MOP_wcsetrc:
        case MOP_xcsetrc:
            /* cset always yields 0 or 1 */
            return true;
        case MOP_wmovri32:
        case MOP_xmovri64: {
            /* move-immediate: only constants 0 and 1 qualify */
            Operand &defOpnd = defInsn.GetOperand(kInsnSecondOpnd);
            DEBUG_ASSERT(defOpnd.IsIntImmediate(), "expects ImmOperand");
            auto &defConst = static_cast<ImmOperand &>(defOpnd);
            int64 defConstValue = defConst.GetValue();
            return (defConstValue == 0 || defConstValue == 1);
        }
        case MOP_xmovrr:
        case MOP_wmovrr:
            /* a move from the zero register yields exactly 0 */
            return IsZeroRegister(defInsn.GetOperand(kInsnSecondOpnd));
        case MOP_wlsrrri5:
        case MOP_xlsrrri6: {
            /* logical shift right by width-1 leaves only the old sign bit */
            Operand &opnd2 = defInsn.GetOperand(kInsnThirdOpnd);
            DEBUG_ASSERT(opnd2.IsIntImmediate(), "expects ImmOperand");
            auto &opndImm = static_cast<ImmOperand &>(opnd2);
            int64 shiftBits = opndImm.GetValue();
            return ((defMop == MOP_wlsrrri5 && shiftBits == (k32BitSize - 1)) ||
                    (defMop == MOP_xlsrrri6 && shiftBits == (k64BitSize - 1)));
        }
        default:
            return false;
    }
}
440
/*
 * Validates the pattern: cmp Rn, #0/#1 followed by a single cset on the
 * resulting flags, where Rn itself is known to hold only 0 or 1. When all
 * conditions hold the cset can be folded into a mov or an eor #1.
 * Side effects: records cmpInsn and cmpConstVal for Run().
 */
bool CmpCsetVBPattern::CheckCondition(Insn &csetInsn)
{
    MOperator curMop = csetInsn.GetMachineOpcode();
    if (curMop != MOP_wcsetrc && curMop != MOP_xcsetrc) {
        return false;
    }
    /* combine [continuous cmp & cset] first, to eliminate more insns */
    if (IsContinuousCmpCset(csetInsn)) {
        return false;
    }
    RegOperand &ccReg = static_cast<RegOperand &>(csetInsn.GetOperand(kInsnThirdOpnd));
    regno_t ccRegNo = ccReg.GetRegisterNumber();
    cmpInsn = GetDefInsn(ccReg);
    CHECK_NULL_FATAL(cmpInsn);
    MOperator mop = cmpInsn->GetMachineOpcode();
    /* the flags must come from a compare-with-immediate */
    if ((mop != MOP_wcmpri) && (mop != MOP_xcmpri)) {
        return false;
    }
    /* the flags must have no consumer other than this cset */
    VRegVersion *ccRegVersion = ssaInfo->FindSSAVersion(ccRegNo);
    DEBUG_ASSERT(ccRegVersion != nullptr, "nullptr check");
    if (ccRegVersion->GetAllUseInsns().size() > k1BitSize) {
        return false;
    }
    Operand &cmpSecondOpnd = cmpInsn->GetOperand(kInsnThirdOpnd);
    CHECK_FATAL(cmpSecondOpnd.IsIntImmediate(), "expects ImmOperand");
    auto &cmpConst = static_cast<ImmOperand &>(cmpSecondOpnd);
    cmpConstVal = cmpConst.GetValue();
    /* get ImmOperand, must be 0 or 1 */
    if ((cmpConstVal != 0) && (cmpConstVal != k1BitSize)) {
        return false;
    }
    Operand &cmpFirstOpnd = cmpInsn->GetOperand(kInsnSecondOpnd);
    CHECK_FATAL(cmpFirstOpnd.IsRegister(), "cmpFirstOpnd must be register!");
    RegOperand &cmpReg = static_cast<RegOperand &>(cmpFirstOpnd);
    Insn *defInsn = GetDefInsn(cmpReg);
    if (defInsn == nullptr) {
        return false;
    }
    /* a mov from a physical register cannot be reasoned about safely */
    if (defInsn->GetMachineOpcode() == MOP_wmovrr || defInsn->GetMachineOpcode() == MOP_xmovrr) {
        auto &srcOpnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
        if (!srcOpnd.IsVirtualRegister()) {
            return false;
        }
    }
    /* the compared register must be known to be 0 or 1 */
    return ((cmpReg.GetValidBitsNum() == k1BitSize) || (cmpReg.GetValidBitsNum() == k0BitSize) ||
            OpndDefByOneValidBit(*defInsn));
}
488
Run(BB & bb,Insn & csetInsn)489 void CmpCsetVBPattern::Run(BB &bb, Insn &csetInsn)
490 {
491 if (!CheckCondition(csetInsn)) {
492 return;
493 }
494 Operand &csetFirstOpnd = csetInsn.GetOperand(kInsnFirstOpnd);
495 Operand &cmpFirstOpnd = cmpInsn->GetOperand(kInsnSecondOpnd);
496 auto &cond = static_cast<CondOperand &>(csetInsn.GetOperand(kInsnSecondOpnd));
497 Insn *newInsn = nullptr;
498
499 /* cmpFirstOpnd == 1 */
500 if ((cmpConstVal == 0 && cond.GetCode() == CC_NE) || (cmpConstVal == 1 && cond.GetCode() == CC_EQ)) {
501 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xmovrr : MOP_wmovrr;
502 newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd);
503 } else if ((cmpConstVal == 1 && cond.GetCode() == CC_NE) || (cmpConstVal == 0 && cond.GetCode() == CC_EQ)) {
504 /* cmpFirstOpnd == 0 */
505 MOperator mopCode = (cmpFirstOpnd.GetSize() == k64BitSize) ? MOP_xeorrri13 : MOP_weorrri12;
506 ImmOperand &one = static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(1, k8BitSize, false);
507 newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(mopCode, csetFirstOpnd, cmpFirstOpnd, one);
508 }
509 if (newInsn == nullptr) {
510 return;
511 }
512 bb.ReplaceInsn(csetInsn, *newInsn);
513 ssaInfo->ReplaceInsn(csetInsn, *newInsn);
514 if (CG_VALIDBIT_OPT_DUMP && (newInsn != nullptr)) {
515 std::vector<Insn *> prevInsns;
516 prevInsns.emplace_back(cmpInsn);
517 prevInsns.emplace_back(&csetInsn);
518 DumpAfterPattern(prevInsns, newInsn, nullptr);
519 }
520 }
521
SelectNewMop(MOperator mop)522 void CmpBranchesPattern::SelectNewMop(MOperator mop)
523 {
524 switch (mop) {
525 case MOP_bge: {
526 newMop = is64Bit ? MOP_xtbnz : MOP_wtbnz;
527 break;
528 }
529 case MOP_blt: {
530 newMop = is64Bit ? MOP_xtbz : MOP_wtbz;
531 break;
532 }
533 default:
534 break;
535 }
536 }
537
/*
 * Validates the pattern: cmp Rn, #(2^k) followed by bge/blt, where Rn has
 * exactly k+1 valid bits — so the comparison outcome is decided solely by
 * bit k of Rn and the pair can become a tbz/tbnz.
 * Side effects: records prevCmpInsn, is64Bit, newImmVal and newMop for Run().
 */
bool CmpBranchesPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_bge && curMop != MOP_blt) {
        return false;
    }
    auto &ccReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    prevCmpInsn = GetDefInsn(ccReg);
    if (prevCmpInsn == nullptr) {
        return false;
    }
    MOperator cmpMop = prevCmpInsn->GetMachineOpcode();
    if (cmpMop != MOP_wcmpri && cmpMop != MOP_xcmpri) {
        return false;
    }
    is64Bit = (cmpMop == MOP_xcmpri);
    auto &cmpUseOpnd = static_cast<RegOperand &>(prevCmpInsn->GetOperand(kInsnSecondOpnd));
    auto &cmpImmOpnd = static_cast<ImmOperand &>(prevCmpInsn->GetOperand(kInsnThirdOpnd));
    int64 cmpImmVal = cmpImmOpnd.GetValue();
    /* newImmVal = log2(imm); negative when imm is not a power of two */
    newImmVal = ValidBitOpt::GetLogValueAtBase2(cmpImmVal);
    /* bit newImmVal must be the top valid bit of the compared register */
    if (newImmVal < 0 || cmpUseOpnd.GetValidBitsNum() != (newImmVal + 1)) {
        return false;
    }
    SelectNewMop(curMop);
    if (newMop == MOP_undef) {
        return false;
    }
    return true;
}
567
Run(BB & bb,Insn & insn)568 void CmpBranchesPattern::Run(BB &bb, Insn &insn)
569 {
570 if (!CheckCondition(insn)) {
571 return;
572 }
573 auto *aarFunc = static_cast<AArch64CGFunc *>(cgFunc);
574 auto &labelOpnd = static_cast<LabelOperand &>(insn.GetOperand(kInsnSecondOpnd));
575 ImmOperand &newImmOpnd = aarFunc->CreateImmOperand(newImmVal, k8BitSize, false);
576 Insn &newInsn =
577 cgFunc->GetInsnBuilder()->BuildInsn(newMop, prevCmpInsn->GetOperand(kInsnSecondOpnd), newImmOpnd, labelOpnd);
578 bb.ReplaceInsn(insn, newInsn);
579 /* update ssa info */
580 ssaInfo->ReplaceInsn(insn, newInsn);
581 /* dump pattern info */
582 if (CG_VALIDBIT_OPT_DUMP) {
583 std::vector<Insn *> prevs;
584 prevs.emplace_back(prevCmpInsn);
585 DumpAfterPattern(prevs, &insn, &newInsn);
586 }
587 }
588 } /* namespace maplebe */
589