/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "peep.h"
#include "cg.h"
#include "mpl_logging.h"
#include "common_utils.h"
#if TARGAARCH64
#include "aarch64_peep.h"
#endif
#if TARGRISCV64
#include "riscv64_peep.h"
#endif
#if defined TARGX86_64
#include "x64_peep.h"
#endif
#if TARGARM32
#include "arm32_peep.h"
#endif

namespace maplebe {
#if TARGAARCH64
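/*
 * Check whether the SSA version of ccReg held at startInsn is still valid at endInsn:
 * returns true if the two insns are in different BBs, a call intervenes, or another SSA
 * version of the CC register is defined in between; otherwise false.
 */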
bool CGPeepPattern::IsCCRegCrossVersion(Insn &startInsn, Insn &endInsn, const RegOperand &ccReg) const
{
    if (startInsn.GetBB() != endInsn.GetBB()) {
        return true;
    }
    CHECK_FATAL(ssaInfo != nullptr, "must have ssaInfo");
    CHECK_FATAL(ccReg.IsSSAForm(), "cc reg must be ssa form");
    for (auto *curInsn = startInsn.GetNext(); curInsn != nullptr && curInsn != &endInsn; curInsn = curInsn->GetNext()) {
        if (!curInsn->IsMachineInstruction()) {
            continue;
        }
        if (curInsn->IsCall()) {
            return true;
        }
        uint32 opndNum = curInsn->GetOperandSize();
        for (uint32 i = 0; i < opndNum; ++i) {
            Operand &opnd = curInsn->GetOperand(i);
            if (!opnd.IsRegister()) {
                continue;
            }
            auto &regOpnd = static_cast<RegOperand &>(opnd);
            if (!curInsn->IsRegDefined(regOpnd.GetRegisterNumber())) {
                continue;
            }
            if (regOpnd.IsOfCC()) {
                VRegVersion *ccVersion = ssaInfo->FindSSAVersion(ccReg.GetRegisterNumber());
                VRegVersion *curCCVersion = ssaInfo->FindSSAVersion(regOpnd.GetRegisterNumber());
                CHECK_FATAL(ccVersion != nullptr && curCCVersion != nullptr,
                            "RegVersion must not be null based on ssa");
                CHECK_FATAL(!ccVersion->IsDeleted() && !curCCVersion->IsDeleted(), "deleted version");
                if (ccVersion->GetVersionIdx() != curCCVersion->GetVersionIdx()) {
                    return true;
                }
            }
        }
    }
    return false;
}

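/* Return log2(val) if val is a power of two, otherwise -1. */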
int64 CGPeepPattern::GetLogValueAtBase2(int64 val) const
{
    return (__builtin_popcountll(static_cast<uint64>(val)) == 1) ? (__builtin_ffsll(val) - 1) : -1;
}

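/* Collect all insns that use the SSA version defined by defReg; empty if defReg is not in SSA form. */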
InsnSet CGPeepPattern::GetAllUseInsn(const RegOperand &defReg) const
{
    InsnSet allUseInsn;
    if ((ssaInfo != nullptr) && defReg.IsSSAForm()) {
        VRegVersion *defVersion = ssaInfo->FindSSAVersion(defReg.GetRegisterNumber());
        CHECK_FATAL(defVersion != nullptr, "defVersion must not be null based on ssa");
        for (auto insnInfo : defVersion->GetAllUseInsns()) {
            Insn *secondInsn = insnInfo.second->GetInsn();
            allUseInsn.emplace(secondInsn);
        }
    }
    return allUseInsn;
}

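/* Return the insn that defines the SSA version used by useReg, or nullptr if there is none. */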
Insn *CGPeepPattern::GetDefInsn(const RegOperand &useReg)
{
    if (!useReg.IsSSAForm()) {
        return nullptr;
    }
    regno_t useRegNO = useReg.GetRegisterNumber();
    VRegVersion *useVersion = ssaInfo->FindSSAVersion(useRegNO);
    DEBUG_ASSERT(useVersion != nullptr, "useVRegVersion must not be null based on ssa");
    CHECK_FATAL(!useVersion->IsDeleted(), "deleted version");
    DUInsnInfo *defInfo = useVersion->GetDefInsnInfo();
    return defInfo == nullptr ? nullptr : defInfo->GetInsn();
}

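/* Dump the insns involved in a matched pattern: the preceding insns, the replaced insn and the new insn. */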
void CGPeepPattern::DumpAfterPattern(std::vector<Insn *> &prevInsns, const Insn *replacedInsn, const Insn *newInsn)
{
    LogInfo::MapleLogger() << ">>>>>>> In " << GetPatternName() << " : <<<<<<<\n";
    if (!prevInsns.empty()) {
        if ((replacedInsn == nullptr) && (newInsn == nullptr)) {
            LogInfo::MapleLogger() << "======= RemoveInsns : {\n";
        } else {
            LogInfo::MapleLogger() << "======= PrevInsns : {\n";
        }
        for (auto *prevInsn : prevInsns) {
            if (prevInsn != nullptr) {
                LogInfo::MapleLogger() << "[primal form] ";
                prevInsn->Dump();
                if (ssaInfo != nullptr) {
                    LogInfo::MapleLogger() << "[ssa form] ";
                    ssaInfo->DumpInsnInSSAForm(*prevInsn);
                }
            }
        }
        LogInfo::MapleLogger() << "}\n";
    }
    if (replacedInsn != nullptr) {
        LogInfo::MapleLogger() << "======= OldInsn :\n";
        LogInfo::MapleLogger() << "[primal form] ";
        replacedInsn->Dump();
        if (ssaInfo != nullptr) {
            LogInfo::MapleLogger() << "[ssa form] ";
            ssaInfo->DumpInsnInSSAForm(*replacedInsn);
        }
    }
    if (newInsn != nullptr) {
        LogInfo::MapleLogger() << "======= NewInsn :\n";
        LogInfo::MapleLogger() << "[primal form] ";
        newInsn->Dump();
        if (ssaInfo != nullptr) {
            LogInfo::MapleLogger() << "[ssa form] ";
            ssaInfo->DumpInsnInSSAForm(*newInsn);
        }
    }
}

/* Check if a regOpnd is live after insn. True if live, otherwise false. */
bool CGPeepPattern::IfOperandIsLiveAfterInsn(const RegOperand &regOpnd, Insn &insn)
{
    for (Insn *nextInsn = insn.GetNext(); nextInsn != nullptr; nextInsn = nextInsn->GetNext()) {
        if (!nextInsn->IsMachineInstruction()) {
            continue;
        }
        CHECK_FATAL(nextInsn->GetOperandSize() > 0, "must not be zero");
        int32 lastOpndId = static_cast<int32>(nextInsn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = nextInsn->GetOperand(static_cast<uint32>(i));
            if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr && base->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
            } else if (opnd.IsList()) {
                auto &opndList = static_cast<ListOperand &>(opnd).GetOperands();
                if (find(opndList.begin(), opndList.end(), &regOpnd) != opndList.end()) {
                    return true;
                }
            }

            if (!opnd.IsRegister()) {
                continue;
            }
            auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
            if (tmpRegOpnd.GetRegisterNumber() != regOpnd.GetRegisterNumber()) {
                continue;
            }
            const InsnDesc *md = nextInsn->GetDesc();
            auto *regProp = md->opndMD[static_cast<uint32>(i)];
            bool isUse = regProp->IsUse();
            /* If the register is redefined without a use, there is no need to check live-out. */
            return isUse;
        }
    }
    /* Check if it is live-out. */
    return FindRegLiveOut(regOpnd, *insn.GetBB());
}

/* Entry point for checking whether a regOpnd is live-out. */
bool CGPeepPattern::FindRegLiveOut(const RegOperand &regOpnd, const BB &bb)
{
    /*
     * Each time the peephole pass runs, index is initialized by the constructor,
     * and internal_flag3 of every BB must be cleared.
     */
    if (PeepOptimizer::index == 0) {
        FOR_ALL_BB(currbb, cgFunc)
        {
            currbb->SetInternalFlag3(0);
        }
    }
    /* Increase the index before each invocation of the check function. */
    ++PeepOptimizer::index;
    return CheckOpndLiveinSuccs(regOpnd, bb);
}

/* Check regOpnd in succs/ehSuccs. True if live-out, otherwise false. */
bool CGPeepPattern::CheckOpndLiveinSuccs(const RegOperand &regOpnd, const BB &bb) const
{
    std::stack<BB *> bbStack;
    bbStack.push(const_cast<BB *>(&bb));
    while (!bbStack.empty()) {
        BB *currentBB = bbStack.top();
        bbStack.pop();
        if (CheckRegLiveinReturnBB(regOpnd, *currentBB)) {
            return true;
        }
        // The traversal order of sibling nodes in the iterative version
        // is reversed compared to the recursive version.
        for (auto succ : currentBB->GetSuccs()) {
            DEBUG_ASSERT(succ->GetInternalFlag3() <= PeepOptimizer::index, "internal error.");
            if (succ->GetInternalFlag3() == PeepOptimizer::index) {
                continue;
            }
            succ->SetInternalFlag3(PeepOptimizer::index);
            ReturnType result = IsOpndLiveinBB(regOpnd, *succ);
            if (result == kResNotFind) {
                bbStack.push(succ);
            } else if (result == kResUseFirst) {
                return true;
            } else if (result == kResDefFirst) {
                // Do nothing; the register is redefined in this successor, so continue with the others.
            }
        }
    }
    return false;
}

/* Check if the reg is used in return BB */
bool CGPeepPattern::CheckRegLiveinReturnBB(const RegOperand &regOpnd, const BB &bb) const
{
#if TARGAARCH64 || TARGRISCV64
    if (bb.GetKind() == BB::kBBReturn) {
        regno_t regNO = regOpnd.GetRegisterNumber();
        RegType regType = regOpnd.GetRegisterType();
        if (regType == kRegTyVary) {
            return false;
        }
        PrimType returnType = cgFunc->GetFunction().GetReturnType()->GetPrimType();
        regno_t returnReg = R0;
        if (IsPrimitiveFloat(returnType)) {
            returnReg = V0;
        } else if (IsPrimitiveInteger(returnType)) {
            returnReg = R0;
        }
        if (regNO == returnReg) {
            return true;
        }
    }
#endif
    return false;
}

/*
 * Check regNO in the current bb:
 * kResUseFirst: a use point is found first; kResDefFirst: a define point is found first;
 * kResNotFind: regNO is not found, so searching must continue.
 */
ReturnType CGPeepPattern::IsOpndLiveinBB(const RegOperand &regOpnd, const BB &bb) const
{
    FOR_BB_INSNS_CONST(insn, &bb)
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        const InsnDesc *md = insn->GetDesc();
        int32 lastOpndId = static_cast<int32>(insn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = insn->GetOperand(static_cast<uint32>(i));
            auto *regProp = md->opndMD[static_cast<uint32>(i)];
            if (opnd.IsConditionCode()) {
                if (regOpnd.GetRegisterNumber() == kRFLAG) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            } else if (opnd.IsList()) {
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                if (insn->GetMachineOpcode() == MOP_asm) {
                    if (static_cast<uint32>(i) == kAsmOutputListOpnd || static_cast<uint32>(i) == kAsmClobberListOpnd) {
                        for (const auto op : listOpnd.GetOperands()) {
                            if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                                return kResDefFirst;
                            }
                        }
                        continue;
                    } else if (static_cast<uint32>(i) != kAsmInputListOpnd) {
                        continue;
                    }
                    /* fall through for kAsmInputListOpnd */
                }
                for (const auto op : listOpnd.GetOperands()) {
                    if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr) {
                    DEBUG_ASSERT(base->IsRegister(), "internal error.");
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsRegister()) {
                auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
                if (tmpRegOpnd.GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            }
        }
    }
    return kResNotFind;
}

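/* Return log2(val) if val is a power of two, otherwise -1. */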
int PeepPattern::LogValueAtBase2(int64 val) const
{
    return (__builtin_popcountll(static_cast<uint64>(val)) == 1) ? (__builtin_ffsll(val) - 1) : (-1);
}

/* Check if a regOpnd is live after insn. True if live, otherwise false. */
bool PeepPattern::IfOperandIsLiveAfterInsn(const RegOperand &regOpnd, Insn &insn)
{
    for (Insn *nextInsn = insn.GetNext(); nextInsn != nullptr; nextInsn = nextInsn->GetNext()) {
        if (!nextInsn->IsMachineInstruction()) {
            continue;
        }
        CHECK_FATAL(nextInsn->GetOperandSize() > 0, "must not be zero");
        int32 lastOpndId = static_cast<int32>(nextInsn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = nextInsn->GetOperand(static_cast<uint32>(i));
            if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr && base->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
            } else if (opnd.IsList()) {
                auto &opndList = static_cast<ListOperand &>(opnd).GetOperands();
                if (find(opndList.begin(), opndList.end(), &regOpnd) != opndList.end()) {
                    return true;
                }
            }

            if (!opnd.IsRegister()) {
                continue;
            }
            auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
            if (tmpRegOpnd.GetRegisterNumber() != regOpnd.GetRegisterNumber()) {
                continue;
            }
            const InsnDesc *md = nextInsn->GetDesc();
            auto *regProp = md->opndMD[static_cast<uint32>(i)];
            bool isUse = regProp->IsUse();
            /* If the register is redefined without a use, there is no need to check live-out. */
            return isUse;
        }
    }
    /* Check if it is live-out. */
    return FindRegLiveOut(regOpnd, *insn.GetBB());
}

/* Entry point for checking whether a regOpnd is live-out. */
bool PeepPattern::FindRegLiveOut(const RegOperand &regOpnd, const BB &bb)
{
    /*
     * Each time the peephole pass runs, index is initialized by the constructor,
     * and internal_flag3 of every BB must be cleared.
     */
    if (PeepOptimizer::index == 0) {
        FOR_ALL_BB(currbb, &cgFunc)
        {
            currbb->SetInternalFlag3(0);
        }
    }
    /* Increase the index before each invocation of the check function. */
    ++PeepOptimizer::index;
    return CheckOpndLiveinSuccs(regOpnd, bb);
}

/* Check regOpnd in succs/ehSuccs. True if live-out, otherwise false. */
bool PeepPattern::CheckOpndLiveinSuccs(const RegOperand &regOpnd, const BB &bb) const
{
    std::stack<BB *> bbStack;
    bbStack.push(const_cast<BB *>(&bb));
    while (!bbStack.empty()) {
        BB *currentBB = bbStack.top();
        bbStack.pop();
        if (CheckRegLiveinReturnBB(regOpnd, *currentBB)) {
            return true;
        }
        // The traversal order of sibling nodes in the iterative version
        // is reversed compared to the recursive version.
        for (auto succ : currentBB->GetSuccs()) {
            DEBUG_ASSERT(succ->GetInternalFlag3() <= PeepOptimizer::index, "internal error.");
            if (succ->GetInternalFlag3() == PeepOptimizer::index) {
                continue;
            }
            succ->SetInternalFlag3(PeepOptimizer::index);
            ReturnType result = IsOpndLiveinBB(regOpnd, *succ);
            if (result == kResNotFind) {
                bbStack.push(succ);
            } else if (result == kResUseFirst) {
                return true;
            } else if (result == kResDefFirst) {
                // Do nothing; the register is redefined in this successor, so continue with the others.
            }
        }
    }
    return false;
}

/* Check if the reg is used in return BB */
bool PeepPattern::CheckRegLiveinReturnBB(const RegOperand &regOpnd, const BB &bb) const
{
#if TARGAARCH64 || TARGRISCV64
    if (bb.GetKind() == BB::kBBReturn) {
        regno_t regNO = regOpnd.GetRegisterNumber();
        RegType regType = regOpnd.GetRegisterType();
        if (regType == kRegTyVary) {
            return false;
        }
        PrimType returnType = cgFunc.GetFunction().GetReturnType()->GetPrimType();
        regno_t returnReg = R0;
        if (IsPrimitiveFloat(returnType)) {
            returnReg = V0;
        } else if (IsPrimitiveInteger(returnType)) {
            returnReg = R0;
        }
        if (regNO == returnReg) {
            return true;
        }
    }
#endif
    return false;
}

/*
 * Check regNO in the current bb:
 * kResUseFirst: a use point is found first; kResDefFirst: a define point is found first;
 * kResNotFind: regNO is not found, so searching must continue.
 */
ReturnType PeepPattern::IsOpndLiveinBB(const RegOperand &regOpnd, const BB &bb) const
{
    FOR_BB_INSNS_CONST(insn, &bb)
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        const InsnDesc *md = insn->GetDesc();
        int32 lastOpndId = static_cast<int32>(insn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = insn->GetOperand(static_cast<uint32>(i));
            auto *regProp = md->opndMD[static_cast<uint32>(i)];
            if (opnd.IsConditionCode()) {
                if (regOpnd.GetRegisterNumber() == kRFLAG) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            } else if (opnd.IsList()) {
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                if (insn->GetMachineOpcode() == MOP_asm) {
                    if (static_cast<uint32>(i) == kAsmOutputListOpnd || static_cast<uint32>(i) == kAsmClobberListOpnd) {
                        for (const auto op : listOpnd.GetOperands()) {
                            if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                                return kResDefFirst;
                            }
                        }
                        continue;
                    } else if (static_cast<uint32>(i) != kAsmInputListOpnd) {
                        continue;
                    }
                    /* fall through for kAsmInputListOpnd */
                }
                for (const auto op : listOpnd.GetOperands()) {
                    if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr) {
                    DEBUG_ASSERT(base->IsRegister(), "internal error.");
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsRegister()) {
                auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
                if (tmpRegOpnd.GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            }
        }
    }
    return kResNotFind;
}

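/*
 * Check whether insn/nextInsn form a memory-operand optimization candidate: nextInsn must use
 * base+offset-immediate addressing with a zero offset and intact indexing, its base register must
 * be the destination of insn, and that register must not be live after nextInsn.
 */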
bool PeepPattern::IsMemOperandOptPattern(const Insn &insn, Insn &nextInsn)
{
    /* Check if the base register of nextInsn and the dest operand of insn are identical. */
    auto *memOpnd = static_cast<MemOperand *>(nextInsn.GetMemOpnd());
    DEBUG_ASSERT(memOpnd != nullptr, "null ptr check");
    /* Only for the AddrMode_B_OI addressing mode. */
    if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
        return false;
    }
    /* Only when the immediate offset is 0. */
    if (memOpnd->GetOffsetImmediate()->GetOffsetValue() != 0) {
        return false;
    }
    /* Only for intact memory addressing. */
    if (!memOpnd->IsIntactIndexed()) {
        return false;
    }

    auto &oldBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    /* Check if the dest operand of insn is identical with the base register of nextInsn. */
    if (memOpnd->GetBaseRegister() != &oldBaseOpnd) {
        return false;
    }

#ifdef USE_32BIT_REF
    if (nextInsn.IsAccessRefField() && nextInsn.GetOperand(kInsnFirstOpnd).GetSize() > k32BitSize) {
        return false;
    }
#endif
    /* Check if x0 is used after the ldr insn, and whether it is live-out. */
    if (IfOperandIsLiveAfterInsn(oldBaseOpnd, nextInsn)) {
        return false;
    }
    return true;
}

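/* Instantiate the pattern matcher T and run it over every machine instruction in the function. */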
template <typename T>
void PeepOptimizer::Run()
{
    auto *patternMatcher = peepOptMemPool->New<T>(cgFunc, peepOptMemPool);
    patternMatcher->InitOpts();
    FOR_ALL_BB(bb, &cgFunc)
    {
        FOR_BB_INSNS_SAFE(insn, bb, nextInsn)
        {
            if (!insn->IsMachineInstruction()) {
                continue;
            }
            patternMatcher->Run(*bb, *insn);
        }
    }
}

int32 PeepOptimizer::index = 0;

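/* Drivers for the non-SSA peephole phases: each sets up a thread-local mem pool and runs the target-specific pattern set. */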
void PeepHoleOptimizer::Peephole0()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PeepHole0>();
#endif
#if TARGARM32
    peepOptimizer.Run<Arm32PeepHole0>();
#endif
}

void PeepHoleOptimizer::PeepholeOpt()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PeepHole>();
#endif
#if TARGARM32
    peepOptimizer.Run<Arm32PeepHole>();
#endif
}

void PeepHoleOptimizer::PrePeepholeOpt()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PrePeepHole>();
#endif
#if TARGARM32
    peepOptimizer.Run<Arm32PrePeepHole>();
#endif
}

void PeepHoleOptimizer::PrePeepholeOpt1()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PrePeepHole1>();
#endif
#if TARGARM32
    peepOptimizer.Run<Arm32PrePeepHole1>();
#endif
}

/* === SSA form === */
bool CgPeepHole::PhaseRun(maplebe::CGFunc &f)
{
    CGSSAInfo *cgssaInfo = GET_ANALYSIS(CgSSAConstruct, f);
    CHECK_FATAL((cgssaInfo != nullptr), "Get ssaInfo failed!");
    MemPool *mp = GetPhaseMemPool();
    auto *cgpeep = mp->New<AArch64CGPeepHole>(f, mp, cgssaInfo);
    CHECK_FATAL((cgpeep != nullptr), "Create AArch64CGPeepHole failed!");
    cgpeep->Run();
    return false;
}

void CgPeepHole::GetAnalysisDependence(AnalysisDep &aDep) const
{
    aDep.AddRequired<CgSSAConstruct>();
    aDep.AddPreserved<CgSSAConstruct>();
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPeepHole, cgpeephole)
#endif
/* === Physical Pre Form === */
bool CgPrePeepHole::PhaseRun(maplebe::CGFunc &f)
{
    MemPool *mp = GetPhaseMemPool();
    CGPeepHole *cgpeep = f.GetCG()->CreateCGPeepHole(*mp, f);
    CHECK_FATAL(cgpeep != nullptr, "PeepHoleOptimizer instance create failure");
    cgpeep->Run();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPrePeepHole, cgprepeephole)

/* === Physical Post Form === */
bool CgPostPeepHole::PhaseRun(maplebe::CGFunc &f)
{
    MemPool *mp = GetPhaseMemPool();
    CGPeepHole *cgpeep = f.GetCG()->CreateCGPeepHole(*mp, f);
    CHECK_FATAL(cgpeep != nullptr, "PeepHoleOptimizer instance create failure");
    cgpeep->Run();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPostPeepHole, cgpostpeephole)

#if TARGAARCH64
bool CgPrePeepHole0::PhaseRun(maplebe::CGFunc &f)
{
    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->PrePeepholeOpt();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPrePeepHole0, prepeephole)

bool CgPrePeepHole1::PhaseRun(maplebe::CGFunc &f)
{
    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->PrePeepholeOpt1();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPrePeepHole1, prepeephole1)

bool CgPeepHole0::PhaseRun(maplebe::CGFunc &f)
{
    ReachingDefinition *reachingDef = nullptr;
    if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel2) {
        reachingDef = GET_ANALYSIS(CgReachingDefinition, f);
        if (reachingDef == nullptr || !f.GetRDStatus()) {
            GetAnalysisInfoHook()->ForceEraseAnalysisPhase(f.GetUniqueID(), &CgReachingDefinition::id);
            return false;
        }
    }

    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->Peephole0();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPeepHole0, peephole0)

bool CgPeepHole1::PhaseRun(maplebe::CGFunc &f)
{
    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->PeepholeOpt();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPeepHole1, peephole)
#endif

} /* namespace maplebe */