/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "peep.h"
#if TARGAARCH64
#include "aarch64_peep.h"
#endif
#if defined TARGX86_64
#include "x64_peep.h"
#endif

namespace maplebe {
#if TARGAARCH64
/* Check if a regOpnd is live after insn. True if live, otherwise false. */
bool CGPeepPattern::IfOperandIsLiveAfterInsn(const RegOperand &regOpnd, Insn &insn)
{
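    /*
     * Scan forward from the instruction following insn within the same BB: any
     * appearance of regOpnd as a memory base/offset or inside a list operand counts
     * as a use, and the first plain register occurrence decides by its use/def
     * property. If the scan reaches the end of the BB, fall back to the live-out check.
     */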
    for (Insn *nextInsn = insn.GetNext(); nextInsn != nullptr; nextInsn = nextInsn->GetNext()) {
        if (!nextInsn->IsMachineInstruction()) {
            continue;
        }
        CHECK_FATAL(nextInsn->GetOperandSize() > 0, "must not be zero");
        int32 lastOpndId = static_cast<int32>(nextInsn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = nextInsn->GetOperand(static_cast<uint32>(i));
            if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr && base->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
            } else if (opnd.IsList()) {
                auto &opndList = static_cast<ListOperand &>(opnd).GetOperands();
                if (find(opndList.begin(), opndList.end(), &regOpnd) != opndList.end()) {
                    return true;
                }
            }

            if (!opnd.IsRegister()) {
                continue;
            }
            auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
            if (tmpRegOpnd.GetRegisterNumber() != regOpnd.GetRegisterNumber()) {
                continue;
            }
            const InsnDesc *md = nextInsn->GetDesc();
            auto *regProp = (md->opndMD[static_cast<uint32>(i)]);
            bool isUse = regProp->IsUse();
            /* If the first occurrence is not a use, the register is redefined; no need to check live-out. */
            return isUse;
        }
    }
    /* Check if it is live-out. */
    return FindRegLiveOut(regOpnd, *insn.GetBB());
}

/* Entry point for checking whether a regOpnd is live-out. */
bool CGPeepPattern::FindRegLiveOut(const RegOperand &regOpnd, const BB &bb)
{
    /*
     * Each time the peephole pass is used, index is initialized by the constructor,
     * so internalFlag3 of every BB should be cleared.
     */
    if (PeepOptimizer::index == 0) {
        FOR_ALL_BB(currbb, cgFunc)
        {
            currbb->SetInternalFlag3(0);
        }
    }
    /* Increase index before each invocation of the check function. */
    ++PeepOptimizer::index;
    return CheckOpndLiveinSuccs(regOpnd, bb);
}

/* Check regOpnd in succs/ehSuccs. True if live-out, otherwise false. */
bool CGPeepPattern::CheckOpndLiveinSuccs(const RegOperand &regOpnd, const BB &bb) const
{
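    /*
     * Iterative DFS over the successor graph. A BB whose internalFlag3 already
     * equals the current PeepOptimizer::index has been visited in this query and
     * is skipped, so each BB is inspected at most once per call.
     */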
    std::stack<BB *> bbStack;
    bbStack.push(const_cast<BB *>(&bb));
    while (!bbStack.empty()) {
        BB *currentBB = bbStack.top();
        bbStack.pop();
        if (CheckRegLiveinReturnBB(regOpnd, *currentBB)) {
            return true;
        }
        // The traversal order of sibling nodes in the iterative version
        // is reversed compared to the recursive version.
        for (auto succ : currentBB->GetSuccs()) {
            DEBUG_ASSERT(succ->GetInternalFlag3() <= PeepOptimizer::index, "internal error.");
            if (succ->GetInternalFlag3() == PeepOptimizer::index) {
                continue;
            }
            succ->SetInternalFlag3(PeepOptimizer::index);
            ReturnType result = IsOpndLiveinBB(regOpnd, *succ);
            if (result == kResNotFind) {
                bbStack.push(succ);
            } else if (result == kResUseFirst) {
                return true;
            } else if (result == kResDefFirst) {
                // Do nothing; continue to process the remaining successors.
            }
        }
    }
    return false;
}

/* Check if the reg is used in the return BB. */
bool CGPeepPattern::CheckRegLiveinReturnBB(const RegOperand &regOpnd, const BB &bb) const
{
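    /*
     * Being live into a return BB matters only for the register that carries the
     * return value: R0 for integer returns and V0 for floating-point returns.
     * Registers of type kRegTyVary are never treated as live here.
     */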
#if TARGAARCH64 || TARGRISCV64
    if (bb.GetKind() == BB::kBBReturn) {
        regno_t regNO = regOpnd.GetRegisterNumber();
        RegType regType = regOpnd.GetRegisterType();
        if (regType == kRegTyVary) {
            return false;
        }
        PrimType returnType = cgFunc->GetFunction().GetReturnType()->GetPrimType();
        regno_t returnReg = R0;
        if (IsPrimitiveFloat(returnType)) {
            returnReg = V0;
        } else if (IsPrimitiveInteger(returnType)) {
            returnReg = R0;
        }
        if (regNO == returnReg) {
            return true;
        }
    }
#endif
    return false;
}

/*
 * Check regNO in the current bb:
 * kResUseFirst: a use point is found first; kResDefFirst: a define point is found first;
 * kResNotFind: regNO is not found, so the search must continue.
 */
ReturnType CGPeepPattern::IsOpndLiveinBB(const RegOperand &regOpnd, const BB &bb) const
{
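    /*
     * Operands are scanned from last to first. For inline asm (MOP_asm), registers
     * in the output and clobber lists count as definitions, the input list counts
     * as uses, and the other asm operands are ignored.
     */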
    FOR_BB_INSNS_CONST(insn, &bb)
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        const InsnDesc *md = insn->GetDesc();
        int32 lastOpndId = static_cast<int32>(insn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = insn->GetOperand(static_cast<uint32>(i));
            auto *regProp = (md->opndMD[static_cast<uint32>(i)]);
            if (opnd.IsConditionCode()) {
                if (regOpnd.GetRegisterNumber() == kRFLAG) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            } else if (opnd.IsList()) {
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                if (insn->GetMachineOpcode() == MOP_asm) {
                    if (static_cast<uint32>(i) == kAsmOutputListOpnd || static_cast<uint32>(i) == kAsmClobberListOpnd) {
                        for (const auto op : listOpnd.GetOperands()) {
                            if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                                return kResDefFirst;
                            }
                        }
                        continue;
                    } else if (static_cast<uint32>(i) != kAsmInputListOpnd) {
                        continue;
                    }
                    /* fall thru for kAsmInputListOpnd */
                }
                for (const auto op : listOpnd.GetOperands()) {
                    if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr) {
                    DEBUG_ASSERT(base->IsRegister(), "internal error.");
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsRegister()) {
                auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
                if (tmpRegOpnd.GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            }
        }
    }
    return kResNotFind;
}

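/*
 * Return log2(val) when val is a power of two (exactly one bit set), otherwise -1.
 * For example, LogValueAtBase2(8) returns 3 and LogValueAtBase2(6) returns -1.
 */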
int PeepPattern::LogValueAtBase2(int64 val) const
{
    return (__builtin_popcountll(static_cast<uint64>(val)) == 1) ? (__builtin_ffsll(val) - 1) : (-1);
}

/* Check if a regOpnd is live after insn. True if live, otherwise false. */
bool PeepPattern::IfOperandIsLiveAfterInsn(const RegOperand &regOpnd, Insn &insn)
{
    for (Insn *nextInsn = insn.GetNext(); nextInsn != nullptr; nextInsn = nextInsn->GetNext()) {
        if (!nextInsn->IsMachineInstruction()) {
            continue;
        }
        CHECK_FATAL(nextInsn->GetOperandSize() > 0, "must not be zero");
        int32 lastOpndId = static_cast<int32>(nextInsn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = nextInsn->GetOperand(static_cast<uint32>(i));
            if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr && base->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return true;
                    }
                }
            } else if (opnd.IsList()) {
                auto &opndList = static_cast<ListOperand &>(opnd).GetOperands();
                if (find(opndList.begin(), opndList.end(), &regOpnd) != opndList.end()) {
                    return true;
                }
            }

            if (!opnd.IsRegister()) {
                continue;
            }
            auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
            if (tmpRegOpnd.GetRegisterNumber() != regOpnd.GetRegisterNumber()) {
                continue;
            }
            const InsnDesc *md = nextInsn->GetDesc();
            auto *regProp = (md->opndMD[static_cast<uint32>(i)]);
            bool isUse = regProp->IsUse();
            /* If the first occurrence is not a use, the register is redefined; no need to check live-out. */
            return isUse;
        }
    }
    /* Check if it is live-out. */
    return FindRegLiveOut(regOpnd, *insn.GetBB());
}

/* Entry point for checking whether a regOpnd is live-out. */
bool PeepPattern::FindRegLiveOut(const RegOperand &regOpnd, const BB &bb)
{
    /*
     * Each time the peephole pass is used, index is initialized by the constructor,
     * so internalFlag3 of every BB should be cleared.
     */
    if (PeepOptimizer::index == 0) {
        FOR_ALL_BB(currbb, &cgFunc)
        {
            currbb->SetInternalFlag3(0);
        }
    }
    /* Increase index before each invocation of the check function. */
    ++PeepOptimizer::index;
    return CheckOpndLiveinSuccs(regOpnd, bb);
}

/* Check regOpnd in succs/ehSuccs. True if live-out, otherwise false. */
bool PeepPattern::CheckOpndLiveinSuccs(const RegOperand &regOpnd, const BB &bb) const
{
    std::stack<BB *> bbStack;
    bbStack.push(const_cast<BB *>(&bb));
    while (!bbStack.empty()) {
        BB *currentBB = bbStack.top();
        bbStack.pop();
        if (CheckRegLiveinReturnBB(regOpnd, *currentBB)) {
            return true;
        }
        // The traversal order of sibling nodes in the iterative version
        // is reversed compared to the recursive version.
        for (auto succ : currentBB->GetSuccs()) {
            DEBUG_ASSERT(succ->GetInternalFlag3() <= PeepOptimizer::index, "internal error.");
            if (succ->GetInternalFlag3() == PeepOptimizer::index) {
                continue;
            }
            succ->SetInternalFlag3(PeepOptimizer::index);
            ReturnType result = IsOpndLiveinBB(regOpnd, *succ);
            if (result == kResNotFind) {
                bbStack.push(succ);
            } else if (result == kResUseFirst) {
                return true;
            } else if (result == kResDefFirst) {
                // Do nothing; continue to process the remaining successors.
            }
        }
    }
    return false;
}

/* Check if the reg is used in the return BB. */
bool PeepPattern::CheckRegLiveinReturnBB(const RegOperand &regOpnd, const BB &bb) const
{
#if TARGAARCH64 || TARGRISCV64
    if (bb.GetKind() == BB::kBBReturn) {
        regno_t regNO = regOpnd.GetRegisterNumber();
        RegType regType = regOpnd.GetRegisterType();
        if (regType == kRegTyVary) {
            return false;
        }
        PrimType returnType = cgFunc.GetFunction().GetReturnType()->GetPrimType();
        regno_t returnReg = R0;
        if (IsPrimitiveFloat(returnType)) {
            returnReg = V0;
        } else if (IsPrimitiveInteger(returnType)) {
            returnReg = R0;
        }
        if (regNO == returnReg) {
            return true;
        }
    }
#endif
    return false;
}

/*
 * Check regNO in the current bb:
 * kResUseFirst: a use point is found first; kResDefFirst: a define point is found first;
 * kResNotFind: regNO is not found, so the search must continue.
 */
ReturnType PeepPattern::IsOpndLiveinBB(const RegOperand &regOpnd, const BB &bb) const
{
    FOR_BB_INSNS_CONST(insn, &bb)
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        const InsnDesc *md = insn->GetDesc();
        int32 lastOpndId = static_cast<int32>(insn->GetOperandSize() - 1);
        for (int32 i = lastOpndId; i >= 0; --i) {
            Operand &opnd = insn->GetOperand(static_cast<uint32>(i));
            auto *regProp = (md->opndMD[static_cast<uint32>(i)]);
            if (opnd.IsConditionCode()) {
                if (regOpnd.GetRegisterNumber() == kRFLAG) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            } else if (opnd.IsList()) {
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                if (insn->GetMachineOpcode() == MOP_asm) {
                    if (static_cast<uint32>(i) == kAsmOutputListOpnd || static_cast<uint32>(i) == kAsmClobberListOpnd) {
                        for (const auto op : listOpnd.GetOperands()) {
                            if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                                return kResDefFirst;
                            }
                        }
                        continue;
                    } else if (static_cast<uint32>(i) != kAsmInputListOpnd) {
                        continue;
                    }
                    /* fall thru for kAsmInputListOpnd */
                }
                for (const auto op : listOpnd.GetOperands()) {
                    if (op->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsMemoryAccessOperand()) {
                auto &mem = static_cast<MemOperand &>(opnd);
                Operand *base = mem.GetBaseRegister();
                Operand *offset = mem.GetOffset();

                if (base != nullptr) {
                    DEBUG_ASSERT(base->IsRegister(), "internal error.");
                    auto *tmpRegOpnd = static_cast<RegOperand *>(base);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
                if (offset != nullptr && offset->IsRegister()) {
                    auto *tmpRegOpnd = static_cast<RegOperand *>(offset);
                    if (tmpRegOpnd->GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                        return kResUseFirst;
                    }
                }
            } else if (opnd.IsRegister()) {
                auto &tmpRegOpnd = static_cast<RegOperand &>(opnd);
                if (tmpRegOpnd.GetRegisterNumber() == regOpnd.GetRegisterNumber()) {
                    bool isUse = regProp->IsUse();
                    if (isUse) {
                        return kResUseFirst;
                    }
                    DEBUG_ASSERT(regProp->IsDef(), "register should be redefined.");
                    return kResDefFirst;
                }
            }
        }
    }
    return kResNotFind;
}

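/*
 * Common legality check shared by memory-operand peephole patterns: nextInsn must
 * address memory as [base, #0] (kAddrModeBOi with intact indexing), the base must be
 * the register defined by insn (its first operand), and that register must not be
 * live after nextInsn.
 */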
bool PeepPattern::IsMemOperandOptPattern(const Insn &insn, Insn &nextInsn)
{
    /* Check if the base register of nextInsn and the dest operand of insn are identical. */
    auto *memOpnd = static_cast<MemOperand *>(nextInsn.GetMemOpnd());
    DEBUG_ASSERT(memOpnd != nullptr, "null ptr check");
    /* Only for the AddrMode_B_OI addressing mode. */
    if (memOpnd->GetAddrMode() != MemOperand::kAddrModeBOi) {
        return false;
    }
    /* Only when the immediate offset is 0. */
    if (memOpnd->GetOffsetImmediate()->GetOffsetValue() != 0) {
        return false;
    }
    /* Only for intact memory addressing. */
    if (!memOpnd->IsIntactIndexed()) {
        return false;
    }

    auto &oldBaseOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    /* Check if the dest operand of insn is identical with the base register of nextInsn. */
    if (memOpnd->GetBaseRegister() != &oldBaseOpnd) {
        return false;
    }

#ifdef USE_32BIT_REF
    if (nextInsn.IsAccessRefField() && nextInsn.GetOperand(kInsnFirstOpnd).GetSize() > k32BitSize) {
        return false;
    }
#endif
    /* Check whether the old base register is still used after nextInsn or is live-out. */
    if (IfOperandIsLiveAfterInsn(oldBaseOpnd, nextInsn)) {
        return false;
    }
    return true;
}

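/*
 * Instantiate the pattern matcher T from the peephole memory pool and apply it to
 * every machine instruction of the function, BB by BB.
 */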
template <typename T>
void PeepOptimizer::Run()
{
    auto *patternMatcher = peepOptMemPool->New<T>(cgFunc, peepOptMemPool);
    patternMatcher->InitOpts();
    FOR_ALL_BB(bb, &cgFunc)
    {
        FOR_BB_INSNS_SAFE(insn, bb, nextInsn)
        {
            if (!insn->IsMachineInstruction()) {
                continue;
            }
            patternMatcher->Run(*bb, *insn);
        }
    }
}

int32 PeepOptimizer::index = 0;

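/*
 * Peephole0 and PeepholeOpt drive the two AArch64 peephole passes; they differ only
 * in the pattern set that is instantiated (AArch64PeepHole0 vs. AArch64PeepHole).
 */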
void PeepHoleOptimizer::Peephole0()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PeepHole0>();
#endif
}

void PeepHoleOptimizer::PeepholeOpt()
{
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "peepholeOptObj");
    PeepOptimizer peepOptimizer(*cgFunc, memPool.get());
#if TARGAARCH64 || TARGRISCV64
    peepOptimizer.Run<AArch64PeepHole>();
#endif
}
#endif

/* === Physical Post Form === */
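/*
 * Phase wrapper: build the target-specific CGPeepHole for the function from the
 * phase memory pool and run it on the function in its physical (post) form.
 */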
bool CgPostPeepHole::PhaseRun(maplebe::CGFunc &f)
{
    MemPool *mp = GetPhaseMemPool();
    CGPeepHole *cgpeep = f.GetCG()->CreateCGPeepHole(*mp, f);
    CHECK_FATAL(cgpeep != nullptr, "CGPeepHole instance create failure");
    cgpeep->Run();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPostPeepHole, cgpostpeephole)

#if TARGAARCH64
bool CgPeepHole0::PhaseRun(maplebe::CGFunc &f)
{
    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->Peephole0();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPeepHole0, peephole0)

bool CgPeepHole1::PhaseRun(maplebe::CGFunc &f)
{
    auto *peep = GetPhaseMemPool()->New<PeepHoleOptimizer>(&f);
    CHECK_FATAL(peep != nullptr, "PeepHoleOptimizer instance create failure");
    peep->PeepholeOpt();
    return false;
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPeepHole1, peephole)
#endif

} /* namespace maplebe */