/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#if TARGAARCH64
#include "aarch64_ebo.h"
#endif
#if TARGRISCV64
#include "riscv64_ebo.h"
#endif
#if TARGARM32
#include "arm32_ebo.h"
#endif
#include "securec.h"

#include "common_utils.h"
#include "optimize_common.h"

/*
 * The optimizations include forward propagation, common expression elimination, constant folding,
 * dead code elimination and some target-specific optimizations. The main entry of the optimization is Run().
 * When the optimization level is below O2 the pass works only within a single block; at O2 and above it
 * works across a sequence of blocks (an extended block).
 */
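/*
 * Illustrative sketch of the transformations (hypothetical pseudo-assembly, not tied to a target):
 *     mov   R1, #4
 *     add   R2, R0, R1    // forward propagation rewrites this to: add R2, R0, #4
 *     add   R3, R0, R1    // common expression elimination can reuse R2 instead
 * Once every use of R1 has been rewritten, the defining "mov R1, #4" becomes dead code and is removed.
 * The infrastructure below builds the operand/insn hash tables that make such rewrites safe.
 */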
namespace maplebe {
using namespace maple;

#define EBO_DUMP CG_DEBUG_FUNC(*cgFunc)
#define EBO_DUMP_NEWPM CG_DEBUG_FUNC(f)
#define TRUE_OPND cgFunc->GetTrueOpnd()

constexpr uint32 kEboOpndHashLength = 521;
constexpr uint32 kEboMaxBBNums = 200;

/* Return the opndInfo for the first mem operand of insn. */
MemOpndInfo *Ebo::GetMemInfo(InsnInfo &insnInfo)
{
    Insn *insn = insnInfo.insn;
    CHECK_FATAL(insn != nullptr, "insnInfo.insn is nullptr!");
    CHECK_FATAL(insn->AccessMem(), "insn does not access memory!");
    uint32 opndNum = insn->GetOperandSize();
    if (insn->IsLoad()) {
        for (uint32 i = 0; i < opndNum; ++i) {
            if (insn->GetOperand(i).IsMemoryAccessOperand()) {
                return static_cast<MemOpndInfo *>(insnInfo.origOpnd[i]);
            }
        }
    } else if (insn->IsStore()) {
        int32 resId = 0;
        for (uint32 i = 0; i < opndNum; ++i) {
            if (insn->OpndIsDef(i)) {
                if (insn->GetOperand(i).IsMemoryAccessOperand()) {
                    return static_cast<MemOpndInfo *>(insnInfo.result[resId]);
                } else {
                    resId++;
                }
            }
        }
    }
    return nullptr;
}

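/* Return true if opnd is a register and is the frame register of the current function. */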
bool Ebo::IsFrameReg(Operand &opnd) const
{
    if (!opnd.IsRegister()) {
        return false;
    }
    RegOperand &reg = static_cast<RegOperand &>(opnd);
    return cgFunc->IsFrameReg(reg);
}

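/* Return the zero operand of the given size on targets that provide one (at most 64 bits); otherwise return nullptr. */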
Operand *Ebo::GetZeroOpnd(uint32 size) const
{
#if TARGAARCH64 || TARGRISCV64
    return size > k64BitSize ? nullptr : &cgFunc->GetZeroOpnd(size);
#else
    return nullptr;
#endif
}

bool Ebo::IsSaveReg(const Operand &opnd)
{
    if (!opnd.IsRegister()) {
        return false;
    }
    const RegOperand &reg = static_cast<const RegOperand &>(opnd);
    return cgFunc->IsSaveReg(reg, *cgFunc->GetFunction().GetReturnType(), cgFunc->GetBecommon());
}

bool Ebo::IsPhysicalReg(const Operand &opnd) const
{
    if (!opnd.IsRegister()) {
        return false;
    }
    const RegOperand &reg = static_cast<const RegOperand &>(opnd);
    return reg.IsPhysicalRegister();
}

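/* Return true if opnd is a register that is either physical or a virtual register for which IsInvalidReg() is false. */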
bool Ebo::HasAssignedReg(const Operand &opnd) const
{
    if (!opnd.IsRegister()) {
        return false;
    }
    const auto &reg = static_cast<const RegOperand &>(opnd);
    return reg.IsVirtualRegister() ? (!IsInvalidReg(reg)) : true;
}

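/* Return true if op0 and op1 are registers of the same register type (class). */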
bool Ebo::IsOfSameClass(const Operand &op0, const Operand &op1) const
{
    if (!op0.IsRegister() || !op1.IsRegister()) {
        return false;
    }
    const auto &reg0 = static_cast<const RegOperand &>(op0);
    const auto &reg1 = static_cast<const RegOperand &>(op1);
    return reg0.GetRegisterType() == reg1.GetRegisterType();
}

/* Return true if the value recorded in info is still available in bb. */
bool Ebo::OpndAvailableInBB(const BB &bb, OpndInfo *info)
{
    if (info == nullptr) {
        return false;
    }
    if (info->opnd == nullptr) {
        return false;
    }

    Operand *op = info->opnd;
    if (IsConstantImmOrReg(*op)) {
        return true;
    }

    int32 hashVal = 0;
    if (op->IsRegShift() || op->IsRegister()) {
        hashVal = -1;
    } else {
        hashVal = info->hashVal;
    }
    if (GetOpndInfo(*op, hashVal) != info) {
        return false;
    }
    /* Global operands aren't supported at low levels of optimization. */
    if ((Globals::GetInstance()->GetOptimLevel() < CGOptions::kLevel2) && (&bb != info->bb)) {
        return false;
    }
    if (beforeRegAlloc && IsPhysicalReg(*op)) {
        return false;
    }
    return true;
}

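/*
 * Check whether opndReplace can be forward-propagated into insn in place of opnd: the replacement must be a
 * constant, or still be available (or be the identical register) with the same register-assignment state,
 * it must not have been redefined, and before RA it must not be a physical register.
 */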
bool Ebo::ForwardPropCheck(const Operand *opndReplace, const OpndInfo &opndInfo, const Operand &opnd, Insn &insn)
{
    if (opndReplace == nullptr) {
        return false;
    }
    if ((opndInfo.replacementInfo != nullptr) && opndInfo.replacementInfo->redefined) {
        return false;
    }
#if TARGARM32
    /* For arm32, disable forward propagation in strd insns. */
    if (insn.GetMachineOpcode() == MOP_strd) {
        return false;
    }
    if (opndInfo.mayReDef) {
        return false;
    }
#endif
    if (!(IsConstantImmOrReg(*opndReplace) ||
          ((OpndAvailableInBB(*insn.GetBB(), opndInfo.replacementInfo) || RegistersIdentical(opnd, *opndReplace)) &&
           (HasAssignedReg(opnd) == HasAssignedReg(*opndReplace))))) {
        return false;
    }
    /* If before RA, the replacement operand should not be a physical register. */
    return !beforeRegAlloc || !IsPhysicalReg(*opndReplace);
}

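/*
 * Additional checks for propagating a register operand: reject constants, frame registers after RA, insns with
 * a both-def-use operand and pseudo returns; require the old and replacement operands to be of the same class
 * with a compatible size, or the insn to be a mov convertible to a SIMD vmov.
 */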
bool Ebo::RegForwardCheck(Insn &insn, const Operand &opnd, const Operand *opndReplace, Operand &oldOpnd,
                          const OpndInfo *tmpInfo)
{
    if (IsConstantImmOrReg(opnd)) {
        return false;
    }
    if (!(!beforeRegAlloc || (HasAssignedReg(oldOpnd) == HasAssignedReg(*opndReplace)) || IsZeroRegister(opnd) ||
          !insn.IsMove())) {
        return false;
    }
    std::set<regno_t> defRegs = insn.GetDefRegs();
    if (!(defRegs.empty() ||
          ((opnd.IsRegister() && !defRegs.count(static_cast<const RegOperand &>(opnd).GetRegisterNumber())) ||
           !beforeRegAlloc))) {
        return false;
    }
    if (!(beforeRegAlloc || !IsFrameReg(oldOpnd))) {
        return false;
    }
    if (insn.GetBothDefUseOpnd() != kInsnMaxOpnd) {
        return false;
    }
    if (IsPseudoRet(insn)) {
        return false;
    }

    return ((IsOfSameClass(oldOpnd, *opndReplace) && (oldOpnd.GetSize() <= opndReplace->GetSize())) ||
            ((tmpInfo != nullptr) && IsMovToSIMDVmov(insn, *tmpInfo->insn)));
}

/* The info of a memory operand is stored in a hash table; this function computes its hash value. */
int32 Ebo::ComputeOpndHash(const Operand &opnd) const
{
    uint64 hashIdx = reinterpret_cast<uint64>(&opnd) >> k4ByteSize;
    return static_cast<int32>(hashIdx % kEboOpndHashLength);
}

/* Store the operand information: in vRegInfo if the operand is a register, otherwise in the hash table. */
void Ebo::SetOpndInfo(const Operand &opnd, OpndInfo *opndInfo, int32 hashVal)
{
    /* opnd is Register or RegShift */
    if (hashVal == -1) {
        const RegOperand &reg = GetRegOperand(opnd);
        vRegInfo[reg.GetRegisterNumber()] = opndInfo;
        return;
    }

    CHECK_FATAL(static_cast<uint64>(static_cast<int64>(hashVal)) < exprInfoTable.size(),
                "SetOpndInfo hashVal out of range!");
    opndInfo->hashVal = hashVal;
    opndInfo->hashNext = exprInfoTable.at(hashVal);
    exprInfoTable.at(hashVal) = opndInfo;
}

/* Change the recorded info of opnd from opndInfo to newInfo. */
void Ebo::UpdateOpndInfo(const Operand &opnd, OpndInfo &opndInfo, OpndInfo *newInfo, int32 hashVal)
{
    if (hashVal == -1) {
        const RegOperand &reg = GetRegOperand(opnd);
        vRegInfo[reg.GetRegisterNumber()] = newInfo;
        return;
    }
    DEBUG_ASSERT(static_cast<uint32>(hashVal) < exprInfoTable.size(), "UpdateOpndInfo hashVal out of range!");
    OpndInfo *info = exprInfoTable.at(hashVal);
    if (newInfo != nullptr) {
        newInfo->hashNext = opndInfo.hashNext;
        opndInfo.hashNext = nullptr;
        if (info == &opndInfo) {
            exprInfoTable.at(hashVal) = newInfo;
            return;
        }
        while (info != nullptr) {
            if (info->hashNext == &opndInfo) {
                info->hashNext = newInfo;
                return;
            }
            info = info->hashNext;
        }
        return;
    }
    if (info == &opndInfo) {
        exprInfoTable.at(hashVal) = opndInfo.hashNext;
        return;
    }
    while (info != nullptr) {
        if (info->hashNext == &opndInfo) {
            info->hashNext = opndInfo.next;
            opndInfo.hashNext = nullptr;
            return;
        }
        info = info->hashNext;
    }
}

/* Return true if op1 and op2 are equal. */
bool Ebo::OperandEqual(const Operand &op1, const Operand &op2) const
{
    if (&op1 == &op2) {
        return true;
    }
    if (op1.GetKind() != op2.GetKind()) {
        return false;
    }
    return OperandEqSpecial(op1, op2);
}

OpndInfo *Ebo::GetOpndInfo(const Operand &opnd, int32 hashVal) const
{
    if (hashVal < 0) {
        const RegOperand &reg = GetRegOperand(opnd);
        auto it = vRegInfo.find(reg.GetRegisterNumber());
        return it != vRegInfo.end() ? it->second : nullptr;
    }
    /* Do not look up previous memory operands. */
    if (opnd.IsMemoryAccessOperand()) {
        return nullptr;
    }
    DEBUG_ASSERT(static_cast<uint32>(hashVal) < exprInfoTable.size(), "GetOpndInfo hashVal out of range!");
    OpndInfo *info = exprInfoTable.at(hashVal);
    while (info != nullptr) {
        if (&opnd == info->opnd) {
            return info;
        }
        info = info->hashNext;
    }
    return nullptr;
}

/* Create an opndInfo for opnd. */
OpndInfo *Ebo::GetNewOpndInfo(BB &bb, Insn *insn, Operand &opnd, int32 hashVal)
{
    OpndInfo *opndInfo = nullptr;
    if (opnd.IsMemoryAccessOperand()) {
        opndInfo = eboMp->New<MemOpndInfo>(opnd);
    } else {
        opndInfo = eboMp->New<OpndInfo>(opnd);
    }
    /* Initialize the entry. */
    opndInfo->hashVal = hashVal;
    opndInfo->opnd = &opnd;
    opndInfo->bb = &bb;
    opndInfo->insn = insn;
    opndInfo->prev = lastOpndInfo;
    if (firstOpndInfo == nullptr) {
        firstOpndInfo = opndInfo;
    } else {
        lastOpndInfo->next = opndInfo;
    }
    lastOpndInfo = opndInfo;
    return opndInfo;
}

/* Update the use information for localOpnd, which is used in the current insn of currentBB. */
OpndInfo *Ebo::OperandInfoUse(BB &currentBB, Operand &localOpnd)
{
    if (!(localOpnd.IsRegister() || localOpnd.IsRegShift()) && !localOpnd.IsMemoryAccessOperand()) {
        return nullptr;
    }
    int hashVal = 0;
    /* only arm32 has regShift */
    if (localOpnd.IsRegister() || localOpnd.IsRegShift()) {
        hashVal = -1;
    } else {
        hashVal = ComputeOpndHash(localOpnd);
    }
    OpndInfo *opndInfo = GetOpndInfo(localOpnd, hashVal);

    if (opndInfo == nullptr) {
        opndInfo = GetNewOpndInfo(currentBB, nullptr, localOpnd, hashVal);
        SetOpndInfo(localOpnd, opndInfo, hashVal);
    }
    IncRef(*opndInfo);
    return opndInfo;
}

/* Return true if op0 and op1 are identical registers. */
bool Ebo::RegistersIdentical(const Operand &op0, const Operand &op1) const
{
    if (&op0 == &op1) {
        return true;
    }
    if (!(op0.IsRegister() && op1.IsRegister())) {
        return false;
    }
    const RegOperand &reg0 = static_cast<const RegOperand &>(op0);
    const RegOperand &reg1 = static_cast<const RegOperand &>(op1);
    return ((reg0.IsPhysicalRegister() || !IsInvalidReg(reg0)) && (reg1.IsPhysicalRegister() || !IsInvalidReg(reg1)) &&
            (reg0.GetRegisterType() == reg1.GetRegisterType()) &&
            (reg0.GetRegisterNumber() == reg1.GetRegisterNumber()));
}

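/* Create a new InsnInfo for insn and append it to the insn info list. */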
InsnInfo *Ebo::GetNewInsnInfo(Insn &insn)
{
    InsnInfo *insnInfo = eboMp->New<InsnInfo>(*eboMp, insn);
    insnInfo->prev = lastInsnInfo;
    if (firstInsnInfo == nullptr) {
        firstInsnInfo = insnInfo;
    } else {
        lastInsnInfo->next = insnInfo;
    }
    lastInsnInfo = insnInfo;
    insnInfo->next = nullptr;
    return insnInfo;
}

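/*
 * Compute the hash value of insn: memory accesses, effective copies and ordinary insns hash into
 * distinct slots/ranges of the insn hash table.
 */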
uint32 Ebo::ComputeHashVal(Insn &insn, const MapleVector<OpndInfo *> &opndInfos) const
{
    uint32 hashVal = 0;
    if (insn.AccessMem()) {
        hashVal = kEboDefaultMemHash;
        if (insn.NoAlias()) {
            hashVal = kEboNoAliasMemHash;
        }
        MemOperand *memOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
        if (memOpnd != nullptr) {
            Operand *baseReg = memOpnd->GetBaseRegister();
            if ((baseReg != nullptr) && IsFrameReg(*baseReg)) {
                hashVal = kEboSpillMemHash;
            }
        }
    } else if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(insn)) {
        hashVal = kEboCopyInsnHash;
    } else {
        uint32 opndNum = insn.GetOperandSize();
        hashVal = insn.GetMachineOpcode();
        for (uint32 i = 0; i < opndNum; ++i) {
            hashVal += static_cast<uint32>(reinterpret_cast<uintptr_t>(opndInfos.at(i)));
        }
        hashVal = static_cast<uint32>(kEboReservedInsnHash + EBO_EXP_INSN_HASH(hashVal));
    }
    return hashVal;
}

/* Compute the hash value of insn, record its InsnInfo, and build the result operand info. */
void Ebo::HashInsn(Insn &insn, const MapleVector<OpndInfo *> &origInfo, const MapleVector<OpndInfo *> &opndInfos)
{
    uint32 hashVal = ComputeHashVal(insn, opndInfos);
    /* Create a new insnInfo entry and add the new insn to the hash table. */
    InsnInfo *insnInfo = GetNewInsnInfo(insn);
    insnInfo->bb = insn.GetBB();
    insnInfo->insn = &insn;
    insnInfo->hashIndex = hashVal;
    insnInfo->same = insnInfoTable.at(hashVal);

    if (!beforeRegAlloc) {
        if ((insn.IsCall() || insn.IsTailCall() || insn.IsAsmInsn()) && !insn.GetIsThrow()) {
            DefineCallerSaveRegisters(*insnInfo);
        } else if (IsClinitCheck(insn)) {
            DefineClinitSpecialRegisters(*insnInfo);
        }
    }
    uint32 opndNum = insn.GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        /* Copy all the opndInfo entries for the operands. */
        insnInfo->origOpnd.emplace_back(origInfo.at(i));
        insnInfo->optimalOpnd.emplace_back(opndInfos.at(i));
        /* Keep the result info. */
        if (!insn.OpndIsDef(i)) {
            continue;
        }
        auto genOpndInfoDef = [this, insnInfo](Operand &op) {
            OpndInfo *opndInfo = nullptr;
            if ((&op != TRUE_OPND) &&
                ((op.IsRegister() && (&op) != GetZeroOpnd(op.GetSize())) ||
                 (op.IsMemoryAccessOperand() && (static_cast<MemOperand &>(op)).GetBaseRegister() != nullptr))) {
                opndInfo = OperandInfoDef(*insnInfo->bb, *insnInfo->insn, op);
                opndInfo->insnInfo = insnInfo;
            }
            insnInfo->result.emplace_back(opndInfo);
        };
        Operand &op = insn.GetOperand(i);
        if (op.IsList() && !static_cast<ListOperand &>(op).GetOperands().empty()) {
            for (auto operand : static_cast<ListOperand &>(op).GetOperands()) {
                genOpndInfoDef(*operand);
            }
        } else {
            genOpndInfoDef(op);
        }
    }
    SetInsnInfo(hashVal, *insnInfo);
}

/* Do DecRef on the origInfo entries (and the base/offset info of memory operands); refCount will be set to 0. */
void Ebo::RemoveUses(uint32 opndNum, const MapleVector<OpndInfo *> &origInfo)
{
    OpndInfo *info = nullptr;
    for (uint32 i = 0; i < opndNum; ++i) {
        info = origInfo.at(i);
        if (info != nullptr) {
            DecRef(*info);
            if (info->opnd->IsMemoryAccessOperand()) {
                MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(info);
                OpndInfo *baseInfo = memInfo->GetBaseInfo();
                OpndInfo *offsetInfo = memInfo->GetOffsetInfo();
                if (baseInfo != nullptr) {
                    DecRef(*baseInfo);
                }
                if (offsetInfo != nullptr) {
                    DecRef(*offsetInfo);
                }
            }
        }
    }
}

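/*
 * Build the operand info for a memory operand: record uses of the base and offset registers, try to
 * forward-propagate the base register, and attach the base/offset info to the resulting MemOpndInfo.
 */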
OpndInfo *Ebo::BuildMemOpndInfo(BB &bb, Insn &insn, Operand &opnd, uint32 opndIndex)
{
    auto *memOpnd = static_cast<MemOperand *>(&opnd);
    Operand *base = memOpnd->GetBaseRegister();
    Operand *offset = memOpnd->GetOffset();
    OpndInfo *baseInfo = nullptr;
    OpndInfo *offsetInfo = nullptr;
    if (base != nullptr) {
        if (!memOpnd->IsIntactIndexed()) {
            baseInfo = OperandInfoUse(bb, *base);
            baseInfo = OperandInfoDef(bb, insn, *base);
            return baseInfo;
        } else {
            baseInfo = OperandInfoUse(bb, *base);
        }
        /* forward prop for base register. */
        if ((baseInfo != nullptr) && base->IsRegister()) {
            auto *baseReg = static_cast<RegOperand *>(base);
            Operand *replaceOpnd = baseInfo->replacementOpnd;
            OpndInfo *replaceInfo = baseInfo->replacementInfo;
            if ((replaceInfo != nullptr) && (replaceOpnd != nullptr) && !cgFunc->IsSPOrFP(*baseReg) &&
                (!beforeRegAlloc || (!IsPhysicalReg(*replaceOpnd) && !IsPhysicalReg(*base))) &&
                IsOfSameClass(*base, *replaceOpnd) && memOpnd->IsIntactIndexed() &&
                (base->GetSize() <= replaceOpnd->GetSize()) &&
                /* In case that replace opnd was redefined. */
                !replaceInfo->redefined) {
                MemOperand *newMem = static_cast<MemOperand *>(memOpnd->Clone(*cgFunc->GetMemoryPool()));
                CHECK_FATAL(newMem != nullptr, "newMem is null in Ebo::BuildAllInfo(BB *bb)");
                newMem->SetBaseRegister(*static_cast<RegOperand *>(replaceOpnd));
                insn.SetOperand(opndIndex, *newMem);
                DecRef(*baseInfo);
                IncRef(*replaceInfo);
                baseInfo = replaceInfo;
            }
        }
    }
    if ((offset != nullptr) && offset->IsRegister()) {
        offsetInfo = OperandInfoUse(bb, *offset);
    }
    OpndInfo *opndInfo = OperandInfoUse(bb, insn.GetOperand(opndIndex));
    CHECK_FATAL(opndInfo != nullptr, "opndInfo should not be null ptr");
    MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(opndInfo);
    if (baseInfo != nullptr) {
        memInfo->SetBaseInfo(*baseInfo);
    }
    if (offsetInfo != nullptr) {
        memInfo->SetOffsetInfo(*offsetInfo);
    }
    return memInfo;
}

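/*
 * Build the operand info for one operand of insn: list operands only record uses, a condition code records the
 * info of rflag, and register/memory operands get a use entry stored into origInfos.
 */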
OpndInfo *Ebo::BuildOperandInfo(BB &bb, Insn &insn, Operand &opnd, uint32 opndIndex, MapleVector<OpndInfo *> &origInfos)
{
    if (opnd.IsList()) {
        ListOperand *listOpnd = static_cast<ListOperand *>(&opnd);
        for (auto op : listOpnd->GetOperands()) {
            OperandInfoUse(bb, *op);
        }
        return nullptr;
    }
    DEBUG_ASSERT(opndIndex < origInfos.size(), "opndIndex out of range!");
    if (opnd.IsConditionCode()) {
        Operand &rFlag = cgFunc->GetOrCreateRflag();
        OperandInfoUse(bb, rFlag);
        /* If the operand is a condition code, origInfos stores the info of rFlag. */
        OpndInfo *tempOpndInfo = GetOpndInfo(rFlag, -1);
        origInfos.at(opndIndex) = tempOpndInfo;
        return nullptr;
    }

    if (!(opnd.IsRegister() || opnd.IsRegShift()) && !opnd.IsMemoryAccessOperand()) {
        return nullptr;
    }

    if (opnd.IsMemoryAccessOperand()) {
        OpndInfo *memInfo = BuildMemOpndInfo(bb, insn, opnd, opndIndex);
        CHECK_FATAL(memInfo != nullptr, "build memopnd info failed in Ebo::BuildAllInfo");
        origInfos.at(opndIndex) = memInfo;
        return nullptr;
    }
    OpndInfo *opndInfo = OperandInfoUse(bb, opnd);
    origInfos.at(opndIndex) = opndInfo;
    return opndInfo;
}

/*
 * This function does the following:
 * 1. delete the duplicated insn if SimplifyInsn failed.
 * 2. build the insnInfo if deleting the duplicated insn failed (HashInsn does this).
 * 3. update replaceInfo.
 */
void Ebo::FindRedundantInsns(BB &bb, Insn *&insn, const Insn *prev, bool insnReplaced, MapleVector<Operand *> &opnds,
                             MapleVector<OpndInfo *> &opndInfos, const MapleVector<OpndInfo *> &origInfos)
{
    CHECK_FATAL(insn != nullptr, "nullptr check");
    if (!insnReplaced) {
        CHECK_FATAL(origInfos.size() != 0, "null ptr check");
        CHECK_FATAL(opndInfos.size() != 0, "null ptr check");
        HashInsn(*insn, origInfos, opndInfos);
        /* Process the result of the insn. */
        if ((Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn) || !insn->GetDefRegs().empty()) &&
            !insn->IsSpecialIntrinsic()) {
            Operand *res = &insn->GetOperand(kInsnFirstOpnd);
            if ((res != nullptr) && (res != TRUE_OPND) && (res != GetZeroOpnd(res->GetSize()))) {
                CHECK_FATAL(lastInsnInfo != nullptr, "lastInsnInfo is null!");
                OpndInfo *opndInfo = lastInsnInfo->result[0];
                /* Don't propagate for fmov insns. */
                if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn) && (opndInfo != nullptr) &&
                    !IsFmov(*insn)) {
                    CHECK_FATAL(!opnds.empty(), "null container!");
                    opndInfo->replacementOpnd = opnds[kInsnSecondOpnd];
                    opndInfo->replacementInfo = opndInfos[kInsnSecondOpnd];
                } else if (insn->GetBothDefUseOpnd() != kInsnMaxOpnd && (opndInfo != nullptr)) {
                    opndInfo->replacementOpnd = nullptr;
                    opndInfo->replacementInfo = nullptr;
                }
            }
        }
        insn = insn->GetNext();
    } else {
        uint32 opndNum = insn->GetOperandSize();
        RemoveUses(opndNum, origInfos);
        /* If the insn was replaced, reanalyze the new insn to expose more opportunities. */
        insn = (prev == nullptr ? bb.GetFirstInsn() : prev->GetNext());
    }
}

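/* Record implicit register uses before the main analysis: return-use registers, and the special registers used by calls and clinit. */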
void Ebo::PreProcessSpecialInsn(Insn &insn)
{
    DefineReturnUseRegister(insn);

    if (insn.IsCall() || insn.IsClinit()) {
        DefineCallUseSpecialRegister(insn);
    }
}

/* Decrement the use counts for the actual operands of an insnInfo. */
void Ebo::RemoveInsn(InsnInfo &info)
{
    Insn *insn = info.insn;
    CHECK_FATAL(insn != nullptr, "get insn in info failed in Ebo::RemoveInsn");
    uint32 opndNum = insn->GetOperandSize();
    OpndInfo *opndInfo = nullptr;
    for (uint32 i = 0; i < opndNum; i++) {
        if (!insn->OpndIsUse(i)) {
            continue;
        }
        opndInfo = info.origOpnd[i];
        if (opndInfo != nullptr) {
            DecRef(*opndInfo);
            Operand *opndTemp = opndInfo->opnd;
            if (opndTemp == nullptr) {
                continue;
            }
            if (opndTemp->IsMemoryAccessOperand()) {
                MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(opndInfo);
                OpndInfo *baseInfo = memInfo->GetBaseInfo();
                OpndInfo *offInfo = memInfo->GetOffsetInfo();
                if (baseInfo != nullptr) {
                    DecRef(*baseInfo);
                }
                if (offInfo != nullptr) {
                    DecRef(*offInfo);
                }
            }
        }
    }
#if TARGARM32
    Arm32CGFunc *a32CGFunc = static_cast<Arm32CGFunc *>(cgFunc);
    auto &gotInfosMap = a32CGFunc->GetGotInfosMap();
    for (auto it = gotInfosMap.begin(); it != gotInfosMap.end();) {
        if (it->first == insn) {
            it = gotInfosMap.erase(it);
        } else {
            ++it;
        }
    }
    auto &constInfosMap = a32CGFunc->GetConstInfosMap();
    for (auto it = constInfosMap.begin(); it != constInfosMap.end();) {
        if (it->first == insn) {
            it = constInfosMap.erase(it);
        } else {
            ++it;
        }
    }
#endif
}

/* Mark opnd as live between the def BB and the into BB. */
void Ebo::MarkOpndLiveIntoBB(const Operand &opnd, BB &into, BB &def) const
{
    if (live == nullptr) {
        return;
    }
    if (&into == &def) {
        return;
    }
    CHECK_FATAL(opnd.IsRegister(), "expect register here.");
    const RegOperand &reg = static_cast<const RegOperand &>(opnd);
    into.SetLiveInBit(reg.GetRegisterNumber());
    def.SetLiveOutBit(reg.GetRegisterNumber());
}

/* Return info's insnInfo if present; otherwise search backward from lastInsnInfo (at most 50 entries) for the defining insn. */
InsnInfo *Ebo::LocateInsnInfo(const OpndInfo &info)
{
    if (info.insn != nullptr) {
        if (info.insnInfo != nullptr) {
            return info.insnInfo;
        } else {
            InsnInfo *insnInfo = lastInsnInfo;
            int32 limit = 50;
            for (; (insnInfo != nullptr) && (limit != 0); insnInfo = insnInfo->prev, limit--) {
                if (insnInfo->insn == info.insn) {
                    return insnInfo;
                }
            }
        }
    }
    return nullptr;
}

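/*
 * Walk the 'same' chain of opndInfo and make sure the defining insns of later entries are not removed:
 * mark their InsnInfo as mustNotBeRemoved, or transfer the ref count when no InsnInfo can be located.
 */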
void Ebo::UpdateNextInfo(const OpndInfo &opndInfo)
{
    OpndInfo *nextInfo = opndInfo.same;
    while (nextInfo != nullptr) {
        if (nextInfo->insn != nullptr) {
            InsnInfo *info = LocateInsnInfo(*nextInfo);
            if (info != nullptr) {
                info->mustNotBeRemoved = true;
            } else {
                /*
                 * Couldn't find the insnInfo entry. Make sure that the operand has
                 * a use count so that the defining insn will not be deleted.
                 */
                nextInfo->refCount += opndInfo.refCount;
            }
        }
        nextInfo = nextInfo->same;
    }
}

/* Back up to the last saved OpndInfo. */
void Ebo::BackupOpndInfoList(OpndInfo *saveLast)
{
    if (lastOpndInfo == saveLast) {
        return;
    }
    OpndInfo *opndInfo = lastOpndInfo;
    while (opndInfo != saveLast) {
        int32 hashVal = 0;
        if (opndInfo->opnd->IsRegister() || opndInfo->opnd->IsRegShift()) {
            hashVal = -1;
        } else {
            hashVal = opndInfo->hashVal;
        }
        UpdateOpndInfo(*opndInfo->opnd, *opndInfo, opndInfo->same, hashVal);
        opndInfo = opndInfo->prev;
    }
    if (saveLast != nullptr) {
        saveLast->next = nullptr;
        lastOpndInfo = saveLast;
    } else {
        firstOpndInfo = nullptr;
        lastOpndInfo = nullptr;
    }
}

/* Back up to the last saved InsnInfo. */
void Ebo::BackupInsnInfoList(InsnInfo *saveLast)
{
    if (lastInsnInfo == saveLast) {
        return;
    }
    InsnInfo *insnInfo = lastInsnInfo;
    while (insnInfo != saveLast) {
        SetInsnInfo(insnInfo->hashIndex, *(insnInfo->same));
        insnInfo = insnInfo->prev;
    }
    if (saveLast != nullptr) {
        saveLast->next = nullptr;
        lastInsnInfo = saveLast;
    } else {
        firstInsnInfo = nullptr;
        lastInsnInfo = nullptr;
    }
}

/* Add bb to the extended block and build the operand info of bb. */
void Ebo::AddBB2EB(BB &bb)
{
    OpndInfo *saveLastOpndInfo = lastOpndInfo;
    InsnInfo *saveLastInsnInfo = lastInsnInfo;
    SetBBVisited(bb);
    bbNum++;
    /* Stop adding BBs to the EB if the number of BBs in the current EB exceeds kEboMaxBBNums. */
    if (bbNum < kEboMaxBBNums) {
        for (auto *bbSucc : bb.GetSuccs()) {
            if ((bbSucc->GetPreds().size() == 1) && IsNotVisited(*bbSucc)) {
                AddBB2EB(*bbSucc);
            }
        }
    }

    /* Remove information about Operands and Insns in this block. */
    BackupOpndInfoList(saveLastOpndInfo);
    BackupInsnInfoList(saveLastInsnInfo);
    bbNum--;
}

/* Perform EBO */
void Ebo::EboProcess()
{
    FOR_ALL_BB(bb, cgFunc) {
        if (IsNotVisited(*bb)) {
            bbNum = 0;
            AddBB2EB(*bb);
        }
    }
}

/* Perform EBO at -O1, where the optimization is restricted to a single block. */
void Ebo::EboProcessSingleBB()
{
    FOR_ALL_BB(bb, cgFunc) {
        OpndInfo *saveLastOpndInfo = lastOpndInfo;
        InsnInfo *saveLastInsnInfo = lastInsnInfo;
        /* Remove information about Operands and Insns in this block. */
        BackupOpndInfoList(saveLastOpndInfo);
        BackupInsnInfoList(saveLastInsnInfo);
    }
}

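/* Reset the per-function EBO state: the visited-BB flags, the operand and insn hash tables, and (after RA) the caller-saved register set. */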
void Ebo::EboInit()
{
    visitedBBs.resize(cgFunc->NumBBs());
    for (uint32 i = 0; i < cgFunc->NumBBs(); ++i) {
        visitedBBs[i] = false;
    }
    exprInfoTable.resize(kEboMaxOpndHash);
    for (uint32 i = 0; i < kEboMaxOpndHash; ++i) {
        exprInfoTable.at(i) = nullptr;
    }
    insnInfoTable.resize(kEboMaxInsnHash);
    for (uint32 i = 0; i < kEboMaxInsnHash; ++i) {
        insnInfoTable.at(i) = nullptr;
    }
    if (!beforeRegAlloc) {
        BuildCallerSaveRegisters();
    }
    optSuccess = false;
}

/* Perform EB optimizations right after instruction selection. */
void Ebo::Run()
{
    EboInit();
    if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel2) {
        EboProcess();
    } else {
        EboProcessSingleBB(); /* Perform single-BB optimization when -O1. */
    }
    if (optSuccess && cgFunc->GetMirModule().IsCModule()) {
        Run();
    }
}

/* === new pm === */
bool CgEbo0::PhaseRun(maplebe::CGFunc &f)
{
    if (EBO_DUMP_NEWPM) {
        DotGenerator::GenerateDot("ebo0", f, f.GetMirModule());
    }
    LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
    MemPool *eboMp = GetPhaseMemPool();
    Ebo *ebo = nullptr;
#if TARGAARCH64 || TARGRISCV64
    ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, true, PhaseName());
#endif
#if TARGARM32
    ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, true, "ebo0");
#endif
    ebo->Run();
    /* The live range info may have changed, so invalidate it. */
    if (live != nullptr) {
        live->ClearInOutDataInfo();
    }
    return true;
}

void CgEbo0::GetAnalysisDependence(maple::AnalysisDep &aDep) const
{
    aDep.AddRequired<CgLiveAnalysis>();
    aDep.AddPreserved<CgLoopAnalysis>();
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo0, ebo)

bool CgEbo1::PhaseRun(maplebe::CGFunc &f)
{
    if (EBO_DUMP_NEWPM) {
        DotGenerator::GenerateDot(PhaseName(), f, f.GetMirModule());
    }
    LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
    MemPool *eboMp = GetPhaseMemPool();
    Ebo *ebo = nullptr;
#if TARGAARCH64 || TARGRISCV64
    ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, true, PhaseName());
#endif
#if TARGARM32
    ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, true, PhaseName());
#endif
    ebo->Run();
    /* The live range info may have changed, so invalidate it. */
    if (live != nullptr) {
        live->ClearInOutDataInfo();
    }
    return true;
}

void CgEbo1::GetAnalysisDependence(maple::AnalysisDep &aDep) const
{
    aDep.AddRequired<CgLiveAnalysis>();
    aDep.AddPreserved<CgLoopAnalysis>();
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo1, ebo1)

bool CgPostEbo::PhaseRun(maplebe::CGFunc &f)
{
    if (EBO_DUMP_NEWPM) {
        DotGenerator::GenerateDot(PhaseName(), f, f.GetMirModule());
    }
    LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
    MemPool *eboMp = GetPhaseMemPool();
    Ebo *ebo = nullptr;
#if TARGAARCH64 || TARGRISCV64
    ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, false, PhaseName());
#endif
#if TARGARM32
    ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, false, PhaseName());
#endif
    ebo->Run();
    /* The live range info may have changed, so invalidate it. */
    if (live != nullptr) {
        live->ClearInOutDataInfo();
    }
    return true;
}

void CgPostEbo::GetAnalysisDependence(maple::AnalysisDep &aDep) const
{
    aDep.AddRequired<CgLiveAnalysis>();
    aDep.AddPreserved<CgLoopAnalysis>();
}
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPostEbo, postebo)
} /* namespace maplebe */