1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #if TARGAARCH64
17 #include "aarch64_ebo.h"
18 #elif TARGRISCV64
19 #include "riscv64_ebo.h"
20 #endif
21 #if TARGARM32
22 #include "arm32_ebo.h"
23 #endif
24 #include "securec.h"
25
26 #include "common_utils.h"
27 #include "optimize_common.h"
28
/*
 * The optimizations include forward propagation, common expression elimination, constant folding,
 * dead code elimination and some target-specific optimizations. The main entry of the optimization
 * is Run. When the optimization level is less than O2 it is only performed within a single block;
 * at O2 it is performed across a sequence of blocks.
 */
35 namespace maplebe {
36 using namespace maple;
37
38 #define EBO_DUMP CG_DEBUG_FUNC(*cgFunc)
39 #define EBO_DUMP_NEWPM CG_DEBUG_FUNC(f)
40 #define TRUE_OPND cgFunc->GetTrueOpnd()
41
42 constexpr uint32 kEboOpndHashLength = 521;
43 constexpr uint32 kEboMaxBBNums = 200;
44
45 /* Return the opndInfo for the first mem operand of insn. */
GetMemInfo(InsnInfo & insnInfo)46 MemOpndInfo *Ebo::GetMemInfo(InsnInfo &insnInfo)
47 {
48 Insn *insn = insnInfo.insn;
49 CHECK_FATAL(insn != nullptr, "insnInfo.insn is nullptr!");
50 CHECK_FATAL(insn->AccessMem(), "insn is not access memory!");
51 uint32 opndNum = insn->GetOperandSize();
52 if (insn->IsLoad()) {
53 for (uint32 i = 0; i < opndNum; ++i) {
54 if (insn->GetOperand(i).IsMemoryAccessOperand()) {
55 return static_cast<MemOpndInfo *>(insnInfo.origOpnd[i]);
56 }
57 }
58 } else if (insn->IsStore()) {
59 int32 resId = 0;
60 for (uint32 i = 0; i < opndNum; ++i) {
61 if (insn->OpndIsDef(i)) {
62 if (insn->GetOperand(i).IsMemoryAccessOperand()) {
63 return static_cast<MemOpndInfo *>(insnInfo.result[resId]);
64 } else {
65 resId++;
66 }
67 }
68 }
69 }
70 return nullptr;
71 }
72
EnlargeSpaceForLA(Insn & csetInsn)73 void Ebo::EnlargeSpaceForLA(Insn &csetInsn)
74 {
75 CHECK_FATAL(live != nullptr, "no live info!");
76 live->EnlargeSpaceForLiveAnalysis(*csetInsn.GetBB());
77 }
78
IsFrameReg(Operand & opnd) const79 bool Ebo::IsFrameReg(Operand &opnd) const
80 {
81 if (!opnd.IsRegister()) {
82 return false;
83 }
84 RegOperand ® = static_cast<RegOperand &>(opnd);
85 return cgFunc->IsFrameReg(reg);
86 }
87
GetZeroOpnd(uint32 size) const88 Operand *Ebo::GetZeroOpnd(uint32 size) const
89 {
90 #if TARGAARCH64 || TARGRISCV64
91 return size > k64BitSize ? nullptr : &cgFunc->GetZeroOpnd(size);
92 #else
93 return nullptr;
94 #endif
95 }
96
IsSaveReg(const Operand & opnd)97 bool Ebo::IsSaveReg(const Operand &opnd)
98 {
99 if (!opnd.IsRegister()) {
100 return false;
101 }
102 const RegOperand ® = static_cast<const RegOperand &>(opnd);
103 return cgFunc->IsSaveReg(reg, *cgFunc->GetFunction().GetReturnType(), cgFunc->GetBecommon());
104 }
105
IsPhysicalReg(const Operand & opnd) const106 bool Ebo::IsPhysicalReg(const Operand &opnd) const
107 {
108 if (!opnd.IsRegister()) {
109 return false;
110 }
111 const RegOperand ® = static_cast<const RegOperand &>(opnd);
112 return reg.IsPhysicalRegister();
113 }
114
HasAssignedReg(const Operand & opnd) const115 bool Ebo::HasAssignedReg(const Operand &opnd) const
116 {
117 if (!opnd.IsRegister()) {
118 return false;
119 }
120 const auto ® = static_cast<const RegOperand &>(opnd);
121 return reg.IsVirtualRegister() ? (!IsInvalidReg(reg)) : true;
122 }
123
IsOfSameClass(const Operand & op0,const Operand & op1) const124 bool Ebo::IsOfSameClass(const Operand &op0, const Operand &op1) const
125 {
126 if (!op0.IsRegister() || !op1.IsRegister()) {
127 return false;
128 }
129 const auto ®0 = static_cast<const RegOperand &>(op0);
130 const auto ®1 = static_cast<const RegOperand &>(op1);
131 return reg0.GetRegisterType() == reg1.GetRegisterType();
132 }
133
134 /* return true if opnd of bb is available. */
OpndAvailableInBB(const BB & bb,OpndInfo * info)135 bool Ebo::OpndAvailableInBB(const BB &bb, OpndInfo *info)
136 {
137 if (info == nullptr) {
138 return false;
139 }
140 if (info->opnd == nullptr) {
141 return false;
142 }
143
144 Operand *op = info->opnd;
145 if (IsConstantImmOrReg(*op)) {
146 return true;
147 }
148
149 int32 hashVal = 0;
150 if (op->IsRegShift() || op->IsRegister()) {
151 hashVal = -1;
152 } else {
153 hashVal = info->hashVal;
154 }
155 if (GetOpndInfo(*op, hashVal) != info) {
156 return false;
157 }
158 /* global operands aren't supported at low levels of optimization. */
159 if ((Globals::GetInstance()->GetOptimLevel() < CGOptions::kLevel2) && (&bb != info->bb)) {
160 return false;
161 }
162 if (beforeRegAlloc && IsPhysicalReg(*op)) {
163 return false;
164 }
165 return true;
166 }
167
ForwardPropCheck(const Operand * opndReplace,const OpndInfo & opndInfo,const Operand & opnd,Insn & insn)168 bool Ebo::ForwardPropCheck(const Operand *opndReplace, const OpndInfo &opndInfo, const Operand &opnd, Insn &insn)
169 {
170 if (opndReplace == nullptr) {
171 return false;
172 }
173 if ((opndInfo.replacementInfo != nullptr) && opndInfo.replacementInfo->redefined) {
174 return false;
175 }
176 #if TARGARM32
177 /* for arm32, disable forwardProp in strd insn. */
178 if (insn.GetMachineOpcode() == MOP_strd) {
179 return false;
180 }
181 if (opndInfo.mayReDef) {
182 return false;
183 }
184 #endif
185 if (!(IsConstantImmOrReg(*opndReplace) ||
186 ((OpndAvailableInBB(*insn.GetBB(), opndInfo.replacementInfo) || RegistersIdentical(opnd, *opndReplace)) &&
187 (HasAssignedReg(opnd) == HasAssignedReg(*opndReplace))))) {
188 return false;
189 }
190 /* if beforeRA, replace op should not be PhysicalRe */
191 return !beforeRegAlloc || !IsPhysicalReg(*opndReplace);
192 }
193
RegForwardCheck(Insn & insn,const Operand & opnd,const Operand * opndReplace,Operand & oldOpnd,const OpndInfo * tmpInfo)194 bool Ebo::RegForwardCheck(Insn &insn, const Operand &opnd, const Operand *opndReplace, Operand &oldOpnd,
195 const OpndInfo *tmpInfo)
196 {
197 if (IsConstantImmOrReg(opnd)) {
198 return false;
199 }
200 if (!(!beforeRegAlloc || (HasAssignedReg(oldOpnd) == HasAssignedReg(*opndReplace)) || IsZeroRegister(opnd) ||
201 !insn.IsMove())) {
202 return false;
203 }
204 std::set<regno_t> defRegs = insn.GetDefRegs();
205 if (!(defRegs.empty() ||
206 ((opnd.IsRegister() && !defRegs.count(static_cast<const RegOperand &>(opnd).GetRegisterNumber())) ||
207 !beforeRegAlloc))) {
208 return false;
209 }
210 if (!(beforeRegAlloc || !IsFrameReg(oldOpnd))) {
211 return false;
212 }
213 if (insn.GetBothDefUseOpnd() != kInsnMaxOpnd) {
214 return false;
215 }
216 if (IsPseudoRet(insn)) {
217 return false;
218 }
219
220 return ((IsOfSameClass(oldOpnd, *opndReplace) && (oldOpnd.GetSize() <= opndReplace->GetSize())) ||
221 ((tmpInfo != nullptr) && IsMovToSIMDVmov(insn, *tmpInfo->insn)));
222 }
223
224 /* For Memory Operand, its info was stored in a hash table, this function is to compute its hash value. */
ComputeOpndHash(const Operand & opnd) const225 int32 Ebo::ComputeOpndHash(const Operand &opnd) const
226 {
227 uint64 hashIdx = reinterpret_cast<uint64>(&opnd) >> k4ByteSize;
228 return static_cast<int32>(hashIdx % kEboOpndHashLength);
229 }
230
231 /* Store the operand information. Store it to the vRegInfo if is register. otherwise put it to the hash table. */
SetOpndInfo(const Operand & opnd,OpndInfo * opndInfo,int32 hashVal)232 void Ebo::SetOpndInfo(const Operand &opnd, OpndInfo *opndInfo, int32 hashVal)
233 {
234 /* opnd is Register or RegShift */
235 if (hashVal == -1) {
236 const RegOperand ® = GetRegOperand(opnd);
237 vRegInfo[reg.GetRegisterNumber()] = opndInfo;
238 return;
239 }
240
241 CHECK_FATAL(static_cast<uint64>(static_cast<int64>(hashVal)) < exprInfoTable.size(),
242 "SetOpndInfo hashval outof range!");
243 opndInfo->hashVal = hashVal;
244 opndInfo->hashNext = exprInfoTable.at(hashVal);
245 exprInfoTable.at(hashVal) = opndInfo;
246 }
247
248 /* Used to change the info of opnd from opndinfo to newinfo. */
UpdateOpndInfo(const Operand & opnd,OpndInfo & opndInfo,OpndInfo * newInfo,int32 hashVal)249 void Ebo::UpdateOpndInfo(const Operand &opnd, OpndInfo &opndInfo, OpndInfo *newInfo, int32 hashVal)
250 {
251 if (hashVal == -1) {
252 const RegOperand ® = GetRegOperand(opnd);
253 vRegInfo[reg.GetRegisterNumber()] = newInfo;
254 return;
255 }
256 DEBUG_ASSERT(static_cast<uint32>(hashVal) < exprInfoTable.size(), "SetOpndInfo hashval outof range!");
257 OpndInfo *info = exprInfoTable.at(hashVal);
258 if (newInfo != nullptr) {
259 newInfo->hashNext = opndInfo.hashNext;
260 opndInfo.hashNext = nullptr;
261 if (info == &opndInfo) {
262 exprInfoTable.at(hashVal) = newInfo;
263 return;
264 }
265 while (info != nullptr) {
266 if (info->hashNext == &opndInfo) {
267 info->hashNext = newInfo;
268 return;
269 }
270 info = info->hashNext;
271 }
272 return;
273 }
274 if (info == &opndInfo) {
275 exprInfoTable.at(hashVal) = opndInfo.hashNext;
276 return;
277 }
278 while (info != nullptr) {
279 if (info->hashNext == &opndInfo) {
280 info->hashNext = opndInfo.next;
281 opndInfo.hashNext = nullptr;
282 return;
283 }
284 info = info->hashNext;
285 }
286 }
287
288 /* return true if op1 op2 is equal */
OperandEqual(const Operand & op1,const Operand & op2) const289 bool Ebo::OperandEqual(const Operand &op1, const Operand &op2) const
290 {
291 if (&op1 == &op2) {
292 return true;
293 }
294 if (op1.GetKind() != op2.GetKind()) {
295 return false;
296 }
297 return OperandEqSpecial(op1, op2);
298 }
299
GetOpndInfo(const Operand & opnd,int32 hashVal) const300 OpndInfo *Ebo::GetOpndInfo(const Operand &opnd, int32 hashVal) const
301 {
302 if (hashVal < 0) {
303 const RegOperand ® = GetRegOperand(opnd);
304 auto it = vRegInfo.find(reg.GetRegisterNumber());
305 return it != vRegInfo.end() ? it->second : nullptr;
306 }
307 /* do not find prev memOpend */
308 if (opnd.IsMemoryAccessOperand()) {
309 return nullptr;
310 }
311 DEBUG_ASSERT(static_cast<uint32>(hashVal) < exprInfoTable.size(), "SetOpndInfo hashval outof range!");
312 OpndInfo *info = exprInfoTable.at(hashVal);
313 while (info != nullptr) {
314 if (&opnd == info->opnd) {
315 return info;
316 }
317 info = info->hashNext;
318 }
319 return nullptr;
320 }
321
322 /* Create a opndInfo for opnd. */
GetNewOpndInfo(BB & bb,Insn * insn,Operand & opnd,int32 hashVal)323 OpndInfo *Ebo::GetNewOpndInfo(BB &bb, Insn *insn, Operand &opnd, int32 hashVal)
324 {
325 OpndInfo *opndInfo = nullptr;
326 if (opnd.IsMemoryAccessOperand()) {
327 opndInfo = eboMp->New<MemOpndInfo>(opnd);
328 } else {
329 opndInfo = eboMp->New<OpndInfo>(opnd);
330 }
331 /* Initialize the entry. */
332 opndInfo->hashVal = hashVal;
333 opndInfo->opnd = &opnd;
334 opndInfo->bb = &bb;
335 opndInfo->insn = insn;
336 opndInfo->prev = lastOpndInfo;
337 if (firstOpndInfo == nullptr) {
338 firstOpndInfo = opndInfo;
339 } else {
340 lastOpndInfo->next = opndInfo;
341 }
342 lastOpndInfo = opndInfo;
343 return opndInfo;
344 }
345
346 /* Update the use infomation for localOpnd because of its use insn currentInsn. */
OperandInfoUse(BB & currentBB,Operand & localOpnd)347 OpndInfo *Ebo::OperandInfoUse(BB ¤tBB, Operand &localOpnd)
348 {
349 if (!(localOpnd.IsRegister() || localOpnd.IsRegShift()) && !localOpnd.IsMemoryAccessOperand()) {
350 return nullptr;
351 }
352 int hashVal = 0;
353 /* only arm32 has regShift */
354 if (localOpnd.IsRegister() || localOpnd.IsRegShift()) {
355 hashVal = -1;
356 } else {
357 hashVal = ComputeOpndHash(localOpnd);
358 }
359 OpndInfo *opndInfo = GetOpndInfo(localOpnd, hashVal);
360
361 if (opndInfo == nullptr) {
362 opndInfo = GetNewOpndInfo(currentBB, nullptr, localOpnd, hashVal);
363 SetOpndInfo(localOpnd, opndInfo, hashVal);
364 }
365 IncRef(*opndInfo);
366 return opndInfo;
367 }
368
369 /* return true if op0 is identical with op1 */
RegistersIdentical(const Operand & op0,const Operand & op1) const370 bool Ebo::RegistersIdentical(const Operand &op0, const Operand &op1) const
371 {
372 if (&op0 == &op1) {
373 return true;
374 }
375 if (!(op0.IsRegister() && op1.IsRegister())) {
376 return false;
377 }
378 const RegOperand ®0 = static_cast<const RegOperand &>(op0);
379 const RegOperand ®1 = static_cast<const RegOperand &>(op1);
380 return ((reg0.IsPhysicalRegister() || !IsInvalidReg(reg0)) && (reg1.IsPhysicalRegister() || !IsInvalidReg(reg1)) &&
381 (reg0.GetRegisterType() == reg1.GetRegisterType()) &&
382 (reg0.GetRegisterNumber() == reg1.GetRegisterNumber()));
383 }
384
GetNewInsnInfo(Insn & insn)385 InsnInfo *Ebo::GetNewInsnInfo(Insn &insn)
386 {
387 InsnInfo *insnInfo = eboMp->New<InsnInfo>(*eboMp, insn);
388 insnInfo->prev = lastInsnInfo;
389 if (firstInsnInfo == nullptr) {
390 firstInsnInfo = insnInfo;
391 } else {
392 lastInsnInfo->next = insnInfo;
393 }
394 lastInsnInfo = insnInfo;
395 insnInfo->next = nullptr;
396 return insnInfo;
397 }
398
ComputeHashVal(Insn & insn,const MapleVector<OpndInfo * > & opndInfos) const399 uint32 Ebo::ComputeHashVal(Insn &insn, const MapleVector<OpndInfo *> &opndInfos) const
400 {
401 uint32 hashVal = 0;
402 if (insn.AccessMem()) {
403 hashVal = kEboDefaultMemHash;
404 if (insn.NoAlias()) {
405 hashVal = kEboNoAliasMemHash;
406 }
407 MemOperand *memOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
408 if (memOpnd != nullptr) {
409 Operand *baseReg = memOpnd->GetBaseRegister();
410 if ((baseReg != nullptr) && IsFrameReg(*baseReg)) {
411 hashVal = kEboSpillMemHash;
412 }
413 }
414 } else if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(insn)) {
415 hashVal = kEboCopyInsnHash;
416 } else {
417 uint32 opndNum = insn.GetOperandSize();
418 hashVal = insn.GetMachineOpcode();
419 for (uint32 i = 0; i < opndNum; ++i) {
420 hashVal += static_cast<uint32>(reinterpret_cast<uintptr_t>(opndInfos.at(i)));
421 }
422 hashVal = static_cast<uint32>(kEboReservedInsnHash + EBO_EXP_INSN_HASH(hashVal));
423 }
424 return hashVal;
425 }
426
427 /* computeHashVal of insn */
HashInsn(Insn & insn,const MapleVector<OpndInfo * > & origInfo,const MapleVector<OpndInfo * > & opndInfos)428 void Ebo::HashInsn(Insn &insn, const MapleVector<OpndInfo *> &origInfo, const MapleVector<OpndInfo *> &opndInfos)
429 {
430 uint32 hashVal = ComputeHashVal(insn, opndInfos);
431 /* Create a new insnInfo entry and add the new insn to the hash table. */
432 InsnInfo *insnInfo = GetNewInsnInfo(insn);
433 insnInfo->bb = insn.GetBB();
434 insnInfo->insn = &insn;
435 insnInfo->hashIndex = hashVal;
436 insnInfo->same = insnInfoTable.at(hashVal);
437
438 if (!beforeRegAlloc) {
439 if ((insn.IsCall() || insn.IsTailCall() || insn.IsAsmInsn()) && !insn.GetIsThrow()) {
440 DefineCallerSaveRegisters(*insnInfo);
441 } else if (IsClinitCheck(insn)) {
442 DefineClinitSpecialRegisters(*insnInfo);
443 }
444 }
445 uint32 opndNum = insn.GetOperandSize();
446 for (uint32 i = 0; i < opndNum; ++i) {
447 /* Copy all the opndInfo entries for the operands. */
448 insnInfo->origOpnd.emplace_back(origInfo.at(i));
449 insnInfo->optimalOpnd.emplace_back(opndInfos.at(i));
450 /* Keep the result info. */
451 if (!insn.OpndIsDef(i)) {
452 continue;
453 }
454 auto genOpndInfoDef = [this, insnInfo](Operand &op) {
455 OpndInfo *opndInfo = nullptr;
456 if ((&op != TRUE_OPND) &&
457 ((op.IsRegister() && (&op) != GetZeroOpnd(op.GetSize())) ||
458 (op.IsMemoryAccessOperand() && (static_cast<MemOperand &>(op)).GetBaseRegister() != nullptr))) {
459 opndInfo = OperandInfoDef(*insnInfo->bb, *insnInfo->insn, op);
460 opndInfo->insnInfo = insnInfo;
461 }
462 insnInfo->result.emplace_back(opndInfo);
463 };
464 Operand &op = insn.GetOperand(i);
465 if (op.IsList() && !static_cast<ListOperand &>(op).GetOperands().empty()) {
466 for (auto operand : static_cast<ListOperand &>(op).GetOperands()) {
467 genOpndInfoDef(*operand);
468 }
469 } else {
470 genOpndInfoDef(op);
471 }
472 }
473 SetInsnInfo(hashVal, *insnInfo);
474 }
475
476 /* do decref of orig_info, refCount will be set to 0 */
RemoveUses(uint32 opndNum,const MapleVector<OpndInfo * > & origInfo)477 void Ebo::RemoveUses(uint32 opndNum, const MapleVector<OpndInfo *> &origInfo)
478 {
479 OpndInfo *info = nullptr;
480 for (uint32 i = 0; i < opndNum; ++i) {
481 info = origInfo.at(i);
482 if (info != nullptr) {
483 DecRef(*info);
484 if (info->opnd->IsMemoryAccessOperand()) {
485 MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(info);
486 OpndInfo *baseInfo = memInfo->GetBaseInfo();
487 OpndInfo *offsetInfo = memInfo->GetOffsetInfo();
488 if (baseInfo != nullptr) {
489 DecRef(*baseInfo);
490 }
491 if (offsetInfo != nullptr) {
492 DecRef(*offsetInfo);
493 }
494 }
495 }
496 }
497 }
498
BuildMemOpndInfo(BB & bb,Insn & insn,Operand & opnd,uint32 opndIndex)499 OpndInfo *Ebo::BuildMemOpndInfo(BB &bb, Insn &insn, Operand &opnd, uint32 opndIndex)
500 {
501 auto *memOpnd = static_cast<MemOperand *>(&opnd);
502 Operand *base = memOpnd->GetBaseRegister();
503 Operand *offset = memOpnd->GetOffset();
504 OpndInfo *baseInfo = nullptr;
505 OpndInfo *offsetInfo = nullptr;
506 if (base != nullptr) {
507 if (!memOpnd->IsIntactIndexed()) {
508 baseInfo = OperandInfoUse(bb, *base);
509 baseInfo = OperandInfoDef(bb, insn, *base);
510 return baseInfo;
511 } else {
512 baseInfo = OperandInfoUse(bb, *base);
513 }
514 /* forward prop for base register. */
515 if ((baseInfo != nullptr) && base->IsRegister()) {
516 auto *baseReg = static_cast<RegOperand *>(base);
517 Operand *replaceOpnd = baseInfo->replacementOpnd;
518 OpndInfo *replaceInfo = baseInfo->replacementInfo;
519 if ((replaceInfo != nullptr) && (replaceOpnd != nullptr) && !cgFunc->IsSPOrFP(*baseReg) &&
520 (!beforeRegAlloc || (!IsPhysicalReg(*replaceOpnd) && !IsPhysicalReg(*base))) &&
521 IsOfSameClass(*base, *replaceOpnd) && memOpnd->IsIntactIndexed() &&
522 (base->GetSize() <= replaceOpnd->GetSize()) &&
523 /* In case that replace opnd was redefined. */
524 !replaceInfo->redefined) {
525 MemOperand *newMem = static_cast<MemOperand *>(memOpnd->Clone(*cgFunc->GetMemoryPool()));
526 CHECK_FATAL(newMem != nullptr, "newMem is null in Ebo::BuildAllInfo(BB *bb)");
527 newMem->SetBaseRegister(*static_cast<RegOperand *>(replaceOpnd));
528 insn.SetOperand(opndIndex, *newMem);
529 DecRef(*baseInfo);
530 IncRef(*replaceInfo);
531 baseInfo = replaceInfo;
532 }
533 }
534 }
535 if ((offset != nullptr) && offset->IsRegister()) {
536 offsetInfo = OperandInfoUse(bb, *offset);
537 }
538 OpndInfo *opndInfo = OperandInfoUse(bb, insn.GetOperand(opndIndex));
539 CHECK_FATAL(opndInfo != nullptr, "opndInfo should not be null ptr");
540 MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(opndInfo);
541 if (baseInfo != nullptr) {
542 memInfo->SetBaseInfo(*baseInfo);
543 }
544 if (offsetInfo != nullptr) {
545 memInfo->SetOffsetInfo(*offsetInfo);
546 }
547 return memInfo;
548 }
549
BuildOperandInfo(BB & bb,Insn & insn,Operand & opnd,uint32 opndIndex,MapleVector<OpndInfo * > & origInfos)550 OpndInfo *Ebo::BuildOperandInfo(BB &bb, Insn &insn, Operand &opnd, uint32 opndIndex, MapleVector<OpndInfo *> &origInfos)
551 {
552 if (opnd.IsList()) {
553 ListOperand *listOpnd = static_cast<ListOperand *>(&opnd);
554 for (auto op : listOpnd->GetOperands()) {
555 OperandInfoUse(bb, *op);
556 }
557 return nullptr;
558 }
559 DEBUG_ASSERT(opndIndex < origInfos.size(), "SetOpndInfo hashval outof range!");
560 if (opnd.IsConditionCode()) {
561 Operand &rFlag = cgFunc->GetOrCreateRflag();
562 OperandInfoUse(bb, rFlag);
563 /* if operand is Opnd_cond, the orig_info store the info of rFlag. */
564 OpndInfo *tempOpndInfo = GetOpndInfo(rFlag, -1);
565 origInfos.at(opndIndex) = tempOpndInfo;
566 return nullptr;
567 }
568
569 if (!(opnd.IsRegister() || opnd.IsRegShift()) && !opnd.IsMemoryAccessOperand()) {
570 return nullptr;
571 }
572
573 if (opnd.IsMemoryAccessOperand()) {
574 OpndInfo *memInfo = BuildMemOpndInfo(bb, insn, opnd, opndIndex);
575 CHECK_FATAL(memInfo != nullptr, "build memopnd info failed in Ebo::BuildAllInfo");
576 origInfos.at(opndIndex) = memInfo;
577 return nullptr;
578 }
579 OpndInfo *opndInfo = OperandInfoUse(bb, opnd);
580 origInfos.at(opndIndex) = opndInfo;
581 return opndInfo;
582 }
583
ForwardPropagateOpnd(Insn & insn,Operand * & opnd,uint32 opndIndex,OpndInfo * & opndInfo,MapleVector<OpndInfo * > & origInfos)584 bool Ebo::ForwardPropagateOpnd(Insn &insn, Operand *&opnd, uint32 opndIndex, OpndInfo *&opndInfo,
585 MapleVector<OpndInfo *> &origInfos)
586 {
587 CHECK_FATAL(opnd != nullptr, "nullptr check");
588 Operand *opndReplace = opndInfo->replacementOpnd;
589 /* Don't propagate physical registers before register allocation. */
590 if (beforeRegAlloc && (opndReplace != nullptr) && (IsPhysicalReg(*opndReplace) || IsPhysicalReg(*opnd))) {
591 return false;
592 }
593
594 /* forward propagation of constants */
595 CHECK_FATAL(opndIndex < origInfos.size(), "SetOpndInfo hashval outof range!");
596 if (!ForwardPropCheck(opndReplace, *opndInfo, *opnd, insn)) {
597 return false;
598 }
599 Operand *oldOpnd = opnd;
600 opnd = opndInfo->replacementOpnd;
601 opndInfo = opndInfo->replacementInfo;
602
603 /* constant prop. */
604 if (opnd->IsIntImmediate() && oldOpnd->IsRegister()) {
605 if (DoConstProp(insn, opndIndex, *opnd)) {
606 DecRef(*origInfos.at(opndIndex));
607 /* Update the actual expression info. */
608 origInfos.at(opndIndex) = opndInfo;
609 }
610 }
611 /* move reg, wzr, store vreg, mem ==> store wzr, mem */
612 #if TARGAARCH64 || TARGRISCV64
613 if (IsZeroRegister(*opnd) && opndIndex == 0 &&
614 (insn.GetMachineOpcode() == MOP_wstr || insn.GetMachineOpcode() == MOP_xstr)) {
615 if (EBO_DUMP) {
616 LogInfo::MapleLogger() << "===replace operand " << opndIndex << " of insn: \n";
617 insn.Dump();
618 LogInfo::MapleLogger() << "the new insn is:\n";
619 }
620 insn.SetOperand(opndIndex, *opnd);
621 DecRef(*origInfos.at(opndIndex));
622 /* Update the actual expression info. */
623 origInfos.at(opndIndex) = opndInfo;
624 if (EBO_DUMP) {
625 insn.Dump();
626 }
627 }
628 #endif
629 /* forward prop for registers. */
630 if (!RegForwardCheck(insn, *opnd, opndReplace, *oldOpnd, origInfos.at(opndIndex))) {
631 return false;
632 }
633 /* Copies to and from the same register are not needed. */
634 if (!beforeRegAlloc && Globals::GetInstance()->GetTarget()->IsEffectiveCopy(insn) &&
635 (opndIndex == kInsnSecondOpnd) && RegistersIdentical(*opnd, insn.GetOperand(kInsnFirstOpnd))) {
636 if (EBO_DUMP) {
637 LogInfo::MapleLogger() << "===replace operand " << opndIndex << " of insn: \n";
638 insn.Dump();
639 LogInfo::MapleLogger() << "===Remove the new insn because Copies to and from the same register. \n";
640 }
641 return true;
642 }
643 if (static_cast<RegOperand *>(opnd)->GetRegisterNumber() == RSP) {
644 /* Disallow optimization with stack pointer */
645 return false;
646 }
647
648 if (EBO_DUMP) {
649 LogInfo::MapleLogger() << "===replace operand " << opndIndex << " of insn: \n";
650 insn.Dump();
651 LogInfo::MapleLogger() << "the new insn is:\n";
652 }
653 DecRef(*origInfos.at(opndIndex));
654 insn.SetOperand(opndIndex, *opnd);
655
656 if (EBO_DUMP) {
657 insn.Dump();
658 }
659 IncRef(*opndInfo);
660 /* Update the actual expression info. */
661 origInfos.at(opndIndex) = opndInfo;
662 /* extend the live range of the replacement operand. */
663 if ((opndInfo->bb != insn.GetBB()) && opnd->IsRegister()) {
664 MarkOpndLiveIntoBB(*opnd, *insn.GetBB(), *opndInfo->bb);
665 }
666 return false;
667 }
668
669 /*
670 * this func do only one of the following optimization:
671 * 1. Remove DupInsns
672 * 2. SpecialSequence OPT
673 * 3. Remove Redundant "Load"
674 * 4. Constant Fold
675 */
SimplifyInsn(Insn & insn,bool & insnReplaced,bool opndsConstant,const MapleVector<Operand * > & opnds,const MapleVector<OpndInfo * > & opndInfos,const MapleVector<OpndInfo * > & origInfos)676 void Ebo::SimplifyInsn(Insn &insn, bool &insnReplaced, bool opndsConstant, const MapleVector<Operand *> &opnds,
677 const MapleVector<OpndInfo *> &opndInfos, const MapleVector<OpndInfo *> &origInfos)
678 {
679 if (insn.AccessMem()) {
680 if (!insnReplaced) {
681 insnReplaced = SpecialSequence(insn, origInfos);
682 }
683 return;
684 }
685 if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(insn)) {
686 if (!insnReplaced) {
687 insnReplaced = SpecialSequence(insn, opndInfos);
688 }
689 return;
690 }
691 if (!insnReplaced && !insn.HasSideEffects()) {
692 uint32 opndNum = insn.GetOperandSize();
693 if (opndsConstant && (opndNum > 1)) {
694 if (!insn.GetDefRegs().empty()) {
695 insnReplaced = Csel2Cset(insn, opnds);
696 }
697 }
698 if (insnReplaced) {
699 return;
700 }
701 if (opndNum > 1) {
702 /* special case */
703 if (!insn.GetDefRegs().empty() && ResIsNotDefAndUse(insn)) {
704 if ((opndNum == kInsnFourthOpnd) && (insn.GetDefRegs().size() == 1) &&
705 (((kInsnSecondOpnd < opnds.size()) && (opnds[kInsnSecondOpnd] != nullptr) &&
706 IsConstantImmOrReg(*opnds[kInsnSecondOpnd])) ||
707 ((kInsnThirdOpnd < opnds.size()) && (opnds[kInsnThirdOpnd] != nullptr) &&
708 IsConstantImmOrReg(*opnds[kInsnThirdOpnd])))) {
709 insnReplaced = SimplifyConstOperand(insn, opnds, opndInfos);
710 }
711 }
712 if (!insnReplaced) {
713 insnReplaced = SpecialSequence(insn, origInfos);
714 }
715 }
716 }
717 }
718
719 /*
720 * this func do:
721 * 1. delete DupInsn if SimplifyInsn failed.
722 * 2. buildInsnInfo if delete DupInsn failed(func HashInsn do this).
723 * 3. update replaceInfo.
724 */
FindRedundantInsns(BB & bb,Insn * & insn,const Insn * prev,bool insnReplaced,MapleVector<Operand * > & opnds,MapleVector<OpndInfo * > & opndInfos,const MapleVector<OpndInfo * > & origInfos)725 void Ebo::FindRedundantInsns(BB &bb, Insn *&insn, const Insn *prev, bool insnReplaced, MapleVector<Operand *> &opnds,
726 MapleVector<OpndInfo *> &opndInfos, const MapleVector<OpndInfo *> &origInfos)
727 {
728 CHECK_FATAL(insn != nullptr, "nullptr check");
729 if (!insnReplaced) {
730 CHECK_FATAL(origInfos.size() != 0, "null ptr check");
731 CHECK_FATAL(opndInfos.size() != 0, "null ptr check");
732 HashInsn(*insn, origInfos, opndInfos);
733 /* Processing the result of the insn. */
734 if ((Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn) || !insn->GetDefRegs().empty()) &&
735 !insn->IsSpecialIntrinsic()) {
736 Operand *res = &insn->GetOperand(kInsnFirstOpnd);
737 if ((res != nullptr) && (res != TRUE_OPND) && (res != GetZeroOpnd(res->GetSize()))) {
738 CHECK_FATAL(lastInsnInfo != nullptr, "lastInsnInfo is null!");
739 OpndInfo *opndInfo = lastInsnInfo->result[0];
740 /* Don't propagate for fmov insns. */
741 if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn) && (opndInfo != nullptr) &&
742 !IsFmov(*insn)) {
743 CHECK_FATAL(!opnds.empty(), "null container!");
744 opndInfo->replacementOpnd = opnds[kInsnSecondOpnd];
745 opndInfo->replacementInfo = opndInfos[kInsnSecondOpnd];
746 } else if (insn->GetBothDefUseOpnd() != kInsnMaxOpnd && (opndInfo != nullptr)) {
747 opndInfo->replacementOpnd = nullptr;
748 opndInfo->replacementInfo = nullptr;
749 }
750 }
751 }
752 insn = insn->GetNext();
753 } else {
754 uint32 opndNum = insn->GetOperandSize();
755 RemoveUses(opndNum, origInfos);
756 /* If insn is replaced, reanalyze the new insn to have more opportunities. */
757 insn = (prev == nullptr ? bb.GetFirstInsn() : prev->GetNext());
758 }
759 }
760
PreProcessSpecialInsn(Insn & insn)761 void Ebo::PreProcessSpecialInsn(Insn &insn)
762 {
763 DefineReturnUseRegister(insn);
764
765 if (insn.IsCall() || insn.IsClinit()) {
766 DefineCallUseSpecialRegister(insn);
767 }
768 }
769
770 /*
771 * this func do :
772 * 1.build opereand info of bb;
773 * 2.do Forward propagation after regalloc;
774 * 3.simplify the insn,include Constant folding,redundant insns elimination.
775 */
BuildAllInfo(BB & bb)776 void Ebo::BuildAllInfo(BB &bb)
777 {
778 if (EBO_DUMP) {
779 LogInfo::MapleLogger() << "===Enter BuildOperandinfo of bb:" << bb.GetId() << "===\n";
780 }
781 Insn *insn = bb.GetFirstInsn();
782 while ((insn != nullptr) && (insn != bb.GetLastInsn()->GetNext())) {
783 if (!insn->IsTargetInsn()) {
784 insn = insn->GetNext();
785 continue;
786 }
787 PreProcessSpecialInsn(*insn);
788 uint32 opndNum = insn->GetOperandSize();
789 if (!insn->IsMachineInstruction() || opndNum == 0) {
790 insn = insn->GetNext();
791 continue;
792 }
793 MapleVector<Operand *> opnds(eboAllocator.Adapter());
794 MapleVector<OpndInfo *> opndInfos(eboAllocator.Adapter());
795 MapleVector<OpndInfo *> origInfos(eboAllocator.Adapter());
796 Insn *prev = insn->GetPrev();
797 bool insnReplaced = false;
798 bool opndsConstant = true;
799 /* start : Process all the operands. */
800 for (uint32 i = 0; i < opndNum; ++i) {
801 if (!insn->OpndIsUse(i)) {
802 opnds.emplace_back(nullptr);
803 opndInfos.emplace_back(nullptr);
804 origInfos.emplace_back(nullptr);
805 continue;
806 }
807 Operand *opnd = &(insn->GetOperand(i));
808 opnds.emplace_back(opnd);
809 opndInfos.emplace_back(nullptr);
810 origInfos.emplace_back(nullptr);
811 if (IsConstantImmOrReg(*opnd)) {
812 continue;
813 }
814 OpndInfo *opndInfo = BuildOperandInfo(bb, *insn, *opnd, i, origInfos);
815 if (opndInfo == nullptr) {
816 continue;
817 }
818
819 /* Don't do propagation for special intrinsic insn. */
820 if (!insn->IsSpecialIntrinsic()) {
821 insnReplaced = ForwardPropagateOpnd(*insn, opnd, i, opndInfo, origInfos);
822 }
823 if (insnReplaced) {
824 continue;
825 }
826 opnds.at(i) = opnd;
827 opndInfos.at(i) = opndInfo;
828 if (!IsConstantImmOrReg(*opnd)) {
829 opndsConstant = false;
830 }
831 } /* End : Process all the operands. */
832 #if TARGARM32
833 Arm32Insn *currArm32Insn = static_cast<Arm32Insn *>(insn);
834 if (currArm32Insn->IsCondExecution()) {
835 Operand &rFlag = cgFunc->GetOrCreateRflag();
836 OperandInfoUse(bb, rFlag);
837 }
838 #endif
839
840 if (insnReplaced) {
841 RemoveUses(opndNum, origInfos);
842 Insn *temp = insn->GetNext();
843 bb.RemoveInsn(*insn);
844 insn = temp;
845 continue;
846 }
847
848 /* simplify the insn. */
849 if (!insn->IsSpecialIntrinsic()) {
850 SimplifyInsn(*insn, insnReplaced, opndsConstant, opnds, opndInfos, origInfos);
851 }
852 FindRedundantInsns(bb, insn, prev, insnReplaced, opnds, opndInfos, origInfos);
853 }
854 }
855
856 /* Decrement the use counts for the actual operands of an insnInfo. */
RemoveInsn(InsnInfo & info)857 void Ebo::RemoveInsn(InsnInfo &info)
858 {
859 Insn *insn = info.insn;
860 CHECK_FATAL(insn != nullptr, "get insn in info failed in Ebo::RemoveInsn");
861 uint32 opndNum = insn->GetOperandSize();
862 OpndInfo *opndInfo = nullptr;
863 for (uint32 i = 0; i < opndNum; i++) {
864 if (!insn->OpndIsUse(i)) {
865 continue;
866 }
867 opndInfo = info.origOpnd[i];
868 if (opndInfo != nullptr) {
869 DecRef(*opndInfo);
870 Operand *opndTemp = opndInfo->opnd;
871 if (opndTemp == nullptr) {
872 continue;
873 }
874 if (opndTemp->IsMemoryAccessOperand()) {
875 MemOpndInfo *memInfo = static_cast<MemOpndInfo *>(opndInfo);
876 OpndInfo *baseInfo = memInfo->GetBaseInfo();
877 OpndInfo *offInfo = memInfo->GetOffsetInfo();
878 if (baseInfo != nullptr) {
879 DecRef(*baseInfo);
880 }
881 if (offInfo != nullptr) {
882 DecRef(*offInfo);
883 }
884 }
885 }
886 }
887 #if TARGARM32
888 Arm32CGFunc *a32CGFunc = static_cast<Arm32CGFunc *>(cgFunc);
889 auto &gotInfosMap = a32CGFunc->GetGotInfosMap();
890 for (auto it = gotInfosMap.begin(); it != gotInfosMap.end();) {
891 if (it->first == insn) {
892 it = gotInfosMap.erase(it);
893 } else {
894 ++it;
895 }
896 }
897 auto &constInfosMap = a32CGFunc->GetConstInfosMap();
898 for (auto it = constInfosMap.begin(); it != constInfosMap.end();) {
899 if (it->first == insn) {
900 it = constInfosMap.erase(it);
901 } else {
902 ++it;
903 }
904 }
905 #endif
906 }
907
908 /* Mark opnd is live between def bb and into bb. */
MarkOpndLiveIntoBB(const Operand & opnd,BB & into,BB & def) const909 void Ebo::MarkOpndLiveIntoBB(const Operand &opnd, BB &into, BB &def) const
910 {
911 if (live == nullptr) {
912 return;
913 }
914 if (&into == &def) {
915 return;
916 }
917 CHECK_FATAL(opnd.IsRegister(), "expect register here.");
918 const RegOperand ® = static_cast<const RegOperand &>(opnd);
919 into.SetLiveInBit(reg.GetRegisterNumber());
920 def.SetLiveOutBit(reg.GetRegisterNumber());
921 }
922
923 /* return insn information if has insnInfo,else,return lastInsnInfo */
LocateInsnInfo(const OpndInfo & info)924 InsnInfo *Ebo::LocateInsnInfo(const OpndInfo &info)
925 {
926 if (info.insn != nullptr) {
927 if (info.insnInfo != nullptr) {
928 return info.insnInfo;
929 } else {
930 InsnInfo *insnInfo = lastInsnInfo;
931 int32 limit = 50;
932 for (; (insnInfo != nullptr) && (limit != 0); insnInfo = insnInfo->prev, limit--) {
933 if (insnInfo->insn == info.insn) {
934 return insnInfo;
935 }
936 }
937 }
938 }
939 return nullptr;
940 }
941
/* redundant insns elimination.
 * Walks the insnInfo list of `bb` backwards (uses before defs) and removes
 * every insn whose results are provably dead. `normal` is false when the
 * caller only wants the conservative bookkeeping (no refCount-based removal).
 * Control flow is goto-driven: each insn either falls through to
 * `can_be_removed` or jumps to `insn_is_needed`. */
void Ebo::RemoveUnusedInsns(BB &bb, bool normal)
{
    OpndInfo *opndInfo = nullptr;
    Operand *opnd = nullptr;

    if (firstInsnInfo == nullptr) {
        return;
    }

    for (InsnInfo *insnInfo = lastInsnInfo; insnInfo != nullptr; insnInfo = insnInfo->prev) {
        Insn *insn = insnInfo->insn;
        /* Entry may have been cleared by an earlier removal. */
        if ((insn == nullptr) || (insn->GetBB() == nullptr)) {
            continue;
        }
        /* stop looking for insn when it goes out of bb. */
        if (insn->GetBB() != &bb) {
            break;
        }

        uint32 resNum = insn->GetDefRegs().size();
        /* A trailing branch controls CFG shape and must stay. */
        if (IsLastAndBranch(bb, *insn)) {
            goto insn_is_needed;
        }

        if (insn->IsClinit()) {
            goto insn_is_needed;
        }

        /* No results, globally visible effects, stores, or def-use-merged
         * operands: never removable. */
        if ((resNum == 0) || IsGlobalNeeded(*insn) || insn->IsStore() || IsDecoupleStaticOp(*insn) ||
            insn->GetBothDefUseOpnd() != kInsnMaxOpnd) {
            goto insn_is_needed;
        }

        /* last insn of a 64x1 function is a float, 64x1 function may not be a float */
        if (cgFunc->GetFunction().GetAttr(FUNCATTR_oneelem_simd) && insnInfo == lastInsnInfo) {
            goto insn_is_needed;
        }

        /* Inline asm and atomics have effects the analysis cannot model. */
        if (insn->GetMachineOpcode() == MOP_asm || insn->IsAtomic()) {
            goto insn_is_needed;
        }

        /* Check all result that can be removed. */
        for (uint32 i = 0; i < resNum; ++i) {
            opndInfo = insnInfo->result[i];
            /* A couple of checks. */
            if (opndInfo == nullptr) {
                continue;
            }
            if ((opndInfo->bb != &bb) || (opndInfo->insn == nullptr)) {
                goto insn_is_needed;
            }
            opnd = opndInfo->opnd;
            /* Writes to the architectural zero register are no-ops. */
            if (opnd == GetZeroOpnd(opnd->GetSize())) {
                continue;
            }
            /* this part optimize some spacial case after RA. */
            if (!beforeRegAlloc && Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn) && opndInfo &&
                insn->GetOperand(kInsnSecondOpnd).IsImmediate() && IsSameRedefine(bb, *insn, *opndInfo)) {
                goto can_be_removed;
            }
            /* end special case optimize */
            if ((beforeRegAlloc && IsPhysicalReg(*opnd)) || (IsSaveReg(*opnd) && !opndInfo->redefinedInBB)) {
                goto insn_is_needed;
            }
            /* Copies to and from the same register are not needed. */
            if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(*insn)) {
                if (HasAssignedReg(*opnd) && HasAssignedReg(insn->GetOperand(kInsnSecondOpnd)) &&
                    RegistersIdentical(*opnd, insn->GetOperand(kInsnSecondOpnd))) {
                    /* We may be able to get rid of the copy, but be sure that the operand is marked live into this
                     * block. */
                    if ((insnInfo->origOpnd[kInsnSecondOpnd] != nullptr) &&
                        (&bb != insnInfo->origOpnd[kInsnSecondOpnd]->bb)) {
                        MarkOpndLiveIntoBB(*opnd, bb, *insnInfo->origOpnd[kInsnSecondOpnd]->bb);
                    }
                    /* propagate use count for this opnd to it's input operand. */
                    if (opndInfo->same != nullptr) {
                        opndInfo->same->refCount += opndInfo->refCount;
                    }

                    /* remove the copy causes the previous def to reach the end of the block. */
                    if (!opndInfo->redefined && (opndInfo->same != nullptr)) {
                        opndInfo->same->redefined = false;
                        opndInfo->same->redefinedInBB = false;
                    }
                    goto can_be_removed;
                }
            }
            /* there must bo no direct references to the operand. */
            if (!normal || (opndInfo->refCount != 0)) {
                goto insn_is_needed;
            }
            /*
             * When O1, the vreg who live out of bb should be recognized.
             * The regs for clinit is also be marked to recognize it can't be deleted. so extend it to O2.
             */
            if (opnd->IsRegister()) {
                RegOperand *reg = static_cast<RegOperand *>(opnd);
                if (beforeRegAlloc && !reg->IsBBLocalVReg()) {
                    goto insn_is_needed;
                }
            }
            /* Volatile || sideeffect */
            if (opndInfo->insn->IsVolatile() || opndInfo->insn->HasSideEffects()) {
                goto insn_is_needed;
            }

            /* Result escapes this BB without being redefined: still live. */
            if (!opndInfo->redefinedInBB && LiveOutOfBB(*opnd, *opndInfo->bb)) {
                goto insn_is_needed;
            }

            /* Redefining insn both defs and uses the operand, so the value
             * produced here is actually consumed. */
            if (opndInfo->redefinedInBB && opndInfo->redefinedInsn != nullptr &&
                opndInfo->redefinedInsn->GetBothDefUseOpnd() != kInsnMaxOpnd) {
                goto insn_is_needed;
            }
        }

        if (!normal || insnInfo->mustNotBeRemoved || insn->GetDoNotRemove()) {
            goto insn_is_needed;
        }
    can_be_removed:
        if (EBO_DUMP) {
            LogInfo::MapleLogger() << "< ==== Remove Unused insn in bb:" << bb.GetId() << "====\n";
            insn->Dump();
        }
        RemoveInsn(*insnInfo);
        bb.RemoveInsn(*insn);
        insnInfo->insn = nullptr;
        insnInfo->bb = nullptr;
        /* Push redefinition state down the "same" chains so earlier defs see
         * the effect of this removal. */
        for (uint32 i = 0; i < resNum; i++) {
            opndInfo = insnInfo->result[i];
            if (opndInfo == nullptr) {
                continue;
            }
            if (opndInfo->redefined && (opndInfo->same != nullptr)) {
                OpndInfo *next = opndInfo->same;
                next->redefined = true;
                if (opndInfo->redefinedInBB && (opndInfo->same->bb == &bb)) {
                    next->redefinedInBB = true;
                }
            }
            if (!opndInfo->redefinedInBB && (opndInfo->same != nullptr) && (opndInfo->same->bb == &bb)) {
                opndInfo->same->redefinedInBB = false;
            }
            if (!opndInfo->redefined && (opndInfo->same != nullptr)) {
                opndInfo->same->redefined = false;
                opndInfo->same->redefinedInBB = false;
            }
        }
        optSuccess = true;
        continue;
    insn_is_needed:
        /* EH edges make liveness conservative: pin the defs reachable through
         * the "same" chains so they are not deleted later. */
        if (!bb.GetEhSuccs().empty()) {
            for (uint32 i = 0; i < resNum; i++) {
                opndInfo = insnInfo->result[i];
                if ((opndInfo != nullptr) && (opndInfo->opnd != nullptr) && (opndInfo->same != nullptr)) {
                    UpdateNextInfo(*opndInfo);
                }
            }
        }

        if (!bb.GetEhPreds().empty()) {
            for (uint32 i = 0; i < insn->GetOperandSize(); ++i) {
                opndInfo = insnInfo->origOpnd[i];
                if ((opndInfo != nullptr) && (opndInfo->opnd != nullptr) && (opndInfo->same != nullptr)) {
                    UpdateNextInfo(*opndInfo);
                }
                if ((opndInfo != nullptr) && opndInfo->opnd && (&bb != opndInfo->bb) && opndInfo->opnd->IsRegister()) {
                    MarkOpndLiveIntoBB(*opndInfo->opnd, bb, *opndInfo->bb);
                }
            }
        }
    } /* end proccess insnInfo in currBB */
}
1117
UpdateNextInfo(const OpndInfo & opndInfo)1118 void Ebo::UpdateNextInfo(const OpndInfo &opndInfo)
1119 {
1120 OpndInfo *nextInfo = opndInfo.same;
1121 while (nextInfo != nullptr) {
1122 if (nextInfo->insn != nullptr) {
1123 InsnInfo *info = LocateInsnInfo(*nextInfo);
1124 if (info != nullptr) {
1125 info->mustNotBeRemoved = true;
1126 } else {
1127 /*
1128 * Couldn't find the insnInfo entry. Make sure that the operand has
1129 * a use count so that the defining insn will not be deleted.
1130 */
1131 nextInfo->refCount += opndInfo.refCount;
1132 }
1133 }
1134 nextInfo = nextInfo->same;
1135 }
1136 }
1137
1138 /* back up to last saved OpndInfo */
BackupOpndInfoList(OpndInfo * saveLast)1139 void Ebo::BackupOpndInfoList(OpndInfo *saveLast)
1140 {
1141 if (lastOpndInfo == saveLast) {
1142 return;
1143 }
1144 OpndInfo *opndInfo = lastOpndInfo;
1145 while (opndInfo != saveLast) {
1146 int32 hashVal = 0;
1147 if (opndInfo->opnd->IsRegister() || opndInfo->opnd->IsRegShift()) {
1148 hashVal = -1;
1149 } else {
1150 hashVal = opndInfo->hashVal;
1151 }
1152 UpdateOpndInfo(*opndInfo->opnd, *opndInfo, opndInfo->same, hashVal);
1153 opndInfo = opndInfo->prev;
1154 }
1155 if (saveLast != nullptr) {
1156 saveLast->next = nullptr;
1157 lastOpndInfo = saveLast;
1158 } else {
1159 firstOpndInfo = nullptr;
1160 lastOpndInfo = nullptr;
1161 }
1162 }
1163
1164 /* back up to last saved insn */
BackupInsnInfoList(InsnInfo * saveLast)1165 void Ebo::BackupInsnInfoList(InsnInfo *saveLast)
1166 {
1167 if (lastInsnInfo == saveLast) {
1168 return;
1169 }
1170 InsnInfo *insnInfo = lastInsnInfo;
1171 while (insnInfo != saveLast) {
1172 SetInsnInfo(insnInfo->hashIndex, *(insnInfo->same));
1173 insnInfo = insnInfo->prev;
1174 }
1175 if (saveLast != nullptr) {
1176 saveLast->next = nullptr;
1177 lastInsnInfo = saveLast;
1178 } else {
1179 firstInsnInfo = nullptr;
1180 lastInsnInfo = nullptr;
1181 }
1182 }
1183
1184 /* add bb to eb ,and build operandinfo of bb */
AddBB2EB(BB & bb)1185 void Ebo::AddBB2EB(BB &bb)
1186 {
1187 OpndInfo *saveLastOpndInfo = lastOpndInfo;
1188 InsnInfo *saveLastInsnInfo = lastInsnInfo;
1189 SetBBVisited(bb);
1190 bbNum++;
1191 BuildAllInfo(bb);
1192 /* Stop adding BB to EB if the bbs in the current EB exceeds kEboMaxBBNums */
1193 if (bbNum < kEboMaxBBNums) {
1194 for (auto *bbSucc : bb.GetSuccs()) {
1195 if ((bbSucc->GetPreds().size() == 1) && IsNotVisited(*bbSucc)) {
1196 AddBB2EB(*bbSucc);
1197 }
1198 }
1199 }
1200
1201 RemoveUnusedInsns(bb, true);
1202 /* Remove information about Operand's and Insn's in this block. */
1203 BackupOpndInfoList(saveLastOpndInfo);
1204 BackupInsnInfoList(saveLastInsnInfo);
1205 bbNum--;
1206 }
1207
1208 /* Perform EBO */
EboProcess()1209 void Ebo::EboProcess()
1210 {
1211 FOR_ALL_BB(bb, cgFunc) {
1212 if (IsNotVisited(*bb)) {
1213 bbNum = 0;
1214 AddBB2EB(*bb);
1215 }
1216 }
1217 }
1218
1219 /* Perform EBO on O1 which the optimization can only be in a single block. */
EboProcessSingleBB()1220 void Ebo::EboProcessSingleBB()
1221 {
1222 FOR_ALL_BB(bb, cgFunc) {
1223 OpndInfo *saveLastOpndInfo = lastOpndInfo;
1224 InsnInfo *saveLastInsnInfo = lastInsnInfo;
1225 BuildAllInfo(*bb);
1226 RemoveUnusedInsns(*bb, true);
1227 /* Remove information about Operand's and Insn's in this block. */
1228 BackupOpndInfoList(saveLastOpndInfo);
1229 BackupInsnInfoList(saveLastInsnInfo);
1230 }
1231 }
1232
EboInit()1233 void Ebo::EboInit()
1234 {
1235 visitedBBs.resize(cgFunc->NumBBs());
1236 for (uint32 i = 0; i < cgFunc->NumBBs(); ++i) {
1237 visitedBBs[i] = false;
1238 }
1239 exprInfoTable.resize(kEboMaxOpndHash);
1240 for (uint32 i = 0; i < kEboMaxOpndHash; ++i) {
1241 exprInfoTable.at(i) = nullptr;
1242 }
1243 insnInfoTable.resize(kEboMaxInsnHash);
1244 for (uint32 i = 0; i < kEboMaxInsnHash; ++i) {
1245 insnInfoTable.at(i) = nullptr;
1246 }
1247 if (!beforeRegAlloc) {
1248 BuildCallerSaveRegisters();
1249 }
1250 optSuccess = false;
1251 }
1252
1253 /* perform EB optimizations right after instruction selection. */
Run()1254 void Ebo::Run()
1255 {
1256 EboInit();
1257 if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel2) {
1258 EboProcess();
1259 } else {
1260 EboProcessSingleBB(); /* Perform SingleBB Optimization when -O1. */
1261 }
1262 if (optSuccess && cgFunc->GetMirModule().IsCModule()) {
1263 Run();
1264 }
1265 }
1266
1267 /* === new pm === */
PhaseRun(maplebe::CGFunc & f)1268 bool CgEbo0::PhaseRun(maplebe::CGFunc &f)
1269 {
1270 if (EBO_DUMP_NEWPM) {
1271 DotGenerator::GenerateDot("ebo0", f, f.GetMirModule());
1272 }
1273 LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
1274 MemPool *eboMp = GetPhaseMemPool();
1275 Ebo *ebo = nullptr;
1276 #if TARGAARCH64 || TARGRISCV64
1277 ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, true, PhaseName());
1278 #endif
1279 #if TARGARM32
1280 ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, true, "ebo0");
1281 #endif
1282 ebo->Run();
1283 /* the live range info may changed, so invalid the info. */
1284 if (live != nullptr) {
1285 live->ClearInOutDataInfo();
1286 }
1287 return true;
1288 }
1289
GetAnalysisDependence(maple::AnalysisDep & aDep) const1290 void CgEbo0::GetAnalysisDependence(maple::AnalysisDep &aDep) const
1291 {
1292 aDep.AddRequired<CgLiveAnalysis>();
1293 aDep.AddPreserved<CgLoopAnalysis>();
1294 }
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo0,ebo)1295 MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo0, ebo)
1296
1297 bool CgEbo1::PhaseRun(maplebe::CGFunc &f)
1298 {
1299 if (EBO_DUMP_NEWPM) {
1300 DotGenerator::GenerateDot(PhaseName(), f, f.GetMirModule(), true);
1301 }
1302 LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
1303 MemPool *eboMp = GetPhaseMemPool();
1304 Ebo *ebo = nullptr;
1305 #if TARGAARCH64 || TARGRISCV64
1306 ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, true, PhaseName());
1307 #endif
1308 #if TARGARM32
1309 ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, true, PhaseName());
1310 #endif
1311 ebo->Run();
1312 /* the live range info may changed, so invalid the info. */
1313 if (live != nullptr) {
1314 live->ClearInOutDataInfo();
1315 }
1316 return true;
1317 }
1318
GetAnalysisDependence(maple::AnalysisDep & aDep) const1319 void CgEbo1::GetAnalysisDependence(maple::AnalysisDep &aDep) const
1320 {
1321 aDep.AddRequired<CgLiveAnalysis>();
1322 aDep.AddPreserved<CgLoopAnalysis>();
1323 }
MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo1,ebo1)1324 MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgEbo1, ebo1)
1325
1326 bool CgPostEbo::PhaseRun(maplebe::CGFunc &f)
1327 {
1328 if (EBO_DUMP_NEWPM) {
1329 DotGenerator::GenerateDot(PhaseName(), f, f.GetMirModule());
1330 }
1331 LiveAnalysis *live = GET_ANALYSIS(CgLiveAnalysis, f);
1332 MemPool *eboMp = GetPhaseMemPool();
1333 Ebo *ebo = nullptr;
1334 #if TARGAARCH64 || TARGRISCV64
1335 ebo = eboMp->New<AArch64Ebo>(f, *eboMp, live, false, PhaseName());
1336 #endif
1337 #if TARGARM32
1338 ebo = eboMp->New<Arm32Ebo>(f, *eboMp, live, false, PhaseName());
1339 #endif
1340 ebo->Run();
1341 /* the live range info may changed, so invalid the info. */
1342 if (live != nullptr) {
1343 live->ClearInOutDataInfo();
1344 }
1345 return true;
1346 }
1347
GetAnalysisDependence(maple::AnalysisDep & aDep) const1348 void CgPostEbo::GetAnalysisDependence(maple::AnalysisDep &aDep) const
1349 {
1350 aDep.AddRequired<CgLiveAnalysis>();
1351 aDep.AddPreserved<CgLoopAnalysis>();
1352 }
1353 MAPLE_TRANSFORM_PHASE_REGISTER_CANSKIP(CgPostEbo, postebo)
1354 } /* namespace maplebe */
1355