1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_reaching.h"
17 #include "aarch64_cg.h"
18 namespace maplebe {
/* MCC_ClearLocalStackRef clears 1 stack slot, and MCC_DecRefResetPair clears 2 stack slots;
 * the stack positions cleared are recorded in callInsn->clearStackOffset
 */
constexpr short kFirstClearMemIndex = 0;  /* index of the first cleared stack slot in clearStackOffset */
constexpr short kSecondClearMemIndex = 1; /* index of the second cleared stack slot in clearStackOffset */
24
/* insert pseudo insn for parameters definition.
 * Parameters arrive in registers (and possibly stack slots) without an explicit defining
 * insn, so pseudo defs are prepended to the first BB to give the reaching-definition
 * analysis a definition point for each incoming value. */
void AArch64ReachingDefinition::InitStartGen()
{
    BB *bb = cgFunc->GetFirstBB();

    /* Parameters should be define first. */
    CCImpl &parmLocator = *static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateLocator(cgFunc->GetCurCallConvKind());
    CCLocInfo pLoc;
    for (uint32 i = 0; i < cgFunc->GetFunction().GetFormalCount(); ++i) {
        MIRType *type = cgFunc->GetFunction().GetNthParamType(i);
        /* pLoc receives the register assignment (reg0..reg3) for this formal */
        (void)parmLocator.LocateNextParm(*type, pLoc, i == 0, cgFunc->GetFunction().GetMIRFuncType());
        if (pLoc.reg0 == 0) {
            /* If is a large frame, parameter addressing mode is based vreg:Vra. */
            continue;
        }

        uint64 symSize = cgFunc->GetBecommon().GetTypeSize(type->GetTypeIndex());
        if ((cgFunc->GetMirModule().GetSrcLang() == kSrcLangC) && (symSize > k8ByteSize)) {
            /* For C structure passing in one or two registers. */
            symSize = k8ByteSize;
        }
        /* register numbers below V0 are integer registers */
        RegType regType = (pLoc.reg0 < V0) ? kRegTyInt : kRegTyFloat;
        /* sub-word parameters are widened to at least 32 bits */
        uint32 srcBitSize = ((symSize < k4ByteSize) ? k4ByteSize : symSize) * kBitsPerByte;

        /* select the pseudo-def opcode by register class and width */
        MOperator mOp;
        if (regType == kRegTyInt) {
            if (srcBitSize <= k32BitSize) {
                mOp = MOP_pseudo_param_def_w;
            } else {
                mOp = MOP_pseudo_param_def_x;
            }
        } else {
            if (srcBitSize <= k32BitSize) {
                mOp = MOP_pseudo_param_def_s;
            } else {
                mOp = MOP_pseudo_param_def_d;
            }
        }

        AArch64CGFunc *aarchCGFunc = static_cast<AArch64CGFunc *>(cgFunc);

        RegOperand &regOpnd =
            aarchCGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(pLoc.reg0), srcBitSize, regType);
        Insn &pseudoInsn = cgFunc->GetInsnBuilder()->BuildInsn(mOp, regOpnd);
        bb->InsertInsnBegin(pseudoInsn);
        pseudoInsns.emplace_back(&pseudoInsn);
        /* a formal split across several registers needs one pseudo def per register */
        if (pLoc.reg1) {
            RegOperand &regOpnd1 = aarchCGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(pLoc.reg1),
                                                                                   srcBitSize, regType);
            Insn &pseudoInsn1 = cgFunc->GetInsnBuilder()->BuildInsn(mOp, regOpnd1);
            bb->InsertInsnBegin(pseudoInsn1);
            pseudoInsns.emplace_back(&pseudoInsn1);
        }
        if (pLoc.reg2) {
            RegOperand &regOpnd2 = aarchCGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(pLoc.reg2),
                                                                                   srcBitSize, regType);
            Insn &pseudoInsn1 = cgFunc->GetInsnBuilder()->BuildInsn(mOp, regOpnd2);
            bb->InsertInsnBegin(pseudoInsn1);
            pseudoInsns.emplace_back(&pseudoInsn1);
        }
        if (pLoc.reg3) {
            RegOperand &regOpnd3 = aarchCGFunc->GetOrCreatePhysicalRegisterOperand(static_cast<AArch64reg>(pLoc.reg3),
                                                                                   srcBitSize, regType);
            Insn &pseudoInsn1 = cgFunc->GetInsnBuilder()->BuildInsn(mOp, regOpnd3);
            bb->InsertInsnBegin(pseudoInsn1);
            pseudoInsns.emplace_back(&pseudoInsn1);
        }

        {
            /*
             * define memory address since store param may be transferred to stp and which with the short offset range.
             * we can not get the correct definition before RA.
             * example:
             *   add  x8, sp, #712
             *   stp  x0, x1, [x8]      // store param: _this Reg40_R313644
             *   stp  x2, x3, [x8,#16]  // store param: Reg41_R333743 Reg42_R333622
             *   stp  x4, x5, [x8,#32]  // store param: Reg43_R401297 Reg44_R313834
             *   str  x7, [x8,#48]      // store param: Reg46_R401297
             */
            MIRSymbol *sym = cgFunc->GetFunction().GetFormal(i);
            if (!sym->IsPreg()) {
                /* formal lives in a stack slot: emit a pseudo store-def for that slot too */
                MIRSymbol *firstSym = cgFunc->GetFunction().GetFormal(i);
                const AArch64SymbolAlloc *firstSymLoc =
                    static_cast<AArch64SymbolAlloc *>(cgFunc->GetMemlayout()->GetSymAllocInfo(firstSym->GetStIndex()));
                int32 stOffset = cgFunc->GetBaseOffset(*firstSymLoc);
                MIRType *firstType = cgFunc->GetFunction().GetNthParamType(i);
                uint32 firstSymSize = cgFunc->GetBecommon().GetTypeSize(firstType->GetTypeIndex());
                uint32 firstStackSize = firstSymSize < k4ByteSize ? k4ByteSize : firstSymSize;

                MemOperand *memOpnd = aarchCGFunc->CreateStackMemOpnd(RFP, stOffset, firstStackSize * kBitsPerByte);
                MOperator mopTemp = firstStackSize <= k4ByteSize ? MOP_pseudo_param_store_w : MOP_pseudo_param_store_x;
                Insn &pseudoInsnTemp = cgFunc->GetInsnBuilder()->BuildInsn(mopTemp, *memOpnd);
                bb->InsertInsnBegin(pseudoInsnTemp);
                pseudoInsns.emplace_back(&pseudoInsnTemp);
            }
        }
    }

    /* if function has "bl MCC_InitializeLocalStackRef", should define corresponding memory. */
    AArch64CGFunc *a64CGFunc = static_cast<AArch64CGFunc *>(cgFunc);

    for (uint32 i = 0; i < a64CGFunc->GetRefCount(); ++i) {
        MemOperand *memOpnd = a64CGFunc->CreateStackMemOpnd(
            RFP, static_cast<int32>(a64CGFunc->GetBeginOffset() + i * k8BitSize), k64BitSize);
        Insn &pseudoInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_pseudo_ref_init_x, *memOpnd);

        bb->InsertInsnBegin(pseudoInsn);
        pseudoInsns.emplace_back(&pseudoInsn);
    }
}
135
136 /* insert pseudoInsns for return value R0/V0 */
AddRetPseudoInsn(BB & bb)137 void AArch64ReachingDefinition::AddRetPseudoInsn(BB &bb)
138 {
139 AArch64reg regNO = static_cast<AArch64CGFunc *>(cgFunc)->GetReturnRegisterNumber();
140 if (regNO == kInvalidRegNO) {
141 return;
142 }
143
144 if (regNO == R0) {
145 RegOperand ®Opnd =
146 static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(regNO, k64BitSize, kRegTyInt);
147 Insn &retInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_pseudo_ret_int, regOpnd);
148 bb.AppendInsn(retInsn);
149 pseudoInsns.emplace_back(&retInsn);
150 } else if (regNO == V0) {
151 RegOperand ®Opnd =
152 static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreatePhysicalRegisterOperand(regNO, k64BitSize, kRegTyFloat);
153 Insn &retInsn = cgFunc->GetInsnBuilder()->BuildInsn(MOP_pseudo_ret_float, regOpnd);
154 bb.AppendInsn(retInsn);
155 pseudoInsns.emplace_back(&retInsn);
156 }
157 }
158
AddRetPseudoInsns()159 void AArch64ReachingDefinition::AddRetPseudoInsns()
160 {
161 uint32 exitBBSize = cgFunc->GetExitBBsVec().size();
162 if (exitBBSize == 0) {
163 if (cgFunc->GetLastBB()->GetPrev()->GetFirstStmt() == cgFunc->GetCleanupLabel() &&
164 cgFunc->GetLastBB()->GetPrev()->GetPrev()) {
165 AddRetPseudoInsn(*cgFunc->GetLastBB()->GetPrev()->GetPrev());
166 } else {
167 AddRetPseudoInsn(*cgFunc->GetLastBB()->GetPrev());
168 }
169 } else {
170 for (uint32 i = 0; i < exitBBSize; ++i) {
171 AddRetPseudoInsn(*cgFunc->GetExitBB(i));
172 }
173 }
174 }
175
GenAllAsmDefRegs(BB & bb,Insn & insn,uint32 index)176 void AArch64ReachingDefinition::GenAllAsmDefRegs(BB &bb, Insn &insn, uint32 index)
177 {
178 for (auto reg : static_cast<ListOperand &>(insn.GetOperand(index)).GetOperands()) {
179 regGen[bb.GetId()]->SetBit(static_cast<RegOperand *>(reg)->GetRegisterNumber());
180 }
181 }
182
GenAllAsmUseRegs(BB & bb,Insn & insn,uint32 index)183 void AArch64ReachingDefinition::GenAllAsmUseRegs(BB &bb, Insn &insn, uint32 index)
184 {
185 for (auto reg : static_cast<ListOperand &>(insn.GetOperand(index)).GetOperands()) {
186 regUse[bb.GetId()]->SetBit(static_cast<RegOperand *>(reg)->GetRegisterNumber());
187 }
188 }
189
190 /* all caller saved register are modified by call insn */
GenAllCallerSavedRegs(BB & bb,Insn & insn)191 void AArch64ReachingDefinition::GenAllCallerSavedRegs(BB &bb, Insn &insn)
192 {
193 if (CGOptions::DoIPARA()) {
194 std::set<regno_t> callerSaveRegs;
195 cgFunc->GetRealCallerSaveRegs(insn, callerSaveRegs);
196 for (auto i : callerSaveRegs) {
197 regGen[bb.GetId()]->SetBit(i);
198 }
199 } else {
200 for (uint32 i = R0; i <= V31; ++i) {
201 if (AArch64Abi::IsCallerSaveReg(static_cast<AArch64reg>(i))) {
202 regGen[bb.GetId()]->SetBit(i);
203 }
204 }
205 }
206 }
207
208 /* reg killed killed by call insn */
IsRegKilledByCallInsn(const Insn & insn,regno_t regNO) const209 bool AArch64ReachingDefinition::IsRegKilledByCallInsn(const Insn &insn, regno_t regNO) const
210 {
211 if (CGOptions::DoIPARA()) {
212 std::set<regno_t> callerSaveRegs;
213 cgFunc->GetRealCallerSaveRegs(insn, callerSaveRegs);
214 return callerSaveRegs.find(regNO) != callerSaveRegs.end();
215 } else {
216 return AArch64Abi::IsCallerSaveReg(static_cast<AArch64reg>(regNO));
217 }
218 }
219
KilledByCallBetweenInsnInSameBB(const Insn & startInsn,const Insn & endInsn,regno_t regNO) const220 bool AArch64ReachingDefinition::KilledByCallBetweenInsnInSameBB(const Insn &startInsn, const Insn &endInsn,
221 regno_t regNO) const
222 {
223 DEBUG_ASSERT(startInsn.GetBB() == endInsn.GetBB(), "two insns must be in same bb");
224 if (CGOptions::DoIPARA()) {
225 for (const Insn *insn = &startInsn; insn != endInsn.GetNext(); insn = insn->GetNext()) {
226 if (insn->IsMachineInstruction() && insn->IsCall() && IsRegKilledByCallInsn(*insn, regNO)) {
227 return true;
228 }
229 }
230 return false;
231 } else {
232 return HasCallBetweenInsnInSameBB(startInsn, endInsn);
233 }
234 }
235 /*
236 * find definition for register between startInsn and endInsn.
237 * startInsn and endInsn is not in same BB
238 * make sure that in path between startBB and endBB there is no redefine.
239 */
FindRegDefBetweenInsnGlobal(uint32 regNO,Insn * startInsn,Insn * endInsn) const240 std::vector<Insn *> AArch64ReachingDefinition::FindRegDefBetweenInsnGlobal(uint32 regNO, Insn *startInsn,
241 Insn *endInsn) const
242 {
243 DEBUG_ASSERT(startInsn->GetBB() != endInsn->GetBB(), "call FindRegDefBetweenInsn please");
244 std::vector<Insn *> defInsnVec;
245 if (startInsn == nullptr || endInsn == nullptr) {
246 return defInsnVec;
247 }
248 /* check startBB */
249 BB *startBB = startInsn->GetBB();
250 std::vector<Insn *> startBBdefInsnVec = FindRegDefBetweenInsn(regNO, startInsn->GetNext(), startBB->GetLastInsn());
251 if (startBBdefInsnVec.size() == 1) {
252 defInsnVec.emplace_back(*startBBdefInsnVec.begin());
253 }
254 if (startBBdefInsnVec.size() > 1 || (startBBdefInsnVec.empty() && regOut[startBB->GetId()]->TestBit(regNO))) {
255 defInsnVec.emplace_back(startInsn);
256 defInsnVec.emplace_back(endInsn);
257 return defInsnVec;
258 }
259 if (IsCallerSavedReg(regNO) && startInsn->GetNext() != nullptr &&
260 KilledByCallBetweenInsnInSameBB(*startInsn->GetNext(), *startBB->GetLastInsn(), regNO)) {
261 defInsnVec.emplace_back(startInsn);
262 defInsnVec.emplace_back(endInsn);
263 return defInsnVec;
264 }
265 /* check endBB */
266 BB *endBB = endInsn->GetBB();
267 std::vector<Insn *> endBBdefInsnVec = FindRegDefBetweenInsn(regNO, endBB->GetFirstInsn(), endInsn->GetPrev());
268 if (endBBdefInsnVec.size() == 1) {
269 defInsnVec.emplace_back(*endBBdefInsnVec.begin());
270 }
271 if (endBBdefInsnVec.size() > 1 || (endBBdefInsnVec.empty() && regIn[endBB->GetId()]->TestBit(regNO))) {
272 defInsnVec.emplace_back(startInsn);
273 defInsnVec.emplace_back(endInsn);
274 return defInsnVec;
275 }
276 if (IsCallerSavedReg(regNO) && endInsn->GetPrev() != nullptr &&
277 KilledByCallBetweenInsnInSameBB(*endBB->GetFirstInsn(), *endInsn->GetPrev(), regNO)) {
278 defInsnVec.emplace_back(startInsn);
279 defInsnVec.emplace_back(endInsn);
280 return defInsnVec;
281 }
282 InsnSet defInsnSet;
283 std::vector<VisitStatus> visitedBB(kMaxBBNum, kNotVisited);
284 visitedBB[endBB->GetId()] = kNormalVisited;
285 visitedBB[startBB->GetId()] = kNormalVisited;
286 std::list<bool> pathStatus;
287 if (DFSFindRegInfoBetweenBB(*startBB, *endBB, regNO, visitedBB, pathStatus, kDumpRegIn)) {
288 defInsnVec.emplace_back(endInsn);
289 }
290 return defInsnVec;
291 }
292
IsRegInAsmList(Insn * insn,uint32 index,uint32 regNO,InsnSet & insnSet)293 static bool IsRegInAsmList(Insn *insn, uint32 index, uint32 regNO, InsnSet &insnSet)
294 {
295 for (auto reg : static_cast<ListOperand &>(insn->GetOperand(index)).GetOperands()) {
296 if (static_cast<RegOperand *>(reg)->GetRegisterNumber() == regNO) {
297 insnSet.insert(insn);
298 return true;
299 }
300 }
301 return false;
302 }
303
FindRegDefInBB(uint32 regNO,BB & bb,InsnSet & defInsnSet) const304 void AArch64ReachingDefinition::FindRegDefInBB(uint32 regNO, BB &bb, InsnSet &defInsnSet) const
305 {
306 if (!regGen[bb.GetId()]->TestBit(regNO)) {
307 return;
308 }
309
310 FOR_BB_INSNS(insn, (&bb))
311 {
312 if (!insn->IsMachineInstruction()) {
313 continue;
314 }
315
316 if (insn->GetMachineOpcode() == MOP_asm) {
317 if (IsRegInAsmList(insn, kAsmOutputListOpnd, regNO, defInsnSet)) {
318 continue;
319 }
320 IsRegInAsmList(insn, kAsmClobberListOpnd, regNO, defInsnSet);
321 continue;
322 }
323 if (insn->IsCall() && IsRegKilledByCallInsn(*insn, regNO)) {
324 (void)defInsnSet.insert(insn);
325 continue;
326 }
327 if (insn->IsRegDefined(regNO)) {
328 (void)defInsnSet.insert(insn);
329 }
330 }
331 }
332
333 /* check whether call insn changed the stack status or not. */
CallInsnClearDesignateStackRef(const Insn & callInsn,int64 offset) const334 bool AArch64ReachingDefinition::CallInsnClearDesignateStackRef(const Insn &callInsn, int64 offset) const
335 {
336 return offset == callInsn.GetClearStackOffset(kFirstClearMemIndex) ||
337 offset == callInsn.GetClearStackOffset(kSecondClearMemIndex);
338 }
339
/*
 * find definition for stack memory operand between startInsn and endInsn.
 * startInsn and endInsn must be in same BB and startInsn and endInsn are included
 * special case:
 *   MCC_ClearLocalStackRef clear designate stack position, the designate stack position is thought defined
 *   for example:
 *     add x0, x29, #24
 *     bl MCC_ClearLocalStackRef
 * Returns at most one insn: the nearest (latest) definition of the slot at `offset`.
 */
std::vector<Insn *> AArch64ReachingDefinition::FindMemDefBetweenInsn(uint32 offset, const Insn *startInsn,
                                                                     Insn *endInsn) const
{
    std::vector<Insn *> defInsnVec;
    if (startInsn == nullptr || endInsn == nullptr) {
        return defInsnVec;
    }

    DEBUG_ASSERT(startInsn->GetBB() == endInsn->GetBB(), "two insns must be in a same BB");
    DEBUG_ASSERT(endInsn->GetId() >= startInsn->GetId(), "two insns must be in a same BB");
    /* quick reject: this BB's mem-gen bitset must record a def of the slot */
    if (!memGen[startInsn->GetBB()->GetId()]->TestBit(offset / kMemZoomSize)) {
        return defInsnVec;
    }

    /* scan backwards so the first hit is the latest definition before endInsn */
    for (Insn *insn = endInsn; insn != nullptr && insn != startInsn->GetPrev(); insn = insn->GetPrev()) {
        if (!insn->IsMachineInstruction()) {
            continue;
        }

        /* inline asm that modifies memory conservatively counts as a def */
        if (insn->GetMachineOpcode() == MOP_asm) {
            if (insn->IsAsmModMem()) {
                defInsnVec.emplace_back(insn);
                return defInsnVec;
            }
            continue;
        }

        /* the MCC_* special case described above */
        if (insn->IsCall()) {
            if (CallInsnClearDesignateStackRef(*insn, offset)) {
                defInsnVec.emplace_back(insn);
                return defInsnVec;
            }
            continue;
        }

        /* only stores and pseudo insns can define a stack slot */
        if (!(insn->IsStore() || AArch64isa::IsPseudoInstruction(insn->GetMachineOpcode()))) {
            continue;
        }

        uint32 opndNum = insn->GetOperandSize();
        for (uint32 i = 0; i < opndNum; ++i) {
            Operand &opnd = insn->GetOperand(i);

            if (opnd.IsMemoryAccessOperand()) {
                auto &memOpnd = static_cast<MemOperand &>(opnd);
                RegOperand *base = memOpnd.GetBaseRegister();
                RegOperand *index = memOpnd.GetIndexRegister();

                /* only frame-register-based, immediate-offset accesses are tracked */
                if (base == nullptr || !IsFrameReg(*base) || index != nullptr) {
                    break;
                }

                /* after RA, only spill insns are considered definitions here */
                if (!insn->IsSpillInsn() && cgFunc->IsAfterRegAlloc()) {
                    break;
                }

                DEBUG_ASSERT(memOpnd.GetOffsetImmediate() != nullptr, "offset must be a immediate value");
                int64 memOffset = memOpnd.GetOffsetImmediate()->GetOffsetValue();
                /* a store-pair also defines the slot one element past its base offset */
                if ((offset == memOffset) ||
                    (insn->IsStorePair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode()))) {
                    defInsnVec.emplace_back(insn);
                    return defInsnVec;
                }
            }
        }
    }
    return defInsnVec;
}
417
/* Collect into defInsnSet every insn in bb that defines the stack slot at `offset`:
 * mem-modifying asm, MCC_* stack-clearing calls, stores (incl. the second slot of a
 * store-pair) and pseudo insns. */
void AArch64ReachingDefinition::FindMemDefInBB(uint32 offset, BB &bb, InsnSet &defInsnSet) const
{
    /* quick reject via the BB's mem-gen bitset */
    if (!memGen[bb.GetId()]->TestBit(offset / kMemZoomSize)) {
        return;
    }

    FOR_BB_INSNS(insn, (&bb))
    {
        if (!insn->IsMachineInstruction()) {
            continue;
        }

        if (insn->IsCall()) {
            /* asm that modifies memory conservatively counts as a def */
            if (insn->GetMachineOpcode() == MOP_asm) {
                if (insn->IsAsmModMem()) {
                    (void)defInsnSet.insert(insn);
                }
                continue;
            }
            /* MCC_* calls clearing this designated stack slot count as a def */
            if (CallInsnClearDesignateStackRef(*insn, offset)) {
                (void)defInsnSet.insert(insn);
            }
            continue;
        }

        /* only stores and pseudo insns can define a stack slot */
        if (!(insn->IsStore() || AArch64isa::IsPseudoInstruction(insn->GetMachineOpcode()))) {
            continue;
        }

        uint32 opndNum = insn->GetOperandSize();
        for (uint32 i = 0; i < opndNum; ++i) {
            Operand &opnd = insn->GetOperand(i);
            if (opnd.IsMemoryAccessOperand()) {
                auto &memOpnd = static_cast<MemOperand &>(opnd);
                RegOperand *base = memOpnd.GetBaseRegister();
                RegOperand *index = memOpnd.GetIndexRegister();

                /* only frame-register-based, immediate-offset accesses are tracked */
                if (base == nullptr || !IsFrameReg(*base) || index != nullptr) {
                    break;
                }

                DEBUG_ASSERT(memOpnd.GetOffsetImmediate() != nullptr, "offset must be a immediate value");
                int64 memOffset = memOpnd.GetOffsetImmediate()->GetOffsetValue();
                if (offset == memOffset) {
                    (void)defInsnSet.insert(insn);
                    break;
                }
                /* a store-pair also defines the slot one element past its base offset */
                if (insn->IsStorePair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode())) {
                    (void)defInsnSet.insert(insn);
                    break;
                }
            }
        }
    }
}
473
474 /*
475 * find defininition for register Iteratively.
476 * input:
477 * startBB: find definnition starting from startBB
478 * regNO: the No of register to be find
479 * visitedBB: record these visited BB
480 * defInsnSet: insn defining register is saved in this set
481 */
DFSFindDefForRegOpnd(const BB & startBB,uint32 regNO,std::vector<VisitStatus> & visitedBB,InsnSet & defInsnSet) const482 void AArch64ReachingDefinition::DFSFindDefForRegOpnd(const BB &startBB, uint32 regNO,
483 std::vector<VisitStatus> &visitedBB, InsnSet &defInsnSet) const
484 {
485 std::vector<Insn *> defInsnVec;
486 for (auto predBB : startBB.GetPreds()) {
487 if (visitedBB[predBB->GetId()] != kNotVisited) {
488 continue;
489 }
490 visitedBB[predBB->GetId()] = kNormalVisited;
491 if (regGen[predBB->GetId()]->TestBit(regNO) || (regNO == kRFLAG && predBB->HasCall())) {
492 defInsnVec.clear();
493 defInsnVec = FindRegDefBetweenInsn(regNO, predBB->GetFirstInsn(), predBB->GetLastInsn());
494 defInsnSet.insert(defInsnVec.begin(), defInsnVec.end());
495 } else if (regIn[predBB->GetId()]->TestBit(regNO)) {
496 DFSFindDefForRegOpnd(*predBB, regNO, visitedBB, defInsnSet);
497 }
498 }
499 }
500
501 /*
502 * find defininition for stack memory iteratively.
503 * input:
504 * startBB: find definnition starting from startBB
505 * offset: the offset of memory to be find
506 * visitedBB: record these visited BB
507 * defInsnSet: insn defining register is saved in this set
508 */
DFSFindDefForMemOpnd(const BB & startBB,uint32 offset,std::vector<VisitStatus> & visitedBB,InsnSet & defInsnSet) const509 void AArch64ReachingDefinition::DFSFindDefForMemOpnd(const BB &startBB, uint32 offset,
510 std::vector<VisitStatus> &visitedBB, InsnSet &defInsnSet) const
511 {
512 std::vector<Insn *> defInsnVec;
513 for (auto predBB : startBB.GetPreds()) {
514 if (visitedBB[predBB->GetId()] != kNotVisited) {
515 continue;
516 }
517 visitedBB[predBB->GetId()] = kNormalVisited;
518 if (memGen[predBB->GetId()]->TestBit(offset / kMemZoomSize)) {
519 defInsnVec.clear();
520 defInsnVec = FindMemDefBetweenInsn(offset, predBB->GetFirstInsn(), predBB->GetLastInsn());
521 DEBUG_ASSERT(!defInsnVec.empty(), "opnd must be defined in this bb");
522 defInsnSet.insert(defInsnVec.begin(), defInsnVec.end());
523 } else if (memIn[predBB->GetId()]->TestBit(offset / kMemZoomSize)) {
524 DFSFindDefForMemOpnd(*predBB, offset, visitedBB, defInsnSet);
525 }
526 }
527 }
528
/*
 * find defininition for register.
 * input:
 *   insn: the insn in which register is used
 *   indexOrRegNO: the index of register in insn or the No of register to be find
 *   isRegNO: if indexOrRegNO is index, this argument is false, else is true
 * return:
 *   the set of definition insns for register
 */
InsnSet AArch64ReachingDefinition::FindDefForRegOpnd(Insn &insn, uint32 indexOrRegNO, bool isRegNO) const
{
    uint32 regNO = indexOrRegNO;
    if (!isRegNO) {
        /* resolve the register number from the operand at the given index */
        Operand &opnd = insn.GetOperand(indexOrRegNO);
        auto &regOpnd = static_cast<RegOperand &>(opnd);
        regNO = regOpnd.GetRegisterNumber();
    }

    /* first look for a def earlier in the same BB */
    std::vector<Insn *> defInsnVec;
    if (regGen[insn.GetBB()->GetId()]->TestBit(regNO)) {
        defInsnVec = FindRegDefBetweenInsn(regNO, insn.GetBB()->GetFirstInsn(), insn.GetPrev());
    }
    InsnSet defInsnSet;
    if (!defInsnVec.empty()) {
        defInsnSet.insert(defInsnVec.begin(), defInsnVec.end());
        return defInsnSet;
    }
    /* otherwise search backwards through predecessors */
    std::vector<VisitStatus> visitedBB(kMaxBBNum, kNotVisited);
    if (insn.GetBB()->IsCleanup()) {
        DFSFindDefForRegOpnd(*insn.GetBB(), regNO, visitedBB, defInsnSet);
        if (defInsnSet.empty()) {
            /* cleanup code can be reached from many points in the normal path;
             * conservatively collect defs from every non-cleanup BB */
            FOR_ALL_BB(bb, cgFunc)
            {
                if (bb->IsCleanup()) {
                    continue;
                }
                if (regGen[bb->GetId()]->TestBit(regNO)) {
                    FindRegDefInBB(regNO, *bb, defInsnSet);
                }
            }
        }
    } else {
        DFSFindDefForRegOpnd(*insn.GetBB(), regNO, visitedBB, defInsnSet);
    }
    return defInsnSet;
}
575
/* Check globally (across BBs) whether regNO is used between startInsn and endInsn
 * (both exclusive); movBB is treated as already visited in the cross-BB search. */
bool AArch64ReachingDefinition::FindRegUseBetweenInsnGlobal(uint32 regNO, Insn *startInsn, Insn *endInsn,
                                                            BB *movBB) const
{
    if (startInsn == nullptr || endInsn == nullptr) {
        return false;
    }
    if (startInsn->GetBB() == endInsn->GetBB()) {
        /* same BB: only insns strictly between the two matter */
        if (startInsn->GetNextMachineInsn() == endInsn) {
            return false;
        } else {
            return FindRegUsingBetweenInsn(regNO, startInsn->GetNextMachineInsn(), endInsn->GetPreviousMachineInsn());
        }
    } else {
        /* check Start BB */
        BB *startBB = startInsn->GetBB();
        if (FindRegUsingBetweenInsn(regNO, startInsn->GetNextMachineInsn(), startBB->GetLastInsn())) {
            return true;
        }
        /* check End BB */
        BB *endBB = endInsn->GetBB();
        if (FindRegUsingBetweenInsn(regNO, endBB->GetFirstInsn(), endInsn->GetPreviousMachineInsn())) {
            return true;
        }
        /* Global : startBB cannot dominate BB which it doesn't dominate before */
        if (startBB == movBB) {
            return false; /* it will not change dominate */
        }
        /* NOTE(review): movBB is dereferenced below without a null check — confirm callers always pass a valid BB */
        std::vector<VisitStatus> visitedBB(kMaxBBNum, kNotVisited);
        visitedBB[movBB->GetId()] = kNormalVisited;
        visitedBB[startBB->GetId()] = kNormalVisited;
        if (DFSFindRegDomianBetweenBB(*startBB, regNO, visitedBB)) {
            return true;
        }
    }
    return false;
}
612
/* Check globally (startInsn and endInsn in different BBs) whether regNO is
 * redefined anywhere between the two insns. */
bool AArch64ReachingDefinition::HasRegDefBetweenInsnGlobal(uint32 regNO, Insn &startInsn, Insn &endInsn) const
{
    CHECK_FATAL((startInsn.GetBB() != endInsn.GetBB()), "Is same BB!");
    /* check Start BB */
    BB *startBB = startInsn.GetBB();
    auto startInsnSet = FindRegDefBetweenInsn(regNO, startInsn.GetNext(), startBB->GetLastInsn());
    if (!startInsnSet.empty()) {
        return true;
    }
    /* check End BB */
    BB *endBB = endInsn.GetBB();
    auto endInsnSet = FindRegDefBetweenInsn(regNO, endBB->GetFirstInsn(), endInsn.GetPrev());
    if (!endInsnSet.empty()) {
        return true;
    }
    /* if endBB is a direct successor of startBB there is no intermediate BB to check */
    if (!startBB->GetSuccs().empty()) {
        for (auto *succ : startBB->GetSuccs()) {
            if (succ == endBB) {
                /* NOTE(review): both sets are necessarily empty at this point (non-empty
                 * cases returned true above), so this always returns false — confirm intent */
                return (!startInsnSet.empty() && !endInsnSet.empty());
            }
        }
    }
    /* check bb Between start and end */
    std::vector<VisitStatus> visitedBB(kMaxBBNum, kNotVisited);
    visitedBB[startBB->GetId()] = kNormalVisited;
    visitedBB[endBB->GetId()] = kNormalVisited;
    return DFSFindRegDefBetweenBB(*startBB, *endBB, regNO, visitedBB);
}
641
DFSFindRegDefBetweenBB(const BB & startBB,const BB & endBB,uint32 regNO,std::vector<VisitStatus> & visitedBB) const642 bool AArch64ReachingDefinition::DFSFindRegDefBetweenBB(const BB &startBB, const BB &endBB, uint32 regNO,
643 std::vector<VisitStatus> &visitedBB) const
644 {
645 if (&startBB == &endBB) {
646 return false;
647 }
648 for (auto succBB : startBB.GetSuccs()) {
649 if (visitedBB[succBB->GetId()] != kNotVisited) {
650 continue;
651 }
652 visitedBB[succBB->GetId()] = kNormalVisited;
653 if (regGen[succBB->GetId()]->TestBit(regNO)) {
654 return true;
655 }
656 if (DFSFindRegDefBetweenBB(*succBB, endBB, regNO, visitedBB)) {
657 return true;
658 }
659 }
660 return false;
661 }
662
DFSFindRegDomianBetweenBB(const BB startBB,uint32 regNO,std::vector<VisitStatus> & visitedBB) const663 bool AArch64ReachingDefinition::DFSFindRegDomianBetweenBB(const BB startBB, uint32 regNO,
664 std::vector<VisitStatus> &visitedBB) const
665 {
666 for (auto succBB : startBB.GetSuccs()) {
667 if (visitedBB[succBB->GetId()] != kNotVisited) {
668 continue;
669 }
670 visitedBB[succBB->GetId()] = kNormalVisited;
671 if (regIn[succBB->GetId()]->TestBit(regNO)) {
672 return true;
673 } else if (regGen[succBB->GetId()]->TestBit(regNO)) {
674 continue;
675 }
676 if (DFSFindRegDomianBetweenBB(*succBB, regNO, visitedBB)) {
677 return true;
678 }
679 }
680 return false;
681 }
682
/* DFS from startBB towards endBB collecting, per BB on the current path, whether the
 * path is "clean" w.r.t. the queried reg info (use / gen / in, selected by infoType).
 * Returns true if some path reaching endBB contains a non-clean BB.
 * NOTE(review): startBB is taken by value (a BB copy) — presumably matches the declaration; confirm. */
bool AArch64ReachingDefinition::DFSFindRegInfoBetweenBB(const BB startBB, const BB &endBB, uint32 regNO,
                                                        std::vector<VisitStatus> &visitedBB,
                                                        std::list<bool> &pathStatus, DumpType infoType) const
{
    for (auto succBB : startBB.GetSuccs()) {
        if (succBB == &endBB) {
            /* reached the target: report true if any BB on the path was not clean */
            for (auto status : pathStatus) {
                if (!status) {
                    return true;
                }
            }
            continue;
        }
        if (visitedBB[succBB->GetId()] != kNotVisited) {
            continue;
        }
        visitedBB[succBB->GetId()] = kNormalVisited;
        /* path is no clean check regInfo */
        bool isPathClean = true;
        switch (infoType) {
            case kDumpRegUse: {
                isPathClean = !regUse[succBB->GetId()]->TestBit(regNO);
                break;
            }
            case kDumpRegGen: {
                isPathClean = !regGen[succBB->GetId()]->TestBit(regNO);
                break;
            }
            case kDumpRegIn: {
                isPathClean = !(regIn[succBB->GetId()]->TestBit(regNO) || regGen[succBB->GetId()]->TestBit(regNO));
                break;
            }
            default:
                CHECK_FATAL(false, "NIY");
        }
        /* push this BB's status for the duration of the recursion, pop on backtrack */
        pathStatus.emplace_back(isPathClean);
        if (DFSFindRegInfoBetweenBB(*succBB, endBB, regNO, visitedBB, pathStatus, infoType)) {
            return true;
        }
        pathStatus.pop_back();
    }
    return false;
}
726
/* Return true if regNO is used by any machine insn in [startInsn, endInsn] (same BB,
 * both included): asm input/output lists, list operands, memory base/index registers,
 * the condition-code pseudo register, or a plain register use operand. */
bool AArch64ReachingDefinition::FindRegUsingBetweenInsn(uint32 regNO, Insn *startInsn, const Insn *endInsn) const
{
    if (startInsn == nullptr || endInsn == nullptr) {
        return false;
    }

    DEBUG_ASSERT(startInsn->GetBB() == endInsn->GetBB(), "two insns must be in a same BB");
    for (Insn *insn = startInsn; insn != nullptr && insn != endInsn->GetNext(); insn = insn->GetNext()) {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        if (insn->GetMachineOpcode() == MOP_asm) {
            /* an asm insn uses the register if it appears in its input or output list */
            InsnSet Temp;
            if (IsRegInAsmList(insn, kAsmInputListOpnd, regNO, Temp) ||
                IsRegInAsmList(insn, kAsmOutputListOpnd, regNO, Temp)) {
                return true;
            }
            continue;
        }
        const InsnDesc *md = insn->GetDesc();
        uint32 opndNum = insn->GetOperandSize();
        for (uint32 i = 0; i < opndNum; ++i) {
            Operand &opnd = insn->GetOperand(i);
            if (opnd.IsList()) {
                /* e.g. call parameter lists: any member matching regNO is a use */
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                for (auto listElem : listOpnd.GetOperands()) {
                    RegOperand *regOpnd = static_cast<RegOperand *>(listElem);
                    DEBUG_ASSERT(regOpnd != nullptr, "parameter operand must be RegOperand");
                    if (regNO == regOpnd->GetRegisterNumber()) {
                        return true;
                    }
                }
                continue;
            }

            /* memory operands are inspected even when not flagged IsUse (base/index are reads) */
            auto *regProp = md->opndMD[i];
            if (!regProp->IsUse() && !opnd.IsMemoryAccessOperand()) {
                continue;
            }

            if (opnd.IsMemoryAccessOperand()) {
                auto &memOpnd = static_cast<MemOperand &>(opnd);
                RegOperand *base = memOpnd.GetBaseRegister();
                RegOperand *index = memOpnd.GetIndexRegister();
                if ((base != nullptr && base->GetRegisterNumber() == regNO) ||
                    (index != nullptr && index->GetRegisterNumber() == regNO)) {
                    return true;
                }
            } else if (opnd.IsConditionCode()) {
                /* condition codes are modelled as a use of the rflag pseudo register */
                Operand &rflagOpnd = cgFunc->GetOrCreateRflag();
                RegOperand &rflagReg = static_cast<RegOperand &>(rflagOpnd);
                if (rflagReg.GetRegisterNumber() == regNO) {
                    return true;
                }
            } else if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO)) {
                return true;
            }
        }
    }
    return false;
}
788
/*
 * find insn using register between startInsn and endInsn.
 * startInsn and endInsn must be in same BB and startInsn and endInsn are included.
 * Uses are collected into regUseInsnSet; the scan stops (returns true) once the
 * register is redefined, killed by a call, or written back by pre/post-indexing.
 */
bool AArch64ReachingDefinition::FindRegUseBetweenInsn(uint32 regNO, Insn *startInsn, Insn *endInsn,
                                                      InsnSet &regUseInsnSet) const
{
    bool findFinish = false;
    if (startInsn == nullptr || endInsn == nullptr) {
        return findFinish;
    }

    DEBUG_ASSERT(startInsn->GetBB() == endInsn->GetBB(), "two insns must be in a same BB");
    for (Insn *insn = startInsn; insn != nullptr && insn != endInsn->GetNext(); insn = insn->GetNext()) {
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        if (insn->GetMachineOpcode() == MOP_asm) {
            IsRegInAsmList(insn, kAsmInputListOpnd, regNO, regUseInsnSet);
            /* presence in the output list both uses and redefines regNO: stop here */
            if (IsRegInAsmList(insn, kAsmOutputListOpnd, regNO, regUseInsnSet)) {
                break;
            }
            continue;
        }
        /* if insn is call and regNO is caller-saved register, then regNO will not be used later */
        if (insn->IsCall() && IsRegKilledByCallInsn(*insn, regNO)) {
            findFinish = true;
        }

        const InsnDesc *md = insn->GetDesc();
        uint32 opndNum = insn->GetOperandSize();
        for (uint32 i = 0; i < opndNum; ++i) {
            Operand &opnd = insn->GetOperand(i);
            if (opnd.IsList()) {
                /* e.g. call parameter lists: any member matching regNO is a use */
                auto &listOpnd = static_cast<ListOperand &>(opnd);
                for (auto listElem : listOpnd.GetOperands()) {
                    RegOperand *regOpnd = static_cast<RegOperand *>(listElem);
                    DEBUG_ASSERT(regOpnd != nullptr, "parameter operand must be RegOperand");
                    if (regNO == regOpnd->GetRegisterNumber()) {
                        (void)regUseInsnSet.insert(insn);
                    }
                }
                continue;
            } else if (opnd.IsMemoryAccessOperand()) {
                /* pre/post-indexed addressing writes the base register back: a redefinition */
                auto &memOpnd = static_cast<MemOperand &>(opnd);
                RegOperand *baseOpnd = memOpnd.GetBaseRegister();
                if (baseOpnd != nullptr && (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi) &&
                    (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed()) && baseOpnd->GetRegisterNumber() == regNO) {
                    findFinish = true;
                }
            }

            auto *regProp = md->opndMD[i];
            /* an explicit def of regNO also terminates the scan (after this insn is processed) */
            if (regProp->IsDef() && opnd.IsRegister() &&
                (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO)) {
                findFinish = true;
            }

            if (!regProp->IsUse() && !opnd.IsMemoryAccessOperand()) {
                continue;
            }

            if (opnd.IsMemoryAccessOperand()) {
                auto &memOpnd = static_cast<MemOperand &>(opnd);
                RegOperand *base = memOpnd.GetBaseRegister();
                RegOperand *index = memOpnd.GetIndexRegister();
                if ((base != nullptr && base->GetRegisterNumber() == regNO) ||
                    (index != nullptr && index->GetRegisterNumber() == regNO)) {
                    (void)regUseInsnSet.insert(insn);
                }
            } else if (opnd.IsConditionCode()) {
                /* condition codes are modelled as a use of the rflag pseudo register */
                Operand &rflagOpnd = cgFunc->GetOrCreateRflag();
                RegOperand &rflagReg = static_cast<RegOperand &>(rflagOpnd);
                if (rflagReg.GetRegisterNumber() == regNO) {
                    (void)regUseInsnSet.insert(insn);
                }
            } else if (opnd.IsRegister() && (static_cast<RegOperand &>(opnd).GetRegisterNumber() == regNO)) {
                (void)regUseInsnSet.insert(insn);
            }
        }

        if (findFinish) {
            break;
        }
    }
    return findFinish;
}
876
877 /*
878 * find insn using stack memory operand between startInsn and endInsn.
879 * startInsn and endInsn must be in same BB and startInsn and endInsn are included
880 */
FindMemUseBetweenInsn(uint32 offset,Insn * startInsn,const Insn * endInsn,InsnSet & memUseInsnSet) const881 bool AArch64ReachingDefinition::FindMemUseBetweenInsn(uint32 offset, Insn *startInsn, const Insn *endInsn,
882 InsnSet &memUseInsnSet) const
883 {
884 bool findFinish = false;
885 if (startInsn == nullptr || endInsn == nullptr) {
886 return findFinish;
887 }
888
889 DEBUG_ASSERT(startInsn->GetBB() == endInsn->GetBB(), "two insns must be in a same BB");
890 DEBUG_ASSERT(endInsn->GetId() >= startInsn->GetId(), "end ID must be greater than or equal to start ID");
891
892 for (Insn *insn = startInsn; insn != nullptr && insn != endInsn->GetNext(); insn = insn->GetNext()) {
893 if (!insn->IsMachineInstruction()) {
894 continue;
895 }
896
897 if (insn->IsCall()) {
898 if (insn->GetMachineOpcode() == MOP_asm) {
899 return true;
900 }
901 if (CallInsnClearDesignateStackRef(*insn, offset)) {
902 return true;
903 }
904 continue;
905 }
906
907 const InsnDesc *md = insn->GetDesc();
908 uint32 opndNum = insn->GetOperandSize();
909 for (uint32 i = 0; i < opndNum; ++i) {
910 Operand &opnd = insn->GetOperand(i);
911 if (!opnd.IsMemoryAccessOperand()) {
912 continue;
913 }
914
915 auto &memOpnd = static_cast<MemOperand &>(opnd);
916 RegOperand *base = memOpnd.GetBaseRegister();
917 if (base == nullptr || !IsFrameReg(*base)) {
918 continue;
919 }
920
921 DEBUG_ASSERT(memOpnd.GetIndexRegister() == nullptr, "offset must not be Register for frame MemOperand");
922 DEBUG_ASSERT(memOpnd.GetOffsetImmediate() != nullptr, "offset must be a immediate value");
923 int64 memOffset = memOpnd.GetOffsetImmediate()->GetValue();
924
925 if (insn->IsStore() || AArch64isa::IsPseudoInstruction(insn->GetMachineOpcode())) {
926 if (memOffset == offset) {
927 findFinish = true;
928 continue;
929 }
930 if (insn->IsStorePair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode())) {
931 findFinish = true;
932 continue;
933 }
934 }
935
936 if (!md->opndMD[i]->IsUse()) {
937 continue;
938 }
939
940 if (offset == memOffset) {
941 (void)memUseInsnSet.insert(insn);
942 } else if (insn->IsLoadPair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode())) {
943 (void)memUseInsnSet.insert(insn);
944 }
945 }
946
947 if (findFinish) {
948 break;
949 }
950 }
951 return findFinish;
952 }
953
954 /* find all definition for stack memory operand insn.opnd[index] */
FindDefForMemOpnd(Insn & insn,uint32 indexOrOffset,bool isOffset) const955 InsnSet AArch64ReachingDefinition::FindDefForMemOpnd(Insn &insn, uint32 indexOrOffset, bool isOffset) const
956 {
957 InsnSet defInsnSet;
958 int64 memOffSet = 0;
959 if (!isOffset) {
960 Operand &opnd = insn.GetOperand(indexOrOffset);
961 DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be MemOperand");
962
963 auto &memOpnd = static_cast<MemOperand &>(opnd);
964 RegOperand *base = memOpnd.GetBaseRegister();
965 RegOperand *indexReg = memOpnd.GetIndexRegister();
966
967 if (base == nullptr || !IsFrameReg(*base) || indexReg) {
968 return defInsnSet;
969 }
970 DEBUG_ASSERT(memOpnd.GetOffsetImmediate() != nullptr, "offset must be a immediate value");
971 memOffSet = memOpnd.GetOffsetImmediate()->GetOffsetValue();
972 } else {
973 memOffSet = indexOrOffset;
974 }
975 std::vector<Insn *> defInsnVec;
976 if (memGen[insn.GetBB()->GetId()]->TestBit(static_cast<uint32>(memOffSet / kMemZoomSize))) {
977 defInsnVec = FindMemDefBetweenInsn(memOffSet, insn.GetBB()->GetFirstInsn(), insn.GetPrev());
978 }
979
980 if (!defInsnVec.empty()) {
981 defInsnSet.insert(defInsnVec.begin(), defInsnVec.end());
982 return defInsnSet;
983 }
984 std::vector<VisitStatus> visitedBB(kMaxBBNum, kNotVisited);
985 if (insn.GetBB()->IsCleanup()) {
986 DFSFindDefForMemOpnd(*insn.GetBB(), memOffSet, visitedBB, defInsnSet);
987 if (defInsnSet.empty()) {
988 FOR_ALL_BB(bb, cgFunc)
989 {
990 if (bb->IsCleanup()) {
991 continue;
992 }
993
994 if (memGen[bb->GetId()]->TestBit(static_cast<uint32>(memOffSet / kMemZoomSize))) {
995 FindMemDefInBB(memOffSet, *bb, defInsnSet);
996 }
997 }
998 }
999 } else {
1000 DFSFindDefForMemOpnd(*insn.GetBB(), memOffSet, visitedBB, defInsnSet);
1001 }
1002
1003 return defInsnSet;
1004 }
1005
1006 /*
1007 * find all insn using stack memory operand insn.opnd[index]
1008 * secondMem is used to represent the second stack memory opernad in store pair insn
1009 */
FindUseForMemOpnd(Insn & insn,uint8 index,bool secondMem) const1010 InsnSet AArch64ReachingDefinition::FindUseForMemOpnd(Insn &insn, uint8 index, bool secondMem) const
1011 {
1012 Operand &opnd = insn.GetOperand(index);
1013 DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be MemOperand");
1014 auto &memOpnd = static_cast<MemOperand &>(opnd);
1015 RegOperand *base = memOpnd.GetBaseRegister();
1016
1017 InsnSet useInsnSet;
1018 if (base == nullptr || !IsFrameReg(*base)) {
1019 return useInsnSet;
1020 }
1021
1022 DEBUG_ASSERT(memOpnd.GetIndexRegister() == nullptr, "IndexRegister no nullptr");
1023 DEBUG_ASSERT(memOpnd.GetOffsetImmediate() != nullptr, "offset must be a immediate value");
1024 int64 memOffSet = memOpnd.GetOffsetImmediate()->GetOffsetValue();
1025 if (secondMem) {
1026 DEBUG_ASSERT(insn.IsStorePair(), "second MemOperand can only be defined in stp insn");
1027 memOffSet += GetEachMemSizeOfPair(insn.GetMachineOpcode());
1028 }
1029 /* memOperand may be redefined in current BB */
1030 bool findFinish = FindMemUseBetweenInsn(memOffSet, insn.GetNext(), insn.GetBB()->GetLastInsn(), useInsnSet);
1031 std::vector<bool> visitedBB(kMaxBBNum, false);
1032 if (!findFinish && memOut[insn.GetBB()->GetId()]->TestBit(static_cast<uint32>(memOffSet / kMemZoomSize))) {
1033 DFSFindUseForMemOpnd(*insn.GetBB(), memOffSet, visitedBB, useInsnSet, false);
1034 }
1035 if (!insn.GetBB()->IsCleanup() && firstCleanUpBB) {
1036 if (memUse[firstCleanUpBB->GetId()]->TestBit(static_cast<uint32>(memOffSet / kMemZoomSize))) {
1037 findFinish = FindMemUseBetweenInsn(memOffSet, firstCleanUpBB->GetFirstInsn(), firstCleanUpBB->GetLastInsn(),
1038 useInsnSet);
1039 if (findFinish ||
1040 !memOut[firstCleanUpBB->GetId()]->TestBit(static_cast<uint32>(memOffSet / kMemZoomSize))) {
1041 return useInsnSet;
1042 }
1043 }
1044 DFSFindUseForMemOpnd(*firstCleanUpBB, memOffSet, visitedBB, useInsnSet, false);
1045 }
1046 return useInsnSet;
1047 }
1048
1049 /*
1050 * initialize bb.gen and bb.use
1051 * if it is not computed in first time, bb.gen and bb.use must be cleared firstly
1052 */
InitGenUse(BB & bb,bool firstTime)1053 void AArch64ReachingDefinition::InitGenUse(BB &bb, bool firstTime)
1054 {
1055 if (!firstTime && (mode & kRDRegAnalysis)) {
1056 regGen[bb.GetId()]->ResetAllBit();
1057 regUse[bb.GetId()]->ResetAllBit();
1058 }
1059 if (!firstTime && (mode & kRDMemAnalysis)) {
1060 memGen[bb.GetId()]->ResetAllBit();
1061 memUse[bb.GetId()]->ResetAllBit();
1062 }
1063
1064 if (bb.IsEmpty()) {
1065 return;
1066 }
1067
1068 FOR_BB_INSNS(insn, (&bb))
1069 {
1070 if (!insn->IsMachineInstruction()) {
1071 continue;
1072 }
1073
1074 if (insn->GetMachineOpcode() == MOP_asm) {
1075 GenAllAsmDefRegs(bb, *insn, kAsmOutputListOpnd);
1076 GenAllAsmDefRegs(bb, *insn, kAsmClobberListOpnd);
1077 GenAllAsmUseRegs(bb, *insn, kAsmInputListOpnd);
1078 continue;
1079 }
1080 if (insn->IsCall() || insn->IsTailCall()) {
1081 GenAllCallerSavedRegs(bb, *insn);
1082 InitMemInfoForClearStackCall(*insn);
1083 }
1084
1085 const InsnDesc *md = insn->GetDesc();
1086 uint32 opndNum = insn->GetOperandSize();
1087 for (uint32 i = 0; i < opndNum; ++i) {
1088 Operand &opnd = insn->GetOperand(i);
1089 auto *regProp = md->opndMD[i];
1090 if (opnd.IsList() && (mode & kRDRegAnalysis)) {
1091 DEBUG_ASSERT(regProp->IsUse(), "ListOperand is used in insn");
1092 InitInfoForListOpnd(bb, opnd);
1093 } else if (opnd.IsMemoryAccessOperand()) {
1094 InitInfoForMemOperand(*insn, opnd, regProp->IsDef());
1095 } else if (opnd.IsConditionCode() && (mode & kRDRegAnalysis)) {
1096 DEBUG_ASSERT(regProp->IsUse(), "condition code is used in insn");
1097 InitInfoForConditionCode(bb);
1098 } else if (opnd.IsRegister() && (mode & kRDRegAnalysis)) {
1099 InitInfoForRegOpnd(bb, opnd, regProp->IsDef());
1100 }
1101 }
1102 }
1103 }
1104
InitMemInfoForClearStackCall(Insn & callInsn)1105 void AArch64ReachingDefinition::InitMemInfoForClearStackCall(Insn &callInsn)
1106 {
1107 if (!(mode & kRDMemAnalysis) || !callInsn.IsClearDesignateStackCall()) {
1108 return;
1109 }
1110 int64 firstOffset = callInsn.GetClearStackOffset(kFirstClearMemIndex);
1111 constexpr int64 defaultValOfClearMemOffset = -1;
1112 if (firstOffset != defaultValOfClearMemOffset) {
1113 memGen[callInsn.GetBB()->GetId()]->SetBit(firstOffset / kMemZoomSize);
1114 }
1115 int64 secondOffset = callInsn.GetClearStackOffset(kSecondClearMemIndex);
1116 if (secondOffset != defaultValOfClearMemOffset) {
1117 memGen[callInsn.GetBB()->GetId()]->SetBit(static_cast<uint32>(secondOffset / kMemZoomSize));
1118 }
1119 }
1120
InitInfoForMemOperand(Insn & insn,Operand & opnd,bool isDef)1121 void AArch64ReachingDefinition::InitInfoForMemOperand(Insn &insn, Operand &opnd, bool isDef)
1122 {
1123 DEBUG_ASSERT(opnd.IsMemoryAccessOperand(), "opnd must be MemOperand");
1124 MemOperand &memOpnd = static_cast<MemOperand &>(opnd);
1125 RegOperand *base = memOpnd.GetBaseRegister();
1126 RegOperand *index = memOpnd.GetIndexRegister();
1127
1128 if (base == nullptr) {
1129 return;
1130 }
1131 if ((mode & kRDMemAnalysis) && IsFrameReg(*base)) {
1132 if (index != nullptr) {
1133 SetAnalysisMode(kRDRegAnalysis);
1134 return;
1135 }
1136 CHECK_FATAL(index == nullptr, "Existing [x29 + index] Memory Address");
1137 DEBUG_ASSERT(memOpnd.GetOffsetImmediate(), "offset must be a immediate value");
1138 int64 offsetVal = memOpnd.GetOffsetImmediate()->GetOffsetValue();
1139 if ((offsetVal % kMemZoomSize) != 0) {
1140 SetAnalysisMode(kRDRegAnalysis);
1141 }
1142
1143 if (!isDef) {
1144 memUse[insn.GetBB()->GetId()]->SetBit(offsetVal / kMemZoomSize);
1145 if (insn.IsLoadPair()) {
1146 int64 nextMemOffset = offsetVal + GetEachMemSizeOfPair(insn.GetMachineOpcode());
1147 memUse[insn.GetBB()->GetId()]->SetBit(nextMemOffset / kMemZoomSize);
1148 }
1149 } else if (isDef) {
1150 memGen[insn.GetBB()->GetId()]->SetBit(offsetVal / kMemZoomSize);
1151 if (insn.IsStorePair()) {
1152 int64 nextMemOffset = offsetVal + GetEachMemSizeOfPair(insn.GetMachineOpcode());
1153 memGen[insn.GetBB()->GetId()]->SetBit(nextMemOffset / kMemZoomSize);
1154 }
1155 }
1156 }
1157
1158 if ((mode & kRDRegAnalysis) != 0) {
1159 regUse[insn.GetBB()->GetId()]->SetBit(base->GetRegisterNumber());
1160 if (index != nullptr) {
1161 regUse[insn.GetBB()->GetId()]->SetBit(index->GetRegisterNumber());
1162 }
1163 if (memOpnd.GetAddrMode() == MemOperand::kAddrModeBOi && (memOpnd.IsPostIndexed() || memOpnd.IsPreIndexed())) {
1164 /* Base operand has changed. */
1165 regGen[insn.GetBB()->GetId()]->SetBit(base->GetRegisterNumber());
1166 }
1167 }
1168 }
1169
InitInfoForListOpnd(const BB & bb,Operand & opnd)1170 void AArch64ReachingDefinition::InitInfoForListOpnd(const BB &bb, Operand &opnd)
1171 {
1172 ListOperand *listOpnd = static_cast<ListOperand *>(&opnd);
1173 for (auto listElem : listOpnd->GetOperands()) {
1174 RegOperand *regOpnd = static_cast<RegOperand *>(listElem);
1175 DEBUG_ASSERT(regOpnd != nullptr, "used Operand in call insn must be Register");
1176 regUse[bb.GetId()]->SetBit(regOpnd->GetRegisterNumber());
1177 }
1178 }
1179
InitInfoForConditionCode(const BB & bb)1180 void AArch64ReachingDefinition::InitInfoForConditionCode(const BB &bb)
1181 {
1182 Operand &rflagOpnd = cgFunc->GetOrCreateRflag();
1183 RegOperand &rflagReg = static_cast<RegOperand &>(rflagOpnd);
1184 regUse[bb.GetId()]->SetBit(rflagReg.GetRegisterNumber());
1185 }
1186
InitInfoForRegOpnd(const BB & bb,Operand & opnd,bool isDef)1187 void AArch64ReachingDefinition::InitInfoForRegOpnd(const BB &bb, Operand &opnd, bool isDef)
1188 {
1189 RegOperand *regOpnd = static_cast<RegOperand *>(&opnd);
1190 if (!isDef) {
1191 regUse[bb.GetId()]->SetBit(regOpnd->GetRegisterNumber());
1192 } else {
1193 regGen[bb.GetId()]->SetBit(regOpnd->GetRegisterNumber());
1194 }
1195 }
1196
GetStackSize() const1197 int32 AArch64ReachingDefinition::GetStackSize() const
1198 {
1199 const int sizeofFplr = kDivide2 * kAarch64IntregBytelen;
1200 return static_cast<int32>(static_cast<AArch64MemLayout *>(cgFunc->GetMemlayout())->RealStackFrameSize() +
1201 sizeofFplr);
1202 }
1203
/* return true if regNO is a caller-saved register per the AArch64 ABI
 * (i.e. its value is not preserved across a call by the callee) */
bool AArch64ReachingDefinition::IsCallerSavedReg(uint32 regNO) const
{
    return AArch64Abi::IsCallerSaveReg(static_cast<AArch64reg>(regNO));
}
1208
/* return the byte size of ONE element accessed by a load/store-pair opcode
 * (callers add it to the base offset to reach the pair's second stack slot);
 * returns 0 for opcodes that are not pair accesses */
int64 AArch64ReachingDefinition::GetEachMemSizeOfPair(MOperator opCode) const
{
    switch (opCode) {
        /* 32-bit (word / single-precision) pair variants: 4 bytes per element */
        case MOP_wstp:
        case MOP_sstp:
        case MOP_wstlxp:
        case MOP_wldp:
        case MOP_xldpsw:
        case MOP_sldp:
        case MOP_wldaxp:
            return kWordByteNum;
        /* 64-bit (double-word / double-precision) pair variants: 8 bytes per element */
        case MOP_xstp:
        case MOP_dstp:
        case MOP_xstlxp:
        case MOP_xldp:
        case MOP_dldp:
        case MOP_xldaxp:
            return kDoubleWordByteNum;
        default:
            return 0;
    }
}
1231 } /* namespace maplebe */
1232