1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #if TARGAARCH64
17 #include "aarch64_reaching.h"
18 #include "aarch64_isa.h"
19 #elif TARGRISCV64
20 #include "riscv64_reaching.h"
21 #endif
22 #if TARGARM32
23 #include "arm32_reaching.h"
24 #endif
25 #include "cg_option.h"
26 #include "cgfunc.h"
27 #include "cg.h"
28
29 /*
30  * This phase builds bb->in and bb->out information for stack memOperands and RegOperands. Each bit in DataInfo
31  * represents whether the register or stack memory location is defined (live) or not. To save storage space, the
32  * stack offset is divided by 4, since the offset is always a multiple of 4.
33  * The algorithm mainly includes 2 parts:
34  * 1. initialize each BB
35  * (1) insert pseudoInsns for function parameters, ehBB, and return R0/V0
36  * (2) init bb->gen, bb->use, bb->out
37  * 2. build in and out
38  * (1) In[BB] = Union of all out[Parents(bb)]
39  * (2) Out[BB] = gen[BB] union in[BB]
40  * Additionally, this phase provides several common functions about data flow. Users can call these functions
41  * conveniently in optimization phases.
42  */
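/*
 * Illustrative usage sketch (not part of the phase itself): once AnalysisStart() has run and stored the
 * analysis via cgFunc->SetRD(), a later optimization phase can query it through the CGFunc handle, e.g.
 *     ReachingDefinition *rd = cgFunc.GetRD();
 *     InsnSet uses = rd->FindUseForRegOpnd(defInsn, opndIdx, false);            // uses of the register defined at opndIdx
 *     std::vector<Insn *> defs = rd->FindRegDefBetweenInsn(regNO, start, end);  // defs of regNO inside one BB
 * defInsn, opndIdx, regNO, start and end are placeholders supplied by the caller.
 */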
43 namespace maplebe {
44 ReachingDefinition::ReachingDefinition(CGFunc &func, MemPool &memPool)
45 : AnalysisResult(&memPool),
46 cgFunc(&func),
47 rdAlloc(&memPool),
48 stackMp(func.GetStackMemPool()),
49 pseudoInsns(rdAlloc.Adapter()),
50 kMaxBBNum(cgFunc->NumBBs() + 1),
51 normalBBSet(rdAlloc.Adapter()),
52 cleanUpBBSet(rdAlloc.Adapter())
53 {
54 }
55
56 /* check whether the opnd is stack register or not */
57 bool ReachingDefinition::IsFrameReg(const Operand &opnd) const
58 {
59 if (!opnd.IsRegister()) {
60 return false;
61 }
62     auto &reg = static_cast<const RegOperand &>(opnd);
63 return cgFunc->IsFrameReg(reg);
64 }
65
66 /* initialize bb->out; bb->out only includes the generated DataInfo */
67 void ReachingDefinition::InitOut(const BB &bb)
68 {
69 if (mode & kRDRegAnalysis) {
70 *regOut[bb.GetId()] = *regGen[bb.GetId()];
71 }
72 if (mode & kRDMemAnalysis) {
73 *memOut[bb.GetId()] = *memGen[bb.GetId()];
74 }
75 }
76
77 /* when the DataInfo will not be used later, it should be cleared */
78 void ReachingDefinition::ClearDefUseInfo()
79 {
80 for (auto insn : pseudoInsns) {
81 /* Keep return pseudo to extend the return register liveness to 'ret'.
82 * Backward propagation can move the return register definition far from the return.
83 */
84 #ifndef TARGX86_64
85 if (Triple::GetTriple().GetArch() == Triple::ArchType::x64) {
86 if (insn->GetMachineOpcode() == MOP_pseudo_ret_int || insn->GetMachineOpcode() == MOP_pseudo_ret_float) {
87 continue;
88 }
89 }
90 #endif
91 insn->GetBB()->RemoveInsn(*insn);
92 }
93 FOR_ALL_BB(bb, cgFunc) {
94 delete (regGen[bb->GetId()]);
95 regGen[bb->GetId()] = nullptr;
96 delete (regUse[bb->GetId()]);
97 regUse[bb->GetId()] = nullptr;
98 delete (regIn[bb->GetId()]);
99 regIn[bb->GetId()] = nullptr;
100 delete (regOut[bb->GetId()]);
101 regOut[bb->GetId()] = nullptr;
102 delete (memGen[bb->GetId()]);
103 memGen[bb->GetId()] = nullptr;
104 delete (memUse[bb->GetId()]);
105 memUse[bb->GetId()] = nullptr;
106 delete (memIn[bb->GetId()]);
107 memIn[bb->GetId()] = nullptr;
108 delete (memOut[bb->GetId()]);
109 memOut[bb->GetId()] = nullptr;
110 }
111 regGen.clear();
112 regGen.shrink_to_fit();
113 regUse.clear();
114 regUse.shrink_to_fit();
115 regIn.clear();
116 regIn.shrink_to_fit();
117 regOut.clear();
118 regOut.shrink_to_fit();
119 memGen.clear();
120 memGen.shrink_to_fit();
121 memUse.clear();
122 memUse.shrink_to_fit();
123 memIn.clear();
124 memIn.shrink_to_fit();
125 memOut.clear();
126 memOut.shrink_to_fit();
127 cgFunc->SetRD(nullptr);
128 }
129
130 /*
131  * find the use insns of a register.
132  * input:
133  *   insn: the insn in which the register is defined
134  *   indexOrRegNO: the register number, or the index of the register operand in insn
135  *   isRegNO: if true, indexOrRegNO is a register number; otherwise it is an operand index
136  * return:
137  *   the set of insns that use the register
138  */
139 InsnSet ReachingDefinition::FindUseForRegOpnd(Insn &insn, uint32 indexOrRegNO, bool isRegNO) const
140 {
141 InsnSet useInsnSet;
142 uint32 regNO = indexOrRegNO;
143 if (!isRegNO) {
144 Operand &opnd = insn.GetOperand(indexOrRegNO);
145         auto &regOpnd = static_cast<RegOperand &>(opnd);
146 regNO = regOpnd.GetRegisterNumber();
147 }
148 /* register may be redefined in current bb */
149 bool findFinish = FindRegUseBetweenInsn(regNO, insn.GetNext(), insn.GetBB()->GetLastInsn(), useInsnSet);
150 std::vector<bool> visitedBB(kMaxBBNum, false);
151 if (!findFinish && regOut[insn.GetBB()->GetId()]->TestBit(regNO)) {
152 DFSFindUseForRegOpnd(*insn.GetBB(), regNO, visitedBB, useInsnSet, false);
153 }
154
155 if (!insn.GetBB()->IsCleanup() && firstCleanUpBB != nullptr) {
156 if (regUse[firstCleanUpBB->GetId()]->TestBit(regNO)) {
157 findFinish =
158 FindRegUseBetweenInsn(regNO, firstCleanUpBB->GetFirstInsn(), firstCleanUpBB->GetLastInsn(), useInsnSet);
159 if (findFinish || !regOut[firstCleanUpBB->GetId()]->TestBit(regNO)) {
160 return useInsnSet;
161 }
162 }
163 DFSFindUseForRegOpnd(*firstCleanUpBB, regNO, visitedBB, useInsnSet, false);
164 }
165
166 return useInsnSet;
167 }
168
169 /*
170  * find the use insns of a register recursively (DFS over successor BBs).
171  * input:
172  *   startBB: start searching for use insns from startBB
173  *   regNO: the number of the register to be found
174  *   visitedBB: records the BBs that have been visited
175  *   useInsnSet: the use insns of the register are saved in this set
176  */
177 void ReachingDefinition::DFSFindUseForRegOpnd(const BB &startBB, uint32 regNO, std::vector<bool> &visitedBB,
178 InsnSet &useInsnSet, bool onlyFindForEhSucc = false) const
179 {
180 if (!onlyFindForEhSucc) {
181 for (auto succBB : startBB.GetSuccs()) {
182 if (!regIn[succBB->GetId()]->TestBit(regNO)) {
183 continue;
184 }
185 if (visitedBB[succBB->GetId()]) {
186 continue;
187 }
188 visitedBB[succBB->GetId()] = true;
189 bool findFinish = false;
190 if (regUse[succBB->GetId()]->TestBit(regNO)) {
191 findFinish = FindRegUseBetweenInsn(regNO, succBB->GetFirstInsn(), succBB->GetLastInsn(), useInsnSet);
192 } else if (regGen[succBB->GetId()]->TestBit(regNO)) {
193 findFinish = true;
194 }
195 if (!findFinish && regOut[succBB->GetId()]->TestBit(regNO)) {
196 DFSFindUseForRegOpnd(*succBB, regNO, visitedBB, useInsnSet, false);
197 }
198 }
199 }
200 }
201
202 /* check whether the register defined in regDefInsn has any use insns */
203 bool ReachingDefinition::RegHasUsePoint(uint32 regNO, Insn &regDefInsn) const
204 {
205 InsnSet useInsnSet;
206 bool findFinish = FindRegUseBetweenInsn(regNO, regDefInsn.GetNext(), regDefInsn.GetBB()->GetLastInsn(), useInsnSet);
207 if (!useInsnSet.empty()) {
208 return true;
209 }
210 if (!findFinish) {
211 std::vector<bool> visitedBB(kMaxBBNum, false);
212 return RegIsUsedInOtherBB(*regDefInsn.GetBB(), regNO, visitedBB);
213 }
214 return false;
215 }
216
217 /* check whether the register is used in BBs other than startBB */
218 bool ReachingDefinition::RegIsUsedInOtherBB(const BB &startBB, uint32 regNO, std::vector<bool> &visitedBB) const
219 {
220 InsnSet useInsnSet;
221 for (auto succBB : startBB.GetSuccs()) {
222 if (!regIn[succBB->GetId()]->TestBit(regNO)) {
223 continue;
224 }
225 if (visitedBB[succBB->GetId()]) {
226 continue;
227 }
228 visitedBB[succBB->GetId()] = true;
229 bool findFinish = false;
230 if (regUse[succBB->GetId()]->TestBit(regNO)) {
231 if (!regGen[succBB->GetId()]->TestBit(regNO)) {
232 return true;
233 }
234 useInsnSet.clear();
235 findFinish = FindRegUseBetweenInsn(regNO, succBB->GetFirstInsn(), succBB->GetLastInsn(), useInsnSet);
236 if (!useInsnSet.empty()) {
237 return true;
238 }
239 } else if (regGen[succBB->GetId()]->TestBit(regNO)) {
240 findFinish = true;
241 }
242 if (!findFinish && regOut[succBB->GetId()]->TestBit(regNO)) {
243 if (RegIsUsedInOtherBB(*succBB, regNO, visitedBB)) {
244 return true;
245 }
246 }
247 }
248 return false;
249 }
250
251 bool ReachingDefinition::RegIsUsedInCleanUpBB(uint32 regNO) const
252 {
253 if (firstCleanUpBB == nullptr) {
254 return false;
255 }
256 InsnSet useInsnSet;
257 if (regUse[firstCleanUpBB->GetId()]->TestBit(regNO)) {
258 bool findFinish =
259 FindRegUseBetweenInsn(regNO, firstCleanUpBB->GetFirstInsn(), firstCleanUpBB->GetLastInsn(), useInsnSet);
260 if (!useInsnSet.empty()) {
261 return true;
262 }
263 if (findFinish) {
264 return false;
265 }
266 }
267
268 std::vector<bool> visitedBB(kMaxBBNum, false);
269 DFSFindUseForRegOpnd(*firstCleanUpBB, regNO, visitedBB, useInsnSet, false);
270 if (useInsnSet.empty()) {
271 return true;
272 }
273
274 return false;
275 }
276
277 /*
278  * find the use insns of a stack memory operand recursively (DFS over successor BBs).
279  * input:
280  *   startBB: start searching for use insns from startBB
281  *   offset: the stack offset of the memory to be found
282  *   visitedBB: records the BBs that have been visited
283  *   useInsnSet: the use insns of the stack memory operand are saved in this set
284  */
285 void ReachingDefinition::DFSFindUseForMemOpnd(const BB &startBB, uint32 offset, std::vector<bool> &visitedBB,
286 InsnSet &useInsnSet, bool onlyFindForEhSucc = false) const
287 {
288 if (!onlyFindForEhSucc) {
289 for (auto succBB : startBB.GetSuccs()) {
290 if (!memIn[succBB->GetId()]->TestBit(offset / kMemZoomSize)) {
291 continue;
292 }
293 if (visitedBB[succBB->GetId()]) {
294 continue;
295 }
296 visitedBB[succBB->GetId()] = true;
297 bool findFinish = false;
298 if (memUse[succBB->GetId()]->TestBit(offset / kMemZoomSize)) {
299 findFinish = FindMemUseBetweenInsn(offset, succBB->GetFirstInsn(), succBB->GetLastInsn(), useInsnSet);
300 } else if (memGen[succBB->GetId()]->TestBit(offset / kMemZoomSize)) {
301 findFinish = true;
302 }
303 if (!findFinish && memOut[succBB->GetId()]->TestBit(offset / kMemZoomSize)) {
304 DFSFindUseForMemOpnd(*succBB, offset, visitedBB, useInsnSet);
305 }
306 }
307 }
308 }
309
310 /* Out[BB] = gen[BB] union in[BB]. if bb->out changed, return true. */
311 bool ReachingDefinition::GenerateOut(const BB &bb)
312 {
313 bool outInfoChanged = false;
314 if (mode & kRDRegAnalysis) {
315 LocalMapleAllocator alloc(stackMp);
316 DataInfo &bbRegOutBak = regOut[bb.GetId()]->Clone(alloc);
317 *regOut[bb.GetId()] = *(regIn[bb.GetId()]);
318 regOut[bb.GetId()]->OrBits(*regGen[bb.GetId()]);
319 if (!regOut[bb.GetId()]->IsEqual(bbRegOutBak)) {
320 outInfoChanged = true;
321 }
322 }
323
324 if (mode & kRDMemAnalysis) {
325 LocalMapleAllocator alloc(stackMp);
326 DataInfo &bbMemOutBak = memOut[bb.GetId()]->Clone(alloc);
327 *memOut[bb.GetId()] = *memIn[bb.GetId()];
328 memOut[bb.GetId()]->OrBits(*memGen[bb.GetId()]);
329 if (!memOut[bb.GetId()]->IsEqual(bbMemOutBak)) {
330 outInfoChanged = true;
331 }
332 }
333 return outInfoChanged;
334 }
335
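/* incremental version of GenerateOut: only the DataInfo elements whose indexes are in infoIndex are recomputed;
 * isReg selects register info (true) or stack memory info (false). Returns true if bb->out changed. */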
336 bool ReachingDefinition::GenerateOut(const BB &bb, const std::set<uint32> &infoIndex, const bool isReg)
337 {
338 bool outInfoChanged = false;
339 if (isReg) {
340 for (auto index : infoIndex) {
341 uint64 bbRegOutBak = regOut[bb.GetId()]->GetElem(index);
342 regOut[bb.GetId()]->SetElem(index, regIn[bb.GetId()]->GetElem(index));
343 regOut[bb.GetId()]->OrDesignateBits(*regGen[bb.GetId()], index);
344 if (!outInfoChanged && (bbRegOutBak != regOut[bb.GetId()]->GetElem(index))) {
345 outInfoChanged = true;
346 }
347 }
348 } else {
349 for (auto index : infoIndex) {
350 uint64 bbMemOutBak = memOut[bb.GetId()]->GetElem(index);
351 memOut[bb.GetId()]->SetElem(index, memIn[bb.GetId()]->GetElem(index));
352 memOut[bb.GetId()]->OrDesignateBits(*memGen[bb.GetId()], index);
353 if (bbMemOutBak != memOut[bb.GetId()]->GetElem(index)) {
354 outInfoChanged = true;
355 }
356 }
357 }
358 return outInfoChanged;
359 }
360
361 /* In[BB] = Union all of out[Parents(bb)]. return true if bb->in changed. */
362 bool ReachingDefinition::GenerateIn(const BB &bb)
363 {
364 bool inInfoChanged = false;
365 if (mode & kRDRegAnalysis) {
366 LocalMapleAllocator alloc(stackMp);
367 DataInfo &bbRegInBak = regIn[bb.GetId()]->Clone(alloc);
368 for (auto predBB : bb.GetPreds()) {
369 regIn[bb.GetId()]->OrBits(*regOut[predBB->GetId()]);
370 }
371
372 if (!regIn[bb.GetId()]->IsEqual(bbRegInBak)) {
373 inInfoChanged = true;
374 }
375 }
376 if (mode & kRDMemAnalysis) {
377 LocalMapleAllocator alloc(stackMp);
378 DataInfo &memInBak = memIn[bb.GetId()]->Clone(alloc);
379 for (auto predBB : bb.GetPreds()) {
380 memIn[bb.GetId()]->OrBits(*memOut[predBB->GetId()]);
381 }
382
383 if (!memIn[bb.GetId()]->IsEqual(memInBak)) {
384 inInfoChanged = true;
385 }
386 }
387 return inInfoChanged;
388 }
389
390 /* In[BB] = Union all of out[Parents(bb)]. return true if bb->in changed. */
391 bool ReachingDefinition::GenerateIn(const BB &bb, const std::set<uint32> &infoIndex, const bool isReg)
392 {
393 bool inInfoChanged = false;
394
395 if (isReg) {
396 for (auto index : infoIndex) {
397 uint64 bbRegInBak = regIn[bb.GetId()]->GetElem(index);
398 regIn[bb.GetId()]->SetElem(index, 0ULL);
399 for (auto predBB : bb.GetPreds()) {
400 regIn[bb.GetId()]->OrDesignateBits(*regOut[predBB->GetId()], index);
401 }
402
403 if (bbRegInBak != regIn[bb.GetId()]->GetElem(index)) {
404 inInfoChanged = true;
405 }
406 }
407 } else {
408 for (auto index : infoIndex) {
409 uint64 bbMemInBak = memIn[bb.GetId()]->GetElem(index);
410 memIn[bb.GetId()]->SetElem(index, 0ULL);
411 for (auto predBB : bb.GetPreds()) {
412 memIn[bb.GetId()]->OrDesignateBits(*memOut[predBB->GetId()], index);
413 }
414
415 if (bbMemInBak != memIn[bb.GetId()]->GetElem(index)) {
416 inInfoChanged = true;
417 }
418 }
419 }
420 return inInfoChanged;
421 }
422
423 /* In[firstCleanUpBB] = Union all of out[bbNormalSet] */
424 bool ReachingDefinition::GenerateInForFirstCleanUpBB()
425 {
426 CHECK_NULL_FATAL(firstCleanUpBB);
427 if (mode & kRDRegAnalysis) {
428 regIn[firstCleanUpBB->GetId()]->ResetAllBit();
429 }
430 if (mode & kRDMemAnalysis) {
431 memIn[firstCleanUpBB->GetId()]->ResetAllBit();
432 }
433
434 for (auto normalBB : normalBBSet) {
435 if (mode & kRDRegAnalysis) {
436 regIn[firstCleanUpBB->GetId()]->OrBits(*regOut[normalBB->GetId()]);
437 }
438
439 if (mode & kRDMemAnalysis) {
440 memIn[firstCleanUpBB->GetId()]->OrBits(*memOut[normalBB->GetId()]);
441 }
442 }
443
444 return ((regIn[firstCleanUpBB->GetId()] != nullptr && regIn[firstCleanUpBB->GetId()]->Size() > 0) ||
445 (memIn[firstCleanUpBB->GetId()] != nullptr && memIn[firstCleanUpBB->GetId()]->Size() > 0));
446 }
447
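/* incremental version of GenerateInForFirstCleanUpBB: only the designated DataInfo elements of every normal BB's
 * out are unioned into firstCleanUpBB->in; returns true if any of those elements changed */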
448 bool ReachingDefinition::GenerateInForFirstCleanUpBB(bool isReg, const std::set<uint32> &infoIndex)
449 {
450 CHECK_NULL_FATAL(firstCleanUpBB);
451 bool inInfoChanged = false;
452 if (isReg) {
453 for (auto index : infoIndex) {
454 uint64 regInElemBak = regIn[firstCleanUpBB->GetId()]->GetElem(index);
455 regIn[firstCleanUpBB->GetId()]->SetElem(index, 0ULL);
456 for (auto &normalBB : normalBBSet) {
457 regIn[firstCleanUpBB->GetId()]->OrDesignateBits(*regOut[normalBB->GetId()], index);
458 }
459 if (!inInfoChanged && (regIn[firstCleanUpBB->GetId()]->GetElem(index) != regInElemBak)) {
460 inInfoChanged = true;
461 }
462 }
463 } else {
464 for (auto index : infoIndex) {
465 uint64 memInElemBak = memIn[firstCleanUpBB->GetId()]->GetElem(index);
466 memIn[firstCleanUpBB->GetId()]->SetElem(index, 0ULL);
467 for (auto &normalBB : normalBBSet) {
468 memIn[firstCleanUpBB->GetId()]->OrDesignateBits(*memOut[normalBB->GetId()], index);
469 }
470 if (!inInfoChanged && (memIn[firstCleanUpBB->GetId()]->GetElem(index) != memInElemBak)) {
471 inInfoChanged = true;
472 }
473 }
474 }
475 return inInfoChanged;
476 }
477
478 /* allocate memory for DataInfo of bb */
479 void ReachingDefinition::InitRegAndMemInfo(const BB &bb)
480 {
481 if (mode & kRDRegAnalysis) {
482 const uint32 kMaxRegCount = cgFunc->GetMaxVReg();
483 regGen[bb.GetId()] = new DataInfo(kMaxRegCount, rdAlloc);
484 regUse[bb.GetId()] = new DataInfo(kMaxRegCount, rdAlloc);
485 regIn[bb.GetId()] = new DataInfo(kMaxRegCount, rdAlloc);
486 regOut[bb.GetId()] = new DataInfo(kMaxRegCount, rdAlloc);
487 }
488
489 if (mode & kRDMemAnalysis) {
490 const int32 kStackSize = GetStackSize();
491 memGen[bb.GetId()] = new DataInfo((kStackSize / kMemZoomSize), rdAlloc);
492 memUse[bb.GetId()] = new DataInfo((kStackSize / kMemZoomSize), rdAlloc);
493 memIn[bb.GetId()] = new DataInfo((kStackSize / kMemZoomSize), rdAlloc);
494 memOut[bb.GetId()] = new DataInfo((kStackSize / kMemZoomSize), rdAlloc);
495 }
496 }
497
498 /* insert pseudoInsns for function parameters, ehBB, and return R0/V0. init bb->gen, bb->use, bb->out */
499 void ReachingDefinition::Initialize()
500 {
501 InitDataSize();
502 AddRetPseudoInsns();
503 FOR_ALL_BB(bb, cgFunc) {
504 InitRegAndMemInfo(*bb);
505 }
506 FOR_ALL_BB(bb, cgFunc) {
507 if (bb == cgFunc->GetFirstBB()) {
508 InitStartGen();
509 }
510 InitGenUse(*bb);
511 InitOut(*bb);
512
513 if (bb->IsCleanup()) {
514 if (bb->GetFirstStmt() == cgFunc->GetCleanupLabel()) {
515 firstCleanUpBB = bb;
516 }
517 (void)cleanUpBBSet.insert(bb);
518 } else {
519 (void)normalBBSet.insert(bb);
520 }
521 }
522 maxInsnNO = 0;
523 FOR_ALL_BB(bb, cgFunc) {
524 FOR_BB_INSNS(insn, bb) {
525 insn->SetId(maxInsnNO);
526 maxInsnNO += kInsnNoInterval;
527 }
528 }
529 }
530
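/* resize the per-BB DataInfo vectors; they are indexed by BB id */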
531 void ReachingDefinition::InitDataSize()
532 {
533 /* to visit vec[cgFunc->NumBBs()], size should be cgFunc->NumBBs() + 1 */
534 const uint32 dataSize = cgFunc->NumBBs() + 1;
535 regIn.resize(dataSize);
536 regOut.resize(dataSize);
537 regGen.resize(dataSize);
538 regUse.resize(dataSize);
539 memIn.resize(dataSize);
540 memOut.resize(dataSize);
541 memGen.resize(dataSize);
542 memUse.resize(dataSize);
543 }
544
545 /* compute bb->in, bb->out for each BB except the cleanup BBs */
546 void ReachingDefinition::BuildInOutForFuncBody()
547 {
548 std::unordered_set<BB *> normalBBSetBak(normalBBSet.begin(), normalBBSet.end());
549 std::unordered_set<BB *>::iterator setItr;
550 while (!normalBBSetBak.empty()) {
551 setItr = normalBBSetBak.begin();
552 BB *bb = *setItr;
553 DEBUG_ASSERT(bb != nullptr, "null ptr check");
554 (void)normalBBSetBak.erase(setItr);
555
556 if (GenerateIn(*bb)) {
557 if (GenerateOut(*bb)) {
558 for (auto succ : bb->GetSuccs()) {
559 (void)normalBBSetBak.insert(succ);
560 }
561 }
562 }
563 }
564 DEBUG_ASSERT(normalBBSetBak.empty(), "CG internal error.");
565 }
566
567 /* if bb->out changed, update in and out */
568 void ReachingDefinition::UpdateInOut(BB &changedBB)
569 {
570 InitGenUse(changedBB, false);
571 if (!GenerateOut(changedBB)) {
572 return;
573 }
574
575 std::unordered_set<BB *> bbSet;
576 std::unordered_set<BB *>::iterator setItr;
577
578 for (auto succ : changedBB.GetSuccs()) {
579 (void)bbSet.insert(succ);
580 }
581
582 while (!bbSet.empty()) {
583 setItr = bbSet.begin();
584 BB *bb = *setItr;
585 DEBUG_ASSERT(bb != nullptr, "null ptr check");
586 bbSet.erase(setItr);
587
588 if (GenerateIn(*bb)) {
589 if (GenerateOut(*bb)) {
590 for (auto succ : bb->GetSuccs()) {
591 (void)bbSet.insert(succ);
592 }
593 }
594 }
595 }
596
597 if (!changedBB.IsCleanup() && firstCleanUpBB != nullptr) {
598 BuildInOutForCleanUpBB();
599 }
600 }
601
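/* incremental version of UpdateInOut: recompute gen/use of changedBB, diff the new gen against the old one to get
 * the changed DataInfo element indexes, and propagate only those elements to the successors (and to the cleanup
 * BBs when necessary) */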
602 void ReachingDefinition::UpdateInOut(BB &changedBB, bool isReg)
603 {
604 std::set<uint32> changedInfoIndex;
605 if (isReg) {
606 LocalMapleAllocator alloc(stackMp);
607 DataInfo &genInfoBak = regGen[changedBB.GetId()]->Clone(alloc);
608 InitGenUse(changedBB, false);
609 genInfoBak.EorBits(*regGen[changedBB.GetId()]);
610 genInfoBak.GetNonZeroElemsIndex(changedInfoIndex);
611 } else {
612 LocalMapleAllocator alloc(stackMp);
613 DataInfo &genInfoBak = memGen[changedBB.GetId()]->Clone(alloc);
614 InitGenUse(changedBB, false);
615 genInfoBak.EorBits(*memGen[changedBB.GetId()]);
616 genInfoBak.GetNonZeroElemsIndex(changedInfoIndex);
617 }
618 if (changedInfoIndex.empty()) {
619 return;
620 }
621 if (!GenerateOut(changedBB, changedInfoIndex, isReg)) {
622 return;
623 }
624 std::set<BB *, BBIdCmp> bbSet;
625 std::set<BB *, BBIdCmp>::iterator setItr;
626 for (auto &succ : changedBB.GetSuccs()) {
627 (void)bbSet.insert(succ);
628 }
629 while (!bbSet.empty()) {
630 setItr = bbSet.begin();
631 BB *bb = *setItr;
632 bbSet.erase(setItr);
633 if (GenerateIn(*bb, changedInfoIndex, isReg)) {
634 if (GenerateOut(*bb, changedInfoIndex, isReg)) {
635 for (auto &succ : bb->GetSuccs()) {
636 (void)bbSet.insert(succ);
637 }
638 }
639 }
640 }
641
642 if (!changedBB.IsCleanup() && firstCleanUpBB != nullptr) {
643 BuildInOutForCleanUpBB(isReg, changedInfoIndex);
644 }
645 }
646
647 /* compute bb->in, bb->out for cleanup BBs */
648 void ReachingDefinition::BuildInOutForCleanUpBB()
649 {
650 DEBUG_ASSERT(firstCleanUpBB != nullptr, "firstCleanUpBB must not be nullptr");
651 if (GenerateInForFirstCleanUpBB()) {
652 GenerateOut(*firstCleanUpBB);
653 }
654 std::unordered_set<BB *> cleanupBBSetBak(cleanUpBBSet.begin(), cleanUpBBSet.end());
655 std::unordered_set<BB *>::iterator setItr;
656
657 while (!cleanupBBSetBak.empty()) {
658 setItr = cleanupBBSetBak.begin();
659 BB *bb = *setItr;
660 cleanupBBSetBak.erase(setItr);
661 if (GenerateIn(*bb)) {
662 if (GenerateOut(*bb)) {
663 for (auto succ : bb->GetSuccs()) {
664 (void)cleanupBBSetBak.insert(succ);
665 }
666 }
667 }
668 }
669 DEBUG_ASSERT(cleanupBBSetBak.empty(), "CG internal error.");
670 }
671
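/* incremental version of BuildInOutForCleanUpBB: only the designated DataInfo elements are propagated through
 * firstCleanUpBB and the other cleanup BBs */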
672 void ReachingDefinition::BuildInOutForCleanUpBB(bool isReg, const std::set<uint32> &index)
673 {
674 DEBUG_ASSERT(firstCleanUpBB != nullptr, "firstCleanUpBB must not be nullptr");
675 if (GenerateInForFirstCleanUpBB(isReg, index)) {
676 GenerateOut(*firstCleanUpBB, index, isReg);
677 }
678 std::unordered_set<BB *> cleanupBBSetBak(cleanUpBBSet.begin(), cleanUpBBSet.end());
679 std::unordered_set<BB *>::iterator setItr;
680 while (!cleanupBBSetBak.empty()) {
681 setItr = cleanupBBSetBak.begin();
682 BB *bb = *setItr;
683 cleanupBBSetBak.erase(setItr);
684 if (GenerateIn(*bb, index, isReg)) {
685 if (GenerateOut(*bb, index, isReg)) {
686 for (auto &succ : bb->GetSuccs()) {
687 (void)cleanupBBSetBak.insert(succ);
688 }
689 }
690 }
691 }
692 DEBUG_ASSERT(cleanupBBSetBak.empty(), "CG internal error.");
693 }
694
695 /* entry for the ReachingDefinition analysis; mode selects whether RegOperands, MemOperands or both are analyzed */
696 void ReachingDefinition::AnalysisStart()
697 {
698 if (!cgFunc->GetFirstBB()) {
699 return;
700 }
701 Initialize();
702 /* Build in/out for function body first. (Except cleanup bb) */
703 BuildInOutForFuncBody();
704 /* If cleanup bb exists, build in/out for cleanup bbs. firstCleanUpBB->in = Union all non-cleanup bb's out. */
705 if (firstCleanUpBB != nullptr) {
706 BuildInOutForCleanUpBB();
707 }
708 cgFunc->SetRD(this);
709 }
710
711 /* check whether currentBB can reach endBB according to control flow */
712 bool ReachingDefinition::CanReachEndBBFromCurrentBB(const BB &currentBB, const BB &endBB,
713 std::vector<bool> &traversedBBSet) const
714 {
715     if (&currentBB == &endBB) {
716 return true;
717 }
718 for (auto predBB : endBB.GetPreds()) {
719 if (traversedBBSet[predBB->GetId()]) {
720 continue;
721 }
722 traversedBBSet[predBB->GetId()] = true;
723         if (predBB == &currentBB) {
724 return true;
725 }
726 if (CanReachEndBBFromCurrentBB(currentBB, *predBB, traversedBBSet)) {
727 return true;
728 }
729 }
730 return false;
731 }
732
733 /* check whether the register may be redefined from startBB to endBB */
734 bool ReachingDefinition::IsLiveInAllPathBB(uint32 regNO, const BB &startBB, const BB &endBB,
735 std::vector<bool> &visitedBB, bool isFirstNo) const
736 {
737 for (auto succ : startBB.GetSuccs()) {
738 if (visitedBB[succ->GetId()]) {
739 continue;
740 }
741 visitedBB[succ->GetId()] = true;
742 if (isFirstNo && CheckRegLiveinReturnBB(regNO, *succ)) {
743 return false;
744 }
745 std::vector<bool> traversedPathSet(kMaxBBNum, false);
746 bool canReachEndBB = true;
747 if (regGen[succ->GetId()]->TestBit(regNO)) {
748 canReachEndBB = CanReachEndBBFromCurrentBB(*succ, endBB, traversedPathSet);
749 if (canReachEndBB) {
750 return false;
751 }
752 }
753 if (!canReachEndBB) {
754 continue;
755 }
756 bool isLive = IsLiveInAllPathBB(regNO, *succ, endBB, visitedBB, isFirstNo);
757 if (!isLive) {
758 return false;
759 }
760 }
761
762 return true;
763 }
764
765 /* Check if the reg is used in return BB */
766 bool ReachingDefinition::CheckRegLiveinReturnBB(uint32 regNO, const BB &bb) const
767 {
768 #if TARGAARCH64 || TARGRISCV64
769 if (bb.GetKind() == BB::kBBReturn) {
770 PrimType returnType = cgFunc->GetFunction().GetReturnType()->GetPrimType();
771 regno_t returnReg = R0;
772 if (IsPrimitiveFloat(returnType)) {
773 returnReg = V0;
774 } else if (IsPrimitiveInteger(returnType)) {
775 returnReg = R0;
776 }
777 if (regNO == returnReg) {
778 return true;
779 }
780 }
781 #endif
782 return false;
783 }
784
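/* within a single BB, check whether regNO (not redefined between startInsn and endInsn) is a caller-saved register
 * clobbered by a later call, or is the return register of a return BB */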
785 bool ReachingDefinition::RegIsUsedIncaller(uint32 regNO, Insn &startInsn, Insn &endInsn) const
786 {
787 if (startInsn.GetBB() != endInsn.GetBB()) {
788 return false;
789 }
790 if (startInsn.GetNext() == &endInsn || &startInsn == &endInsn) {
791 return false;
792 }
793 auto RegDefVec = FindRegDefBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev());
794 if (!RegDefVec.empty()) {
795 return false;
796 }
797 if (IsCallerSavedReg(regNO) && startInsn.GetNext() != nullptr &&
798 KilledByCallBetweenInsnInSameBB(*startInsn.GetNext(), *(startInsn.GetBB()->GetLastInsn()), regNO)) {
799 return true;
800 }
801 if (CheckRegLiveinReturnBB(regNO, *startInsn.GetBB())) {
802 return true;
803 }
804 return false;
805 }
806
807 /* check whether regNO stays live (is not redefined or clobbered) on the control flow paths from startInsn to endInsn */
808 bool ReachingDefinition::RegIsLiveBetweenInsn(uint32 regNO, Insn &startInsn, Insn &endInsn, bool isBack,
809 bool isFirstNo) const
810 {
811 DEBUG_ASSERT(&startInsn != &endInsn, "startInsn is not equal to endInsn");
812 if (startInsn.GetBB() == endInsn.GetBB()) {
813         /* the register is defined more than once */
814 if (startInsn.GetId() > endInsn.GetId()) {
815 if (!isBack) {
816 return false;
817 } else {
818 return true;
819 }
820 }
821 if (startInsn.GetNext() == &endInsn) {
822 return true;
823 }
824 if (regGen[startInsn.GetBB()->GetId()]->TestBit(regNO)) {
825 std::vector<Insn *> RegDefVec;
826 if (isBack) {
827 RegDefVec = FindRegDefBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev());
828 } else {
829 RegDefVec = FindRegDefBetweenInsn(regNO, &startInsn, endInsn.GetPrev());
830 }
831 if (!RegDefVec.empty()) {
832 return false;
833 }
834 }
835 if (IsCallerSavedReg(regNO) &&
836 KilledByCallBetweenInsnInSameBB(*startInsn.GetNext(), *endInsn.GetPrev(), regNO)) {
837 return false;
838 }
839 return true;
840 }
841
842 if (&startInsn != startInsn.GetBB()->GetLastInsn() && regGen[startInsn.GetBB()->GetId()]->TestBit(regNO) &&
843 !FindRegDefBetweenInsn(regNO, startInsn.GetNext(), startInsn.GetBB()->GetLastInsn()).empty()) {
844 return false;
845 }
846
847 if (&startInsn != startInsn.GetBB()->GetLastInsn() && IsCallerSavedReg(regNO) &&
848 KilledByCallBetweenInsnInSameBB(*startInsn.GetNext(), *startInsn.GetBB()->GetLastInsn(), regNO)) {
849 return false;
850 }
851
852 if (&endInsn != endInsn.GetBB()->GetFirstInsn() && regGen[endInsn.GetBB()->GetId()]->TestBit(regNO) &&
853 !FindRegDefBetweenInsn(regNO, endInsn.GetBB()->GetFirstInsn(), endInsn.GetPrev()).empty()) {
854 return false;
855 }
856
857 if (&endInsn != endInsn.GetBB()->GetFirstInsn() && IsCallerSavedReg(regNO) &&
858 KilledByCallBetweenInsnInSameBB(*endInsn.GetBB()->GetFirstInsn(), *endInsn.GetPrev(), regNO)) {
859 return false;
860 }
861
862 std::vector<bool> visitedBB(kMaxBBNum, false);
863 return IsLiveInAllPathBB(regNO, *startInsn.GetBB(), *endInsn.GetBB(), visitedBB, isFirstNo);
864 }
865
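/* helper for inline-asm insns: if regNO appears in the list operand at 'index' (e.g. the asm output or clobber
 * list), record insn as a defining insn and return true */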
866 static bool SetDefInsnVecForAsm(Insn *insn, uint32 index, uint32 regNO, std::vector<Insn *> &defInsnVec)
867 {
868 for (auto reg : static_cast<ListOperand &>(insn->GetOperand(index)).GetOperands()) {
869 if (static_cast<RegOperand *>(reg)->GetRegisterNumber() == regNO) {
870 defInsnVec.emplace_back(insn);
871 return true;
872 }
873 }
874 return false;
875 }
876
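/* scan backwards from endInsn to startInsn (both must be in the same BB) and collect the insns that define regNO,
 * including asm defs and calls that clobber it; if findAll is false, stop at the first definition found */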
877 std::vector<Insn *> ReachingDefinition::FindRegDefBetweenInsn(uint32 regNO, Insn *startInsn, Insn *endInsn,
878 bool findAll, bool analysisDone) const
879 {
880 std::vector<Insn *> defInsnVec;
881 if (startInsn == nullptr || endInsn == nullptr) {
882 return defInsnVec;
883 }
884
885 DEBUG_ASSERT(startInsn->GetBB() == endInsn->GetBB(), "two insns must be in a same BB");
886 if (analysisDone && !regGen[startInsn->GetBB()->GetId()]->TestBit(regNO)) {
887 return defInsnVec;
888 }
889
890 for (Insn *insn = endInsn; insn != nullptr && insn != startInsn->GetPrev(); insn = insn->GetPrev()) {
891 if (!insn->IsMachineInstruction()) {
892 continue;
893 }
894
895 if (insn->IsAsmInsn()) {
896 if (SetDefInsnVecForAsm(insn, kAsmOutputListOpnd, regNO, defInsnVec) ||
897 SetDefInsnVecForAsm(insn, kAsmClobberListOpnd, regNO, defInsnVec)) {
898 if (findAll) {
899 defInsnVec.emplace_back(insn);
900 } else {
901 return defInsnVec;
902 }
903 }
904 }
905 if (insn->IsCall() && IsRegKilledByCallInsn(*insn, regNO)) {
906 defInsnVec.emplace_back(insn);
907 if (!findAll) {
908 return defInsnVec;
909 }
910 }
911 if (insn->IsRegDefined(regNO)) {
912 defInsnVec.emplace_back(insn);
913 if (!findAll) {
914 return defInsnVec;
915 }
916 }
917 }
918 return defInsnVec;
919 }
920
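/* check that regNO is neither used nor redefined (nor clobbered by a call) between startInsn and endInsn;
 * returns false as soon as a use or definition is found */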
921 bool ReachingDefinition::RegIsUsedOrDefBetweenInsn(uint32 regNO, Insn &startInsn, Insn &endInsn) const
922 {
923 DEBUG_ASSERT(&startInsn != &endInsn, "startInsn is not equal to endInsn");
924 if (startInsn.GetBB() == endInsn.GetBB()) {
925         /* the register is defined more than once */
926 if (startInsn.GetId() > endInsn.GetId()) {
927 return false;
928 }
929 if (startInsn.GetNext() == &endInsn) {
930 return true;
931 }
932 if (regGen[startInsn.GetBB()->GetId()]->TestBit(regNO) &&
933 !FindRegDefBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev()).empty()) {
934 return false;
935 }
936 if (regUse[startInsn.GetBB()->GetId()]->TestBit(regNO)) {
937 InsnSet useInsnSet;
938 FindRegUseBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev(), useInsnSet);
939 if (!useInsnSet.empty()) {
940 return false;
941 }
942 }
943 if (IsCallerSavedReg(regNO) &&
944 KilledByCallBetweenInsnInSameBB(*startInsn.GetNext(), *endInsn.GetPrev(), regNO)) {
945 return false;
946 }
947 return true;
948 }
949
950 if (&startInsn != startInsn.GetBB()->GetLastInsn() && regGen[startInsn.GetBB()->GetId()]->TestBit(regNO) &&
951 !FindRegDefBetweenInsn(regNO, startInsn.GetNext(), startInsn.GetBB()->GetLastInsn()).empty()) {
952 return false;
953 }
954
955 if (regUse[startInsn.GetBB()->GetId()]->TestBit(regNO)) {
956 InsnSet useInsnSet;
957 FindRegUseBetweenInsn(regNO, startInsn.GetNext(), startInsn.GetBB()->GetLastInsn(), useInsnSet);
958 if (!useInsnSet.empty()) {
959 return false;
960 }
961 }
962
963 if (&startInsn != startInsn.GetBB()->GetLastInsn() && IsCallerSavedReg(regNO) &&
964 KilledByCallBetweenInsnInSameBB(*startInsn.GetNext(), *startInsn.GetBB()->GetLastInsn(), regNO)) {
965 return false;
966 }
967
968 if (&endInsn != endInsn.GetBB()->GetFirstInsn() && regGen[endInsn.GetBB()->GetId()]->TestBit(regNO) &&
969 !FindRegDefBetweenInsn(regNO, endInsn.GetBB()->GetFirstInsn(), endInsn.GetPrev()).empty()) {
970 return false;
971 }
972
973 if (regUse[startInsn.GetBB()->GetId()]->TestBit(regNO)) {
974 InsnSet useInsnSet;
975 FindRegUseBetweenInsn(regNO, endInsn.GetBB()->GetFirstInsn(), endInsn.GetPrev(), useInsnSet);
976 if (!useInsnSet.empty()) {
977 return false;
978 }
979 }
980
981 if (&endInsn != endInsn.GetBB()->GetFirstInsn() && IsCallerSavedReg(regNO) &&
982 KilledByCallBetweenInsnInSameBB(*endInsn.GetBB()->GetFirstInsn(), *endInsn.GetPrev(), regNO)) {
983 return false;
984 }
985
986 std::vector<bool> visitedBB(kMaxBBNum, false);
987 return IsUseOrDefInAllPathBB(regNO, *startInsn.GetBB(), *endInsn.GetBB(), visitedBB);
988 }
989
990 /* check whether the register may be used or redefined between startInsn and endInsn within the same BB */
991 bool ReachingDefinition::IsUseOrDefBetweenInsn(uint32 regNO, const BB &curBB, const Insn &startInsn,
992 Insn &endInsn) const
993 {
994 if (regGen[curBB.GetId()]->TestBit(regNO)) {
995 if (!FindRegDefBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev()).empty()) {
996 return false;
997 }
998 }
999 if (regUse[curBB.GetId()]->TestBit(regNO)) {
1000 InsnSet useInsnSet;
1001 FindRegUseBetweenInsn(regNO, startInsn.GetNext(), endInsn.GetPrev(), useInsnSet);
1002 if (!useInsnSet.empty()) {
1003 return false;
1004 }
1005 }
1006 return true;
1007 }
1008
1009 /* check whether the register may be used or redefined from startBB to endBB */
1010 bool ReachingDefinition::IsUseOrDefInAllPathBB(uint32 regNO, const BB &startBB, const BB &endBB,
1011 std::vector<bool> &visitedBB) const
1012 {
1013 for (auto succ : startBB.GetSuccs()) {
1014 if (visitedBB[succ->GetId()] || succ == &endBB) {
1015 continue;
1016 }
1017 visitedBB[succ->GetId()] = true;
1018 std::vector<bool> traversedPathSet(kMaxBBNum, false);
1019 bool canReachEndBB = true;
1020 if (regGen[succ->GetId()]->TestBit(regNO) || regUse[succ->GetId()]->TestBit(regNO) ||
1021 (succ->HasCall() && IsCallerSavedReg(regNO))) {
1022 canReachEndBB = CanReachEndBBFromCurrentBB(*succ, endBB, traversedPathSet);
1023 if (canReachEndBB) {
1024 return false;
1025 }
1026 }
1027 if (!canReachEndBB) {
1028 continue;
1029 }
1030 bool isLive = IsUseOrDefInAllPathBB(regNO, *succ, endBB, visitedBB);
1031 if (!isLive) {
1032 return false;
1033 }
1034 }
1035
1036 return true;
1037 }
1038
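/* check whether there is a call insn between startInsn and endInsn (inclusive) within the same BB */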
1039 bool ReachingDefinition::HasCallBetweenInsnInSameBB(const Insn &startInsn, const Insn &endInsn) const
1040 {
1041 DEBUG_ASSERT(startInsn.GetBB() == endInsn.GetBB(), "two insns must be in same bb");
1042 for (const Insn *insn = &startInsn; insn != endInsn.GetNext(); insn = insn->GetNext()) {
1043 if (insn->IsMachineInstruction() && insn->IsCall()) {
1044 return true;
1045 }
1046 }
1047 return false;
1048 }
1049
1050 /* The operand is only defined in startBB and only used in endBB,
1051  * so traversing backwards from endBB, all paths eventually reach startBB.
1052  * startBB and endBB are different, and call insns inside them are not counted.
1053  * Whether startBB and endBB are in a loop is not considered.
1054  */
1055 bool ReachingDefinition::HasCallInPath(const BB &startBB, const BB &endBB, std::vector<bool> &visitedBB) const
1056 {
1057 DEBUG_ASSERT(&startBB != &endBB, "startBB and endBB are not counted");
1058 std::queue<const BB *> bbQueue;
1059 bbQueue.push(&endBB);
1060 visitedBB[endBB.GetId()] = true;
1061 while (!bbQueue.empty()) {
1062 const BB *bb = bbQueue.front();
1063 bbQueue.pop();
1064 for (auto predBB : bb->GetPreds()) {
1065 if (predBB == &startBB || visitedBB[predBB->GetId()]) {
1066 continue;
1067 }
1068 if (predBB->HasCall()) {
1069 return true;
1070 }
1071 visitedBB[predBB->GetId()] = true;
1072 bbQueue.push(predBB);
1073 }
1074 }
1075 return false;
1076 }
1077
1078 /* because of compile-time cost, this function is not precise: BBs inside loops are not counted */
1079 bool ReachingDefinition::HasCallBetweenDefUse(const Insn &defInsn, const Insn &useInsn) const
1080 {
1081 if (defInsn.GetBB()->GetId() == useInsn.GetBB()->GetId()) {
1082 if (&useInsn == defInsn.GetNext()) {
1083 return false;
1084 }
1085 if (useInsn.GetId() > defInsn.GetId()) {
1086 return HasCallBetweenInsnInSameBB(defInsn, *useInsn.GetPrev());
1087 }
1088         /* useInsn is before defInsn; conservatively assume there is a call insn between them */
1089 return true;
1090 }
1091 /* check defInsn->GetBB() */
1092 if (&defInsn != defInsn.GetBB()->GetLastInsn() && defInsn.GetBB()->HasCall() &&
1093 HasCallBetweenInsnInSameBB(*defInsn.GetNext(), *defInsn.GetBB()->GetLastInsn())) {
1094 return true;
1095 }
1096 /* check useInsn->GetBB() */
1097 if (&useInsn != useInsn.GetBB()->GetFirstInsn() && useInsn.GetBB()->HasCall() &&
1098 HasCallBetweenInsnInSameBB(*useInsn.GetBB()->GetFirstInsn(), *useInsn.GetPrev())) {
1099 return true;
1100 }
1101 std::vector<bool> visitedBB(kMaxBBNum, false);
1102 return HasCallInPath(*defInsn.GetBB(), *useInsn.GetBB(), visitedBB);
1103 }
1104
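/* enlarge the register DataInfo of every BB so that it can hold at least 'size' bits, e.g. after new virtual
 * registers have been created */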
1105 void ReachingDefinition::EnlargeRegCapacity(uint32 size)
1106 {
1107 FOR_ALL_BB(bb, cgFunc) {
1108 regIn[bb->GetId()]->EnlargeCapacityToAdaptSize(size);
1109 regOut[bb->GetId()]->EnlargeCapacityToAdaptSize(size);
1110 regGen[bb->GetId()]->EnlargeCapacityToAdaptSize(size);
1111 regUse[bb->GetId()]->EnlargeCapacityToAdaptSize(size);
1112 }
1113 }
1114
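/* dump one kind of DataInfo (reg/mem gen, use, in, out) of bb; for memory info each set bit is printed as a stack
 * offset (index * 4) */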
1115 void ReachingDefinition::DumpInfo(const BB &bb, DumpType flag) const
1116 {
1117 const DataInfo *info = nullptr;
1118 switch (flag) {
1119 case kDumpRegGen:
1120 LogInfo::MapleLogger() << " regGen:\n";
1121 info = regGen[bb.GetId()];
1122 break;
1123 case kDumpRegUse:
1124 LogInfo::MapleLogger() << " regUse:\n";
1125 info = regUse[bb.GetId()];
1126 break;
1127 case kDumpRegIn:
1128 LogInfo::MapleLogger() << " regIn:\n";
1129 info = regIn[bb.GetId()];
1130 break;
1131 case kDumpRegOut:
1132 LogInfo::MapleLogger() << " regOut:\n";
1133 info = regOut[bb.GetId()];
1134 break;
1135 case kDumpMemGen:
1136 LogInfo::MapleLogger() << " memGen:\n";
1137 info = memGen[bb.GetId()];
1138 break;
1139 case kDumpMemIn:
1140 LogInfo::MapleLogger() << " memIn:\n";
1141 info = memIn[bb.GetId()];
1142 break;
1143 case kDumpMemOut:
1144 LogInfo::MapleLogger() << " memOut:\n";
1145 info = memOut[bb.GetId()];
1146 break;
1147 case kDumpMemUse:
1148 LogInfo::MapleLogger() << " memUse:\n";
1149 info = memUse[bb.GetId()];
1150 break;
1151 default:
1152 return;
1153 }
1154 DEBUG_ASSERT(info != nullptr, "null ptr check");
1155 uint32 count = 1;
1156 LogInfo::MapleLogger() << " ";
1157 for (uint32 i = 0; i != info->Size(); ++i) {
1158 if (info->TestBit(i)) {
1159 count += 1;
1160 if (kDumpMemGen <= flag && flag <= kDumpMemUse) {
1161 /* Each element i means a 4 byte stack slot. */
1162 LogInfo::MapleLogger() << (i * 4) << " ";
1163 } else {
1164 LogInfo::MapleLogger() << i << " ";
1165 }
1166 /* 10 output per line */
1167 if (count % 10 == 0) {
1168 LogInfo::MapleLogger() << "\n";
1169 LogInfo::MapleLogger() << " ";
1170 }
1171 }
1172 }
1173
1174 LogInfo::MapleLogger() << "\n";
1175 }
1176
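/* dump the attributes, successors and insns of bb */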
1177 void ReachingDefinition::DumpBBCGIR(const BB &bb) const
1178 {
1179 if (bb.IsCleanup()) {
1180 LogInfo::MapleLogger() << "[is_cleanup] ";
1181 }
1182 if (bb.IsUnreachable()) {
1183 LogInfo::MapleLogger() << "[unreachable] ";
1184 }
1185 if (bb.GetSuccs().size()) {
1186 LogInfo::MapleLogger() << " succs: ";
1187 for (auto *succBB : bb.GetSuccs()) {
1188 LogInfo::MapleLogger() << succBB->GetId() << " ";
1189 }
1190 }
1191 LogInfo::MapleLogger() << "\n";
1192
1193 FOR_BB_INSNS_CONST(insn, &bb) {
1194 LogInfo::MapleLogger() << " ";
1195 insn->Dump();
1196 }
1197 LogInfo::MapleLogger() << "\n";
1198 }
1199
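/* dump the reaching definition information selected by 'flag' (a bit mask of DumpType values) for every BB of the
 * function */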
1200 void ReachingDefinition::Dump(uint32 flag) const
1201 {
1202 MIRSymbol *mirSymbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(cgFunc->GetFunction().GetStIdx().Idx());
1203 DEBUG_ASSERT(mirSymbol != nullptr, "get symbol in function failed in ReachingDefinition::Dump");
1204 LogInfo::MapleLogger() << "\n---- Reaching definition analysis for " << mirSymbol->GetName();
1205 LogInfo::MapleLogger() << " ----\n";
1206 FOR_ALL_BB(bb, cgFunc) {
1207 LogInfo::MapleLogger() << " === BB_" << bb->GetId() << " ===\n";
1208
1209 if (flag & kDumpBBCGIR) {
1210 DumpBBCGIR(*bb);
1211 }
1212
1213 if (flag & kDumpRegIn) {
1214 DumpInfo(*bb, kDumpRegIn);
1215 }
1216
1217 if (flag & kDumpRegUse) {
1218 DumpInfo(*bb, kDumpRegUse);
1219 }
1220
1221 if (flag & kDumpRegGen) {
1222 DumpInfo(*bb, kDumpRegGen);
1223 }
1224
1225 if (flag & kDumpRegOut) {
1226 DumpInfo(*bb, kDumpRegOut);
1227 }
1228
1229 if (flag & kDumpMemIn) {
1230 DumpInfo(*bb, kDumpMemIn);
1231 }
1232
1233 if (flag & kDumpMemGen) {
1234 DumpInfo(*bb, kDumpMemGen);
1235 }
1236
1237 if (flag & kDumpMemOut) {
1238 DumpInfo(*bb, kDumpMemOut);
1239 }
1240
1241 if (flag & kDumpMemUse) {
1242 DumpInfo(*bb, kDumpMemUse);
1243 }
1244 }
1245 LogInfo::MapleLogger() << "------------------------------------------------------\n";
1246 }
1247
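/* analysis phase entry: create the target-specific ReachingDefinition instance, enable both register and memory
 * analysis, and run it */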
1248 bool CgReachingDefinition::PhaseRun(maplebe::CGFunc &f)
1249 {
1250 #if TARGAARCH64 || TARGRISCV64
1251 reachingDef = GetPhaseAllocator()->New<AArch64ReachingDefinition>(f, *GetPhaseMemPool());
1252 #endif
1253 #if TARGARM32
1254 reachingDef = GetPhaseAllocator()->New<Arm32ReachingDefinition>(f, *GetPhaseMemPool());
1255 #endif
1256 reachingDef->SetAnalysisMode(kRDAllAnalysis);
1257 reachingDef->AnalysisStart();
1258 return false;
1259 }
1260 MAPLE_ANALYSIS_PHASE_REGISTER(CgReachingDefinition, reachingdefinition)
1261
1262 bool CgClearRDInfo::PhaseRun(maplebe::CGFunc &f)
1263 {
1264 if (f.GetRDStatus()) {
1265 f.GetRD()->ClearDefUseInfo();
1266 }
1267 return false;
1268 }
1269 MAPLE_TRANSFORM_PHASE_REGISTER(CgClearRDInfo, clearrdinfo)
1270 } /* namespace maplebe */
1271