/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_alloc_lsra.h"
#include "aarch64_cg.h"

namespace maplebe {
namespace {
constexpr uint32 kSpilled = 1;
/* Here, kLoopWeight is a fine-tuned empirical parameter */
constexpr uint32 kLoopWeight = 4;
} // namespace
#ifdef ARK_LITECG_DEBUG
constexpr uint32 kPrintedActiveListLength = 10;

/*
 * This LSRA implementation is an interpretation of the [Poletto97] paper.
 * BFS BB ordering is used to order the instructions. The live intervals are based on
 * this instruction order. All vreg defines should come before a use; otherwise a
 * warning is given.
 * Live intervals are traversed in order from lower instruction order to higher order.
 * When encountering a live interval for the first time, it is assumed to be live and placed
 * inside the 'active' structure until the vreg's last access. During the time a vreg
 * is in 'active', the vreg occupies a physical register allocation and no other vreg can
 * be allocated the same physical register.
 */
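/*
 * Illustrative sketch (not the code below, which folds these steps into
 * ComputeLiveInterval / AssignPhysRegsForLi / RetireActive) of the classic
 * [Poletto97] scan over intervals sorted by increasing start position:
 *
 *   for each interval li in order of increasing firstDef:
 *       retire every active interval whose lastUse <= li.firstDef
 *       if some physical register is free until li.lastUse:
 *           assign it and insert li into 'active'
 *       else:
 *           spill li, or the lowest-priority interval in 'active'
 */
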
void LSRALinearScanRegAllocator::PrintRegSet(const MapleSet<uint32> &set, const std::string &str) const
{
    LogInfo::MapleLogger() << str;
    for (auto reg : set) {
        LogInfo::MapleLogger() << " " << reg;
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintLiveRanges(const LiveInterval &li) const
{
    if (li.GetAssignedReg() != 0) {
        uint32 base = (li.GetRegType() == kRegTyInt) ? firstIntReg : firstFpReg;
        char regType = (li.GetRegType() == kRegTyInt) ? 'R' : 'V';
        LogInfo::MapleLogger() << "(assigned " << regType << (li.GetAssignedReg() - base) << ")";
    }
    if (li.GetStackSlot() == kSpilled) {
        LogInfo::MapleLogger() << "(spill)";
    }
    if (li.IsShouldSave()) {
        LogInfo::MapleLogger() << "(ShouldSave)";
    }
    for (auto range : li.GetRanges()) {
        LogInfo::MapleLogger() << "[" << range.GetStart() << ", " << range.GetEnd() << "]"
                               << " ";
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintAllLiveRanges() const
{
    LogInfo::MapleLogger() << "func: " << cgFunc->GetName() << "\n";
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        LogInfo::MapleLogger() << "vreg" << li->GetRegNO() << ": ";
        PrintLiveRanges(*li);
    }
}
#endif

void LSRALinearScanRegAllocator::SpillStackMapInfo()
{
    RA_TIMER_REGISTER(lsra, "LSRA SpillStackMapInfo");
    const auto &referenceMapInsns = cgFunc->GetStackMapInsns();

    for (auto *insn : referenceMapInsns) {
        auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
        const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
        for (const auto &item : deoptBundleInfo) {
            const auto *opnd = item.second;
            if (!opnd->IsRegister()) {
                continue;
            }
            auto *li = liveIntervalsArray[(static_cast<const RegOperand *>(opnd))->GetRegisterNumber()];
            if (li != nullptr) {
#ifdef ARK_LITECG_DEBUG
                if (needDump) {
                    PrintLiveInterval(*li, "Spill Deopt:");
                }
#endif
                li->SetStackSlot(kSpilled);
                li->SetShouldSave(false);
            }
        }

        for (auto regNO : insn->GetStackMapLiveIn()) {
            if (!cgFunc->IsRegReference(regNO)) {
                continue;
            }
            auto *li = liveIntervalsArray[regNO];
            if (li == nullptr) {
                continue;
            }
            auto itr = derivedRef2Base.find(regNO);
            if (itr != derivedRef2Base.end()) {
                auto baseRegNum = (itr->second)->GetRegisterNumber();
                DEBUG_ASSERT(liveIntervalsArray[baseRegNum] != nullptr, "empty li");
#ifdef ARK_LITECG_DEBUG
                if (needDump) {
                    PrintLiveInterval(*liveIntervalsArray[baseRegNum], "Spill StackMap derivedRef:");
                }
#endif
                liveIntervalsArray[baseRegNum]->SetStackSlot(kSpilled);
                liveIntervalsArray[baseRegNum]->SetShouldSave(false);
            }
#ifdef ARK_LITECG_DEBUG
            if (needDump) {
                PrintLiveInterval(*li, "Spill StackMap:");
            }
#endif
            li->SetStackSlot(kSpilled);
            li->SetShouldSave(false);
        }
    }
}

#ifdef ARK_LITECG_DEBUG
void LSRALinearScanRegAllocator::PrintLiveInterval(const LiveInterval &li, const std::string &str) const
{
    LogInfo::MapleLogger() << str << "\n";
    if (li.GetPhysUse() > 0) {
        LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
        LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
        LogInfo::MapleLogger() << " physUse " << li.GetPhysUse();
    } else {
        LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
        LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
        LogInfo::MapleLogger() << " lastUse " << li.GetLastUse();
        LogInfo::MapleLogger() << " assigned " << li.GetAssignedReg();
        LogInfo::MapleLogger() << " refCount " << li.GetRefCount();
        LogInfo::MapleLogger() << " priority " << li.GetPriority();
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintParamQueue(const std::string &str)
{
    LogInfo::MapleLogger() << str << "\n";
    for (SingleQue &que : intParamQueue) {
        if (que.empty()) {
            continue;
        }
        LiveInterval *li = que.front();
        LiveInterval *last = que.back();
        PrintLiveInterval(*li, "");
        while (li != last) {
            que.pop_front();
            que.push_back(li);
            li = que.front();
            PrintLiveInterval(*li, "");
        }
        que.pop_front();
        que.push_back(li);
    }
}

void LSRALinearScanRegAllocator::PrintCallQueue(const std::string &str) const
{
    LogInfo::MapleLogger() << str << "\n";
    for (auto callInsnID : callQueue) {
        LogInfo::MapleLogger() << callInsnID << " ";
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintActiveList(const std::string &str, uint32 len) const
{
    uint32 count = 0;
    LogInfo::MapleLogger() << str << " " << active.size() << "\n";
    for (auto *li : active) {
        PrintLiveInterval(*li, "");
        ++count;
        if ((len != 0) && (count == len)) {
            break;
        }
    }
}

void LSRALinearScanRegAllocator::PrintLiveIntervals() const
{
    /* vreg LogInfo */
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        PrintLiveInterval(*li, "");
    }
    LogInfo::MapleLogger() << "\n";
    /* preg LogInfo */
    for (auto param : intParamQueue) {
        for (auto *li : param) {
            if (li == nullptr || li->GetRegNO() == 0) {
                continue;
            }
            PrintLiveInterval(*li, "");
        }
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::DebugCheckActiveList() const
{
    LiveInterval *prev = nullptr;
    for (auto *li : active) {
        if (prev != nullptr) {
            if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
                (prev->GetRegNO() > regInfo->GetLastParamsFpReg())) {
                if (li->GetFirstDef() < prev->GetFirstDef()) {
                    LogInfo::MapleLogger() << "ERROR: active list with out of order phys + vreg\n";
                    PrintLiveInterval(*prev, "prev");
                    PrintLiveInterval(*li, "current");
                    PrintActiveList("Active", kPrintedActiveListLength);
                }
            }
            if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
                (prev->GetRegNO() <= regInfo->GetLastParamsFpReg())) {
                if (li->GetFirstDef() < prev->GetFirstDef()) {
                    LogInfo::MapleLogger() << "ERROR: active list with out of order phys reg use\n";
                    PrintLiveInterval(*prev, "prev");
                    PrintLiveInterval(*li, "current");
                    PrintActiveList("Active", kPrintedActiveListLength);
                }
            }
        }
        /* advance prev so consecutive elements are compared, not just the first one */
        prev = li;
    }
}
#endif

/*
 * Prepare the free physical register pool for allocation.
 * When a physical register is allocated, it is removed from the pool.
 * The physical register is re-inserted into the pool when the associated live
 * interval has ended.
 */
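/*
 * Illustrative note: the caller/callee split built here drives the save
 * strategy later on. A caller-saved assignee that lives across a call gets
 * ShouldSave and is spilled/reloaded around each call (InsertCallerSave),
 * while a callee-saved assignee is saved once in the prologue/epilogue
 * (AddtoCalleeSaved).
 */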
void LSRALinearScanRegAllocator::InitFreeRegPool()
{
    for (regno_t regNO : regInfo->GetAllRegs()) {
        if (!regInfo->IsAvailableReg(regNO)) {
            continue;
        }
        if (regInfo->IsGPRegister(regNO)) {
            if (regInfo->IsYieldPointReg(regNO)) {
                continue;
            }
            /* ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                intSpillRegSet.push_back(regNO - firstIntReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* callee-saved registers */
                (void)intCalleeRegSet.insert(regNO - firstIntReg);
                intCalleeMask |= 1u << (regNO - firstIntReg);
            } else {
                /* caller-saved registers */
                (void)intCallerRegSet.insert(regNO - firstIntReg);
                intCallerMask |= 1u << (regNO - firstIntReg);
            }
        } else {
            /* fp ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                fpSpillRegSet.push_back(regNO - firstFpReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* fp callee-saved registers */
                (void)fpCalleeRegSet.insert(regNO - firstFpReg);
                fpCalleeMask |= 1u << (regNO - firstFpReg);
            } else {
                /* fp caller-saved registers */
                (void)fpCallerRegSet.insert(regNO - firstFpReg);
                fpCallerMask |= 1u << (regNO - firstFpReg);
            }
        }
    }
    DEBUG_ASSERT(intSpillRegSet.size() >= 2U, "too few spill regs");

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        PrintRegSet(intCallerRegSet, "ALLOCATABLE_INT_CALLER");
        PrintRegSet(intCalleeRegSet, "ALLOCATABLE_INT_CALLEE");
        PrintRegSet(intParamRegSet, "ALLOCATABLE_INT_PARAM");
        PrintRegSet(fpCallerRegSet, "ALLOCATABLE_FP_CALLER");
        PrintRegSet(fpCalleeRegSet, "ALLOCATABLE_FP_CALLEE");
        PrintRegSet(fpParamRegSet, "ALLOCATABLE_FP_PARAM");
        LogInfo::MapleLogger() << "INT_SPILL_REGS";
        for (uint32 intSpillRegNO : intSpillRegSet) {
            LogInfo::MapleLogger() << " " << intSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << "FP_SPILL_REGS";
        for (uint32 fpSpillRegNO : fpSpillRegSet) {
            LogInfo::MapleLogger() << " " << fpSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << std::hex;
        LogInfo::MapleLogger() << "INT_CALLER_MASK " << intCallerMask << "\n";
        LogInfo::MapleLogger() << "INT_CALLEE_MASK " << intCalleeMask << "\n";
        LogInfo::MapleLogger() << "INT_PARAM_MASK " << intParamMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLER_FP_MASK " << fpCallerMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLEE_FP_MASK " << fpCalleeMask << "\n";
        LogInfo::MapleLogger() << "FP_PARAM_FP_MASK " << fpParamMask << "\n";
        LogInfo::MapleLogger() << std::dec;
    }
#endif
}

void LSRALinearScanRegAllocator::RecordPhysRegs(const RegOperand &regOpnd, uint32 insnNum, bool isDef)
{
    RegType regType = regOpnd.GetRegisterType();
    uint32 regNO = regOpnd.GetRegisterNumber();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return;
    }
    if (regInfo->IsUnconcernedReg(regNO) || regInfo->IsVirtualRegister(regNO)) {
        return;
    }
    if (isDef) {
        /* parameter/return register def is assumed to be live until a call. */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetRegType(regType);
        li->SetStackSlot(0xFFFFFFFF);
        li->SetFirstDef(insnNum);
        li->SetPhysUse(insnNum);
        li->SetAssignedReg(regNO);

        if (regType == kRegTyInt) {
            intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
        } else {
            fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
        }
    } else {
        if (regType == kRegTyInt) {
            CHECK_FATAL(!intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty(),
                        "was not defined before use, impossible");
            LiveInterval *li = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
            li->SetPhysUse(insnNum);
        } else {
            CHECK_FATAL(!fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty(),
                        "was not defined before use, impossible");
            LiveInterval *li = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
            li->SetPhysUse(insnNum);
        }
    }
}

void LSRALinearScanRegAllocator::UpdateRegUsedInfo(LiveInterval &li, regno_t regNO)
{
    uint32 index = regNO / (sizeof(uint64) * k8ByteSize);
    uint64 bit = regNO % (sizeof(uint64) * k8ByteSize);
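    // For example (illustrative): with 64-bit words, regNO 70 maps to
    // index 70 / 64 = 1 and bit 70 % 64 = 6 of regUsedInBB.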
    if ((regUsedInBB[index] & (static_cast<uint64>(1) << bit)) != 0) {
        li.SetMultiUseInBB(true);
    }
    regUsedInBB[index] |= (static_cast<uint64>(1) << bit);

    if (minVregNum > regNO) {
        minVregNum = regNO;
    }
    if (maxVregNum < regNO) {
        maxVregNum = regNO;
    }
}

/* main entry function for live interval computation. */
void LSRALinearScanRegAllocator::SetupLiveInterval(Operand &opnd, Insn &insn, bool isDef, uint32 &nUses, uint32 regSize)
{
    if (!opnd.IsRegister()) {
        return;
    }
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 insnNum = insn.GetId();
    if (!regInfo->IsVirtualRegister(regOpnd)) {
        RecordPhysRegs(regOpnd, insnNum, isDef);
        return;
    }
    RegType regType = regOpnd.GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return;
    }

    LiveInterval *li = nullptr;
    uint32 regNO = regOpnd.GetRegisterNumber();
    DEBUG_ASSERT(regNO <= liveIntervalsArray.size() - 1, "index out of range");
    if (liveIntervalsArray[regNO] == nullptr) {
        li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        liveIntervalsArray[regNO] = li;
        liQue.push_back(li);
    } else {
        li = liveIntervalsArray[regNO];
    }
    li->SetRegType(regType);

    BB *curBB = insn.GetBB();
    if (isDef) {
        /* never set to 0 before, why consider this condition? */
        if (li->GetFirstDef() == 0) {
            li->SetFirstDef(insnNum);
            li->SetLastUse(insnNum + 1);
        } else if (!curBB->IsUnreachable()) {
            if (li->GetLastUse() < insnNum || li->IsUseBeforeDef()) {
                li->SetLastUse(insnNum + 1);
            }
        }
        // add range
        li->AddRange(insn.GetId(), insn.GetId());
    } else {
        if (li->GetFirstDef() == 0) {
            DEBUG_ASSERT(false, "SetupLiveInterval: use before def");
        }
        /*
         * In ComputeLiveInterval when extending live interval using
         * live-out information, li created does not have a type.
         */
        if (!curBB->IsUnreachable()) {
            li->SetLastUse(insnNum);
        }
        ++nUses;
        // update range
        if (li->GetRanges().empty()) {
            li->AddRange(insn.GetId(), insn.GetId());
        } else {
            li->GetRanges().back().SetEnd(insn.GetId());
        }
    }
    if (isDef) {
        li->SetMaxDefSize(std::max(regSize, li->GetMaxDefSize()));
    } else {
        li->SetMaxUseSize(std::max(regSize, li->GetMaxUseSize()));
    }
    if (li->GetMaxDefSize() == 0) {
        li->SetSpillSize(li->GetMaxUseSize());
    } else if (li->GetMaxUseSize() == 0) {
        li->SetSpillSize(li->GetMaxDefSize());
    } else {
        li->SetSpillSize(std::min(li->GetMaxDefSize(), li->GetMaxUseSize()));
    }
    li->SetRefCount(li->GetRefCount() + 1);
    li->AddUsePositions(insnNum);
    UpdateRegUsedInfo(*li, regNO);

    /* setup the def/use point for it */
    DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "out of range of vector liveIntervalsArray");
}

/*
 * Support 'hole' in LSRA.
 * For a live interval, there might be multiple segments of live ranges,
 * and between these segments a 'hole'.
 * Some other short-lived vreg can go into these 'holes'.
 *
 * from : starting instruction sequence id
 * to : ending instruction sequence id
 */
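/*
 * For example (illustrative): a vreg defined at insn 5 and used at insn 8,
 * then redefined at insn 20 and used at insn 24, gets the ranges [5, 8] and
 * [20, 24]; insns 9..19 form a 'hole' where another short-lived vreg may
 * reuse the same physical register.
 */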
void LSRALinearScanRegAllocator::LiveInterval::AddRange(uint32 from, uint32 to)
{
    if (ranges.empty()) {
        ranges.emplace_back(from, to);
        return;
    }
    /* create a new range */
    if (from > ranges.back().GetEnd()) {
        ranges.emplace_back(from, to);
        return;
    }
    DEBUG_ASSERT(to >= ranges.back().GetStart(), "Not possible on forward traverse.");
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetStart()) {
        ranges.back().SetStart(from);
        ranges.back().SetEnd(to);
        return;
    }
    /* extend its range forward. e.g. def-use opnd */
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetEnd()) {
        ranges.back().SetEnd(to);
        return;
    }
    return;
}

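/*
 * Find the first range whose end is at or after 'pos'. Note that rangeFinder
 * is a member iterator that only moves forward, so this assumes successive
 * queries arrive in non-decreasing position order, as in the forward scans
 * in this file.
 */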
MapleVector<LSRALinearScanRegAllocator::LinearRange>::iterator LSRALinearScanRegAllocator::LiveInterval::FindPosRange(
    uint32 pos)
{
    while (rangeFinder != ranges.end()) {
        if (rangeFinder->GetEnd() >= pos) {
            break;
        }
        ++rangeFinder;
    }
    return rangeFinder;
}

/* Extend live interval with live-in info */
void LSRALinearScanRegAllocator::UpdateLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto &regNO : bb.GetLiveInRegNO()) {
        if (!regInfo->IsVirtualRegister(regNO)) {
            /* Do not consider physical regs. */
            continue;
        }
        DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "index out of range.");
        LiveInterval *liOuter = liveIntervalsArray[regNO];
        if (liOuter != nullptr) {
            // add live interval range
            liOuter->AddRange(insnNum, insnNum);
            continue;
        }
        if (bb.IsEmpty() && bb.GetId() != 1) {
            continue;
        }
        /*
         * try-catch related
         * Since it is live-in but not seen before, it's a use before def;
         * spill it temporarily
         */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(kSpilled);
        liveIntervalsArray[regNO] = li;
        li->SetFirstDef(insnNum);
        liQue.push_back(li);

        li->SetUseBeforeDef(true);

        // add live interval range
        li->AddRange(insnNum, insnNum);

        if (!bb.IsUnreachable()) {
            if (bb.GetId() != 1) {
                LogInfo::MapleLogger() << "ERROR: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                       << cgFunc->GetName() << "\n";
                DEBUG_ASSERT(false, "There should only be [use before def in bb 1], temporarily.");
            }
            LogInfo::MapleLogger() << "WARNING: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
    }
}

/* traverse live-in regNOs; for each live-in physical regNO create a new live interval */
void LSRALinearScanRegAllocator::UpdateParamLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto regNO : bb.GetLiveInRegNO()) {
        if (regInfo->IsUnconcernedReg(regNO) || regInfo->IsVirtualRegister(regNO)) {
            continue;
        }
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        li->SetFirstDef(insnNum);
        li->SetPhysUse(insnNum);
        li->SetAssignedReg(regNO);

        if (regInfo->IsGPRegister(regNO)) {
            li->SetRegType(kRegTyInt);
            intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
        } else {
            li->SetRegType(kRegTyFloat);
            fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
        }
    }
}

void LSRALinearScanRegAllocator::ComputeLiveIn(BB &bb, uint32 insnNum)
{
    if (bb.IsEmpty() && bb.GetId() != 1) {
        return;
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEOUT:";
        for (const auto &liveOutRegNO : bb.GetLiveOutRegNO()) {
            LogInfo::MapleLogger() << " " << liveOutRegNO;
        }
        LogInfo::MapleLogger() << ".\n";
        LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEIN:";
        for (const auto &liveInRegNO : bb.GetLiveInRegNO()) {
            LogInfo::MapleLogger() << " " << liveInRegNO;
        }
        LogInfo::MapleLogger() << ".\n";
    }
#endif

    UpdateLiveIntervalByLiveIn(bb, insnNum);

    if (bb.GetFirstInsn() == nullptr) {
        return;
    }
    UpdateParamLiveIntervalByLiveIn(bb, insnNum);
}

void LSRALinearScanRegAllocator::ComputeLiveOut(BB &bb, uint32 insnNum)
{
    auto updateLiveIntervalByLiveOut = [this, &bb, insnNum](regno_t regNO) {
        /* Extend live interval with live-out info */
        LiveInterval *li = liveIntervalsArray[regNO];
        if (li != nullptr && !bb.IsEmpty()) {
            li->SetLastUse(bb.GetLastInsn()->GetId());

            // update live interval range
            if (li->GetRanges().empty()) {
                li->AddRange(insnNum, insnNum);
            } else {
                li->GetRanges().back().SetEnd(insnNum);
            }
        }
    };

    /*
     * traverse live-out regNOs:
     * for each live-out regNO, if the last corresponding live interval was created
     * within this bb, update its lastUse to the end of the BB
     */
    for (const auto regNO : bb.GetLiveOutRegNO()) {
        if (regInfo->IsUnconcernedReg(regNO)) {
            continue;
        }
        if (!regInfo->IsVirtualRegister(regNO)) {
            LiveInterval *liOut = nullptr;
            if (regInfo->IsGPRegister(regNO)) {
                if (intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            } else {
                if (fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            }
            continue;
        }
        updateLiveIntervalByLiveOut(regNO);
        auto iter = derivedRef2Base.find(regNO);
        if (iter != derivedRef2Base.end()) {
            updateLiveIntervalByLiveOut(iter->second->GetRegisterNumber());
        }
    }
}

struct RegOpndInfo {
    RegOpndInfo(RegOperand &opnd, uint32 size, uint32 idx, bool def)
        : regOpnd(opnd), regSize(size), opndIdx(idx), isDef(def)
    {
    }
    RegOperand &regOpnd;
    uint32 regSize = 0;
    uint32 opndIdx = -1;
    bool isDef = false;
};

void LSRALinearScanRegAllocator::ComputeLiveIntervalForEachOperand(Insn &insn)
{
    std::vector<RegOpndInfo> allRegOpndInfo;
    const InsnDesc *md = insn.GetDesc();
    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        Operand &opnd = insn.GetOperand(i);
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "ptr null check.");
        if (opnd.IsRegister()) {
            auto &regOpnd = static_cast<RegOperand &>(opnd);
            // Specifically, the "use-def" opnd is treated as a "use" opnd
            bool isUse = opndDesc->IsRegUse();
            // Fixup: The size in the insn md of x64 is not accurate, and the size in the float opnd
            // does not change, so we use the size in the opnd first.
            auto regSize = (regOpnd.GetRegisterType() == kRegTyInt ? opndDesc->GetSize() : regOpnd.GetSize());
            allRegOpndInfo.emplace_back(regOpnd, regSize, i, !isUse);
        } else if (opnd.IsMemoryAccessOperand()) {
            auto &memOpnd = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOpnd.GetBaseRegister();
            RegOperand *offset = memOpnd.GetIndexRegister();
            if (base != nullptr) {
                allRegOpndInfo.emplace_back(*base, k64BitSize, i, false);
            }
            if (offset != nullptr) {
                allRegOpndInfo.emplace_back(*offset, k64BitSize, i, false);
            }
        } else if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            for (auto op : listOpnd.GetOperands()) {
                allRegOpndInfo.emplace_back(*op, op->GetSize(), i, opndDesc->IsDef());
            }
        }
    }

    uint32 numUses = 0;
    for (auto &regOpndInfo : allRegOpndInfo) {
        if (!regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, false, numUses, regOpndInfo.regSize);
        }
    }

    for (auto &regOpndInfo : allRegOpndInfo) {
        if (regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, true, numUses, regOpndInfo.regSize);
        }
    }

    if (numUses >= regInfo->GetNormalUseOperandNum()) {
        needExtraSpillReg = true;
    }
}

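/*
 * Illustrative note: loop nests boost priority multiplicatively. Child loops
 * are processed first, so a vreg referenced inside nested loops has its
 * priority scaled by kLoopWeight for each referencing loop BB and again for
 * each enclosing loop, making loop-resident vregs much harder to pre-spill.
 */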
void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriority(const LoopDesc &loop)
{
    for (const auto *lp : loop.GetChildLoops()) {
        /* handle nested Loops */
        ComputeLoopLiveIntervalPriority(*lp);
    }
    for (auto bbId : loop.GetLoopBBs()) {
        auto *bb = cgFunc->GetBBFromID(bbId);
        if (bb->IsEmpty()) {
            continue;
        }
        FOR_BB_INSNS(insn, bb)
        {
            ComputeLoopLiveIntervalPriorityInInsn(*insn);
        }
        loopBBRegSet.clear();
    }
}

void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriorityInInsn(const Insn &insn)
{
    uint32 opndNum = insn.GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = insn.GetOperand(i);
        if (!opnd.IsRegister()) {
            continue;
        }
        auto &regOpnd = static_cast<RegOperand &>(opnd);
        if (!regInfo->IsVirtualRegister(regOpnd)) {
            continue;
        }
        uint32 regNo = regOpnd.GetRegisterNumber();
        LiveInterval *li = liveIntervalsArray[regNo];
        if (li == nullptr || loopBBRegSet.find(regNo) != loopBBRegSet.end()) {
            continue;
        }
        li->SetPriority(kLoopWeight * li->GetPriority());
        (void)loopBBRegSet.insert(regNo);
    }
    return;
}

void LSRALinearScanRegAllocator::ComputeLiveIntervalForCall(Insn &insn)
{
    uint32 cnt = 0;
    // referencemap info
    for (auto regNO : insn.GetStackMapLiveIn()) {
        if (derivedRef2Base.find(regNO) != derivedRef2Base.end()) {
            auto baseOpnd = derivedRef2Base[regNO];
            auto regSize = baseOpnd->GetSize();
            SetupLiveInterval(*baseOpnd, insn, false, cnt, regSize);
        }
    }
    // deoptinfo
    if (insn.GetStackMap() != nullptr) {
        for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
            auto regSize = opnd->GetSize();
            SetupLiveInterval(*opnd, insn, false, cnt, regSize);
        }
    }
}

void LSRALinearScanRegAllocator::ComputeLiveInterval()
{
    RA_TIMER_REGISTER(lsra, "LSRA ComputeLiveInterval");
    liQue.clear();
    uint32 regUsedInBBSz = (cgFunc->GetMaxVReg() / (sizeof(uint64) * k8ByteSize) + 1);
    regUsedInBB.resize(regUsedInBBSz, 0);
    uint32 insnNum = 1;
    for (BB *bb : bfs->sortedBBs) {
        ComputeLiveIn(*bb, insnNum);
        FOR_BB_INSNS(insn, bb)
        {
            insn->SetId(insnNum);
            /* skip comment and debug insn */
            if (!insn->IsMachineInstruction()) {
                continue;
            }

            /* RecordCall, remember calls for caller/callee allocation. */
            if (insn->IsCall()) {
                if (!insn->GetIsThrow()) {
                    callQueue.emplace_back(insn->GetId());
                }
                if (!cgFunc->IsStackMapComputed()) {
                    ComputeLiveIntervalForCall(*insn);
                }
            }

            ComputeLiveIntervalForEachOperand(*insn);

            /* handle return value for call insn */
            if (insn->IsCall()) {
                /* For all backend architectures so far, treat all return registers as
                 * defined by this insn, so their live ranges begin here. A further
                 * optimization could determine which return registers are actually used.
                 */
                for (uint32 i = 0; i < regInfo->GetIntRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetIntRetReg(i), k64BitSize, kRegTyInt);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
                for (uint32 i = 0; i < regInfo->GetFpRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetFpRetReg(i), k64BitSize, kRegTyFloat);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
            }
            ++insnNum;
        }

        ComputeLiveOut(*bb, insnNum);
    }

    maxInsnNum = insnNum - 1; /* insnNum started from 1 */
    regUsedInBB.clear();
    /* calculate Live Interval weight */
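    /* For example (illustrative): a vreg with refCount 6 that lives from insn
     * 10 to insn 40 gets priority 6 / 30 = 0.2; denser intervals (more
     * references per instruction) rank higher and are spilled last. */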
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        if (li->GetPhysUse() > 0) {
            continue;
        }
        if (li->GetLastUse() > li->GetFirstDef()) {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetLastUse() - li->GetFirstDef()));
        } else {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetFirstDef() - li->GetLastUse()));
        }
    }

    // enhance loop Live Interval Priority
    for (const auto *lp : loopInfo.GetLoops()) {
        ComputeLoopLiveIntervalPriority(*lp);
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        PrintLiveIntervals();
    }
#endif
}

/* Calculate the weight of a live interval for pre-spill and flexible spill */
void LSRALinearScanRegAllocator::LiveIntervalAnalysis()
{
    RA_TIMER_REGISTER(lsra, "LSRA BuildIntervalRanges");
    for (uint32 bbIdx = 0; bbIdx < bfs->sortedBBs.size(); ++bbIdx) {
        BB *bb = bfs->sortedBBs[bbIdx];

        FOR_BB_INSNS(insn, bb)
        {
            /* 1 calculate live interference */
            if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                /* New instruction inserted by reg alloc (i.e. spill) */
                continue;
            }
            /* 1.1 simple retire from active */
            MapleSet<LiveInterval *, ActiveCmp>::iterator it;
            for (it = active.begin(); it != active.end(); /* erase will update */) {
                auto *li = static_cast<LiveInterval *>(*it);
                if (li->GetLastUse() > insn->GetId()) {
                    break;
                }
                it = active.erase(it);
            }
            const InsnDesc *md = insn->GetDesc();
            uint32 opndNum = insn->GetOperandSize();
            for (uint32 i = 0; i < opndNum; ++i) {
                const OpndDesc *regProp = md->GetOpndDes(i);
                DEBUG_ASSERT(regProp != nullptr, "pointer is null in LSRALinearScanRegAllocator::LiveIntervalAnalysis");
                bool isDef = regProp->IsRegDef();
                Operand &opnd = insn->GetOperand(i);
                if (isDef) {
                    auto &regOpnd = static_cast<RegOperand &>(opnd);
                    if (regOpnd.IsVirtualRegister() && regOpnd.GetRegisterType() != kRegTyCc) {
                        /* 1.2 simple insert to active */
                        uint32 regNO = regOpnd.GetRegisterNumber();
                        LiveInterval *li = liveIntervalsArray[regNO];
                        if (li->GetFirstDef() == insn->GetId() && li->GetStackSlot() != kSpilled) {
                            (void)active.insert(li);
                        }
                    }
                }
            }

            /* 2 gather interference info and analyze it */
            uint32 interNum = active.size();
#ifdef ARK_LITECG_DEBUG
            if (needDump) {
                LogInfo::MapleLogger() << "In insn " << insn->GetId() << ", " << interNum
                                       << " overlap live intervals.\n";
                LogInfo::MapleLogger() << "\n";
            }
#endif

            /* 2.2 the intervals interfere with each other; analyze which one to spill */
            while (interNum > CGOptions::GetOverlapNum()) {
                LiveInterval *lowestLi = nullptr;
                FindLowestPrioInActive(lowestLi);
                if (lowestLi != nullptr) {
#ifdef ARK_LITECG_DEBUG
                    if (needDump) {
                        PrintLiveInterval(*lowestLi, "Pre spilled: ");
                    }
#endif
                    lowestLi->SetStackSlot(kSpilled);
                    lowestLi->SetShouldSave(false);
                    active.erase(itFinded);
                    interNum = active.size();
                } else {
                    break;
                }
            }
        }
    }
    active.clear();
}

void LSRALinearScanRegAllocator::UpdateCallQueueAtRetirement(uint32 insnID)
{
    /*
     * active list is sorted based on increasing lastUse
     * any operand whose use is greater than current
     * instruction number is still in use.
     * If the use is less than or equal to instruction number
     * then it is possible to retire this live interval and
     * reclaim the physical register associated with it.
     */
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "RetireActiveByInsn instr_num " << insnID << "\n";
    }
#endif
    /* Retire invalidated call from call queue */
    while (!callQueue.empty() && callQueue.front() <= insnID) {
        callQueue.pop_front();
    }
}

/* update allocation info by the active queue */
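/*
 * freeUntilPos[r] is a conservative estimate of how long physical reg r stays
 * free: 0 means r is already occupied at li's start, while a larger value
 * means r is free until that position, so r can hold li outright whenever
 * freeUntilPos[r] >= li.GetLastUse() (see GetRegFromMask).
 */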
void LSRALinearScanRegAllocator::UpdateActiveAllocateInfo(const LiveInterval &li)
{
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    if (li.IsUseBeforeDef()) {
        --start;
    }
    for (auto *activeLi : active) {
        uint32 regNO = activeLi->GetAssignedReg();
        uint32 rangeStartPos;
        auto posRange = activeLi->FindPosRange(start);
        if (posRange == activeLi->GetRanges().end()) {
            rangeStartPos = UINT32_MAX;
        } else {
            rangeStartPos = posRange->GetStart();
        }
        if (rangeStartPos > start) {
            if (rangeStartPos < end) {
                blockForbiddenMask |= (1UL << activeLi->GetAssignedReg());
            }
            if (rangeStartPos < freeUntilPos[regNO]) {
                freeUntilPos[regNO] = rangeStartPos;
            }
        } else {
            freeUntilPos[regNO] = 0;
        }
    }
}

/* update allocation info by the param queue */
void LSRALinearScanRegAllocator::UpdateParamAllocateInfo(const LiveInterval &li)
{
    bool isInt = (li.GetRegType() == kRegTyInt);
    MapleVector<SingleQue> &paramQueue = isInt ? intParamQueue : fpParamQueue;
    uint32 baseReg = isInt ? firstIntReg : firstFpReg;
    uint32 paramNum = isInt ? regInfo->GetIntRegs().size() : regInfo->GetFpRegs().size();
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    for (uint32 i = 0; i < paramNum; ++i) {
        while (!paramQueue[i].empty() && paramQueue[i].front()->GetPhysUse() <= start) {
            paramQueue[i].pop_front();
        }
        if (paramQueue[i].empty()) {
            continue;
        }
        auto regNo = paramQueue[i].front()->GetRegNO();
        uint32 startPos = paramQueue[i].front()->GetFirstDef();
        if (startPos <= start) {
            freeUntilPos[regNo] = 0;
        } else {
            if (startPos < end) {
                blockForbiddenMask |= (1UL << (i + baseReg));
            }
            if (startPos < freeUntilPos[regNo]) {
                freeUntilPos[regNo] = startPos;
            }
        }
    }
}

/* update the active list on retirement */
void LSRALinearScanRegAllocator::RetireActive(LiveInterval &li, uint32 insnID)
{
    /* Retire live intervals from active list */
    MapleSet<LiveInterval *, ActiveCmp>::iterator it;
    for (it = active.begin(); it != active.end(); /* erase will update */) {
        auto *activeLi = static_cast<LiveInterval *>(*it);
        if (activeLi->GetLastUse() > insnID) {
            ++it;
            continue;
        }
        /*
         * live interval ended for this reg in active
         * release physical reg assigned to free reg pool
         */
#ifdef ARK_LITECG_DEBUG
        if (needDump) {
            LogInfo::MapleLogger() << "Removing "
                                   << "(" << activeLi->GetAssignedReg() << ")"
                                   << "from regmask\n";
            PrintLiveInterval(*activeLi, "\tRemoving virt_reg li\n");
        }
#endif
        it = active.erase(it);
    }
}

/* find the best physical reg by freeUntilPos */
uint32 LSRALinearScanRegAllocator::GetRegFromMask(uint32 mask, regno_t offset, const LiveInterval &li)
{
    uint32 prefer = li.GetPrefer();
    if (prefer != 0) {
        uint32 preg = li.GetPrefer() - offset;
        if ((mask & (1u << preg)) != 0 && freeUntilPos[prefer] >= li.GetLastUse()) {
            return prefer;
        }
    }
    uint32 bestReg = 0;
    uint32 maxFreeUntilPos = 0;
    for (uint32 preg = 0; preg < k32BitSize; ++preg) {
        if ((mask & (1u << preg)) == 0) {
            continue;
        }
        uint32 regNO = preg + offset;
        if (freeUntilPos[regNO] >= li.GetLastUse()) {
            return regNO;
        }
        if (freeUntilPos[regNO] > maxFreeUntilPos) {
            maxFreeUntilPos = freeUntilPos[regNO];
            bestReg = regNO;
        }
    }
    return bestReg;
}

/* Determine if the live interval crosses a call */
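/*
 * A call at insn c forces a save only if c falls inside one of li's live
 * ranges (start <= c < end); a call that lands in a 'hole' between ranges
 * is harmless, since the value need not be in a register there.
 */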
bool LSRALinearScanRegAllocator::NeedSaveAcrossCall(LiveInterval &li)
{
    bool saveAcrossCall = false;
    for (uint32 callInsnID : callQueue) {
        if (callInsnID > li.GetLastUse()) {
            break;
        }
        if (callInsnID < li.GetFirstDef()) {
            continue;
        }
        /* Need to spill/fill around this call */
        for (auto range : li.GetRanges()) {
            uint32 start = range.GetStart();
            if (callInsnID >= start && callInsnID < range.GetEnd()) {
                saveAcrossCall = true;
                break;
            }
        }
        if (saveAcrossCall) {
            break;
        }
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        if (saveAcrossCall) {
            LogInfo::MapleLogger() << "\t\tlive interval crosses a call\n";
        } else {
            LogInfo::MapleLogger() << "\t\tlive interval does not cross a call\n";
        }
    }
#endif
    return saveAcrossCall;
}

/* Return a phys register number for the live interval. */
uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li)
{
    uint32 regNO = 0;
    if (li.GetRegType() == kRegTyInt) {
        regNO = FindAvailablePhyReg(li, true);
    } else {
        DEBUG_ASSERT(li.GetRegType() == kRegTyFloat, "impossible register type");
        regNO = FindAvailablePhyReg(li, false);
    }
    return regNO;
}

/* Spill and reload for caller-saved registers. */
void LSRALinearScanRegAllocator::InsertCallerSave(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 vRegNO = regOpnd.GetRegisterNumber();
    if (vRegNO >= liveIntervalsArray.size()) {
        CHECK_FATAL(false, "index out of range in LSRALinearScanRegAllocator::InsertCallerSave");
    }
    LiveInterval *rli = liveIntervalsArray[vRegNO];
    auto regType = rli->GetRegType();

    if (!isDef && rli->IsNoNeedReloadPosition(insn.GetId())) {
#ifdef ARK_LITECG_DEBUG
        if (needDump) {
            LogInfo::MapleLogger() << "InsertCallerSave R" << rli->GetRegNO() << " assigned " << rli->GetAssignedReg()
                                   << " skipping\n";
        }
#endif
        return;
    }

    if (!rli->IsShouldSave()) {
        return;
    }

    uint32 regSize = rli->GetSpillSize();
    PrimType spType;

    if (regType == kRegTyInt) {
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else {
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    }

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "InsertCallerSave R" << vRegNO << (isDef ? " def" : " use") << "\n";
    }
#endif

    MemOperand *memOpnd = nullptr;
    RegOperand *phyOpnd = nullptr;

    phyOpnd = regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(rli->GetAssignedReg()), regSize, regType);

    std::string comment;
    bool isOutOfRange = false;
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);
    if (isDef) {
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(vRegNO, true, insn, regInfo->GetReservedSpillReg(), isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " SPILL for caller_save " + std::to_string(vRegNO);
        ++callerSaveSpillCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        stInsn->SetComment(comment);
        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        memOpnd = GetSpillMem(vRegNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " RELOAD for caller_save " + std::to_string(vRegNO);
        ++callerSaveReloadCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        ldInsn->SetComment(comment);
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}

MemOperand *LSRALinearScanRegAllocator::GetSpillMem(uint32 vRegNO, bool isDest, Insn &insn, regno_t regNO,
                                                    bool &isOutOfRange, uint32 bitSize) const
{
    MemOperand *memOpnd = regInfo->GetOrCreatSpillMem(vRegNO, bitSize);
    return regInfo->AdjustMemOperandIfOffsetOutOfRange(memOpnd, std::make_pair(vRegNO, regNO), isDest, insn,
                                                       isOutOfRange);
}

/*
 * Generate spill/reload for an operand.
 * spill_idx : one of 3 phys regs set aside for the purpose of spills.
 */
void LSRALinearScanRegAllocator::SpillOperand(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    /*
     * Insert spill (def) and fill (use) instructions for the operand.
     * Keep track of the 'slot' (base 0). The actual slot on the stack
     * will be some 'base_slot_offset' + 'slot' off FP.
     * For simplification, the entire 64-bit register is spilled/filled.
     *
     * For example, a virtual register's home 'slot' on the stack is location 5.
     * This represents a 64-bit slot (8 bytes). The base_slot_offset from the
     * base 'slot' off FP is determined by the frame layout.
     * stack address is ( FP - (5 * 8) + base_slot_offset )
     * So the algorithm is simple: each virtual register that is not
     * allocated has to have a home address on the stack (a slot).
     * A class variable is used, starting from 0, incremented by 1.
     * Since LiveInterval already represents unique regNO information,
     * just add a slot number to it. A subsequent reference to a regNO
     * will either get an allocated physical register or a slot number
     * for computing the stack location.
     *
     * This function will also determine the operand to be a def or use.
     * For def, spill instruction(s) are appended after the insn.
     * For use, fill instruction(s) are prepended before the insn.
     * Use FP - (slot# * 8) for now. Will recompute if base_slot_offset
     * is not 0.
     *
     * The total number of slots used will be used to compute the stack
     * frame size. This will require some interface external to LSRA.
     *
     * For a normal instruction, two spill regs should be enough. The caller
     * controls which ones to use.
     * For more complex operations, need to break down the instruction.
     * eg. store v1 -> [v2 + v3] // 3 regs needed
     *  => p1 <- v2              // address part 1
     *     p2 <- v3              // address part 2
     *     p1 <- p1 + p2         // freeing up p2
     *     p2 <- v1
     *     store p2 -> [p1]
     * or we can allocate more registers to the spill register set.
     * For store multiple, need to break it down into two or more instr.
     */
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 regNO = regOpnd.GetRegisterNumber();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "SpillOperand " << regNO << "\n";
    }
#endif

    regno_t spReg;
    PrimType spType;
    CHECK_FATAL(regNO < liveIntervalsArray.size(), "index out of range in LSRALinearScanRegAllocator::SpillOperand");
    LiveInterval *li = liveIntervalsArray[regNO];
    DEBUG_ASSERT(!li->IsShouldSave(), "SpillOperand: Should not be caller");
    uint32 regSize = li->GetSpillSize();
    RegType regType = regOpnd.GetRegisterType();

    if (li->GetRegType() == kRegTyInt) {
        DEBUG_ASSERT((spillIdx < intSpillRegSet.size()), "SpillOperand: ran out int spill reg");
        spReg = intSpillRegSet[spillIdx] + firstIntReg;
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else if (li->GetRegType() == kRegTyFloat) {
        DEBUG_ASSERT((spillIdx < fpSpillRegSet.size()), "SpillOperand: ran out fp spill reg");
        spReg = fpSpillRegSet[spillIdx] + firstFpReg;
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    } else {
        CHECK_FATAL(false, "SpillOperand: Should be int or float type");
    }
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);

    bool isOutOfRange = false;
    RegOperand *phyOpnd = GetSpillPhyRegOperand(insn, static_cast<regno_t>(spReg), regSize, regType);
    li->SetAssignedReg(phyOpnd->GetRegisterNumber());

    MemOperand *memOpnd = nullptr;
    if (isDef) {
        /*
         * Need to assign spReg (one of the two spill reg) to the destination of the insn.
         *    spill_vreg <- opn1 op opn2
         * to
         *    spReg <- opn1 op opn2
         *    store spReg -> spillmem
         */
        li->SetStackSlot(kSpilled);

        ++spillCount;
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(regNO, true, insn, regInfo->GetReservedSpillReg(), isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " SPILL vreg:" + std::to_string(regNO);
            stInsn->SetComment(comment);
        }

        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        /* Here, reverse of isDef, change either opn1 or opn2 to the spReg. */
        if (li->GetStackSlot() == 0xFFFFFFFF) {
            LogInfo::MapleLogger() << "WARNING: " << regNO << " assigned " << li->GetAssignedReg()
                                   << " restore without spill in bb " << insn.GetBB()->GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
        ++reloadCount;
        memOpnd = GetSpillMem(regNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " RELOAD vreg" + std::to_string(regNO);
            ldInsn->SetComment(comment);
        }
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}

RegOperand *LSRALinearScanRegAllocator::GetSpillPhyRegOperand(Insn &insn, regno_t regNo, uint32 regSize,
                                                              RegType regType)
{
    RegOperand *phyOpnd = nullptr;
    phyOpnd = regInfo->GetOrCreatePhyRegOperand(regNo, regSize, regType);
    MOperator mOp = insn.GetMachineOpcode();
    if (regInfo->IsMovFromRegtoReg(mOp, insn)) {
        auto &regOpnd1 = static_cast<RegOperand &>(insn.GetOperand(1));
        auto &regOpnd0 = static_cast<RegOperand &>(insn.GetOperand(0));
        if ((!regInfo->IsVirtualRegister(regOpnd1)) && (regInfo->IsVirtualRegister(regOpnd0))) {
            phyOpnd = &regOpnd1;
        } else if ((regInfo->IsVirtualRegister(regOpnd1)) && (!regInfo->IsVirtualRegister(regOpnd0))) {
            phyOpnd = &regOpnd0;
        } else if ((regInfo->IsVirtualRegister(regOpnd1)) && (regInfo->IsVirtualRegister(regOpnd0))) {
            multiVregMov = true;
        }
    }
    CHECK_FATAL(phyOpnd != nullptr, "SpillOperand: valid physical register operand");
    return phyOpnd;
}

/* find the lowest-priority li that meets the constraints related to li0 from the current active list */
void LSRALinearScanRegAllocator::FindLowestPrioInActive(LiveInterval *&targetLi, LiveInterval *li0, RegType regType)
{
    std::map<regno_t, uint32> activeLiAssignedRegCnt;
    for (auto *li : active) {
        if (li->GetAssignedReg() != 0) {
            ++activeLiAssignedRegCnt[li->GetAssignedReg()];
        }
    }

    constexpr float kBiggestFloat = 1000.0;
    float lowestPrio = (li0 != nullptr ? li0->GetPriority() : kBiggestFloat);
    bool found = false;
    bool hintCalleeSavedReg = li0 && NeedSaveAcrossCall(*li0);
    MapleSet<LiveInterval *, ActiveCmp>::iterator lowestIt;
    for (auto it = active.begin(); it != active.end(); ++it) {
        LiveInterval *li = static_cast<LiveInterval *>(*it);
        regno_t regNO = li->GetAssignedReg();
        /* 1. Basic constraints */
        if (li->GetPriority() >= lowestPrio || li->GetRegType() != regType) {
            continue;
        }
        /* 2. If li was originally pre-assigned to a physical register, ignore it. */
        if (!regInfo->IsVirtualRegister(li->GetRegNO())) {
            continue;
        }
        /* 3. A callee-saved reg is preferred here. If li is assigned to a non-callee-saved reg, ignore it. */
        if (hintCalleeSavedReg && !regInfo->IsCalleeSavedReg(regNO - firstIntReg)) {
            continue;
        }
        /* 4. If regNO is assigned to multiple active li, ignore it. */
        if (activeLiAssignedRegCnt[regNO] > 1) {
            continue;
        }
        lowestPrio = li->GetPriority();
        lowestIt = it;
        found = true;
    }
    if (found) {
        targetLi = *lowestIt;
        itFinded = lowestIt;
    }
    return;
}

/* Set a vreg in live interval as being marked for spill. */
void LSRALinearScanRegAllocator::SetLiSpill(LiveInterval &li)
{
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "SetLiSpill " << li.GetRegNO();
        LogInfo::MapleLogger() << "(refCount " << li.GetRefCount() << ")\n";
    }
#endif
    li.SetStackSlot(kSpilled);
    li.SetShouldSave(false);
}

uint32 LSRALinearScanRegAllocator::HandleSpillForLi(LiveInterval &li)
{
    /* choose the lowest priority li to spill */
    RegType regType = li.GetRegType();
    LiveInterval *spillLi = nullptr;
    FindLowestPrioInActive(spillLi, &li, regType);

    if (spillLi == nullptr || li.GetStackSlot() == kSpilled || li.GetRefCount() <= spillLi->GetRefCount() ||
        spillLi->GetLastUse() < li.GetLastUse()) {
        /* spill current li */
#ifdef ARK_LITECG_DEBUG
        if (needDump) {
            LogInfo::MapleLogger() << "Flexible Spill: still spill " << li.GetRegNO() << ".\n";
        }
#endif
        SetLiSpill(li);
        return 0;
    }
    DEBUG_ASSERT(spillLi != nullptr, "spillLi is null in LSRALinearScanRegAllocator::HandleSpillForLi");

    uint32 newRegNO = spillLi->GetAssignedReg();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "Flexible Spill: " << spillLi->GetRegNO() << " instead of " << li.GetRegNO() << ".\n";
        PrintLiveInterval(*spillLi, "TO spill: ");
        PrintLiveInterval(li, "Instead of: ");
    }
#endif

    li.SetAssignedReg(newRegNO);
    if (!regInfo->IsCalleeSavedReg(newRegNO) && NeedSaveAcrossCall(li)) {
        li.SetShouldSave(true);
    }

    /* spill this live interval */
    (void)active.erase(itFinded);
    SetLiSpill(*spillLi);
    spillLi->SetAssignedReg(0);

    (void)active.insert(&li);
    return newRegNO;
}

uint32 LSRALinearScanRegAllocator::FindAvailablePhyRegAcrossCall(LiveInterval &li, bool isIntReg)
{
    uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
    uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
    regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;

    /* call in live interval, use callee if available */
    auto bestReg = GetRegFromMask(calleeRegMask, reg0, li);
    if (bestReg != 0 && freeUntilPos[bestReg] >= li.GetLastUse()) {
        li.SetShouldSave(false);
        return bestReg;
    }
    /* could be optimized: consider multiple uses between calls rather than within a bb */
    if (bestReg == 0 || li.IsMultiUseInBB()) {
        auto secondReg = GetRegFromMask(callerRegMask, reg0, li);
        if (freeUntilPos[secondReg] >= li.GetLastUse()) {
            li.SetShouldSave(true);
            return secondReg;
        }
    }
    return 0;
}

uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li, bool isIntReg)
{
    /* See if the register is live across a call */
    bool liAcrossCall = NeedSaveAcrossCall(li);
    if (liAcrossCall && !regInfo->IsPrefCallerSaveRegs(li.GetRegType(), li.GetSpillSize())) {
        return FindAvailablePhyRegAcrossCall(li, isIntReg);
    }

    uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
    uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
    regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;
    auto bestReg = GetRegFromMask(callerRegMask, reg0, li);
    if (bestReg == 0) {
        bestReg = GetRegFromMask(calleeRegMask, reg0, li);
    } else if (freeUntilPos[bestReg] < li.GetLastUse()) {
        auto secondReg = GetRegFromMask(calleeRegMask, reg0, li);
        if (secondReg != 0) {
            bestReg = (freeUntilPos[bestReg] > freeUntilPos[secondReg]) ? bestReg : secondReg;
        }
    }

    if (bestReg != 0 && freeUntilPos[bestReg] < li.GetLastUse()) {
        DEBUG_ASSERT(freeUntilPos[bestReg] != 0, "impossible");
        bestReg = 0;
    }
    /* try to fill in holes */
    /* try first split if no hole exists */
    return bestReg;
}

/* Shell function to find a physical register for an operand. */
uint32 LSRALinearScanRegAllocator::AssignPhysRegs(LiveInterval &li)
{
    if (spillAll) {
        return 0;
    }

    /* pre spilled: */
    if (li.GetStackSlot() != 0xFFFFFFFF) {
        return 0;
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        uint32 activeSz = active.size();
        LogInfo::MapleLogger() << "\tAssignPhysRegs-active_sz " << activeSz << "\n";
    }
#endif

    uint32 regNO = FindAvailablePhyReg(li);
    if (regNO != 0) {
        li.SetAssignedReg(regNO);
        if (regInfo->IsCalleeSavedReg(regNO)) {
#ifdef ARK_LITECG_DEBUG
            if (needDump) {
                LogInfo::MapleLogger() << "\tCallee-save register for save/restore in prologue/epilogue: " << regNO
                                       << "\n";
            }
#endif
            cgFunc->AddtoCalleeSaved(regNO);
        }
    }
    return regNO;
}

void LSRALinearScanRegAllocator::AssignPhysRegsForLi(LiveInterval &li)
{
    uint32 newRegNO = AssignPhysRegs(li);
    if (newRegNO == 0) {
        newRegNO = HandleSpillForLi(li);
    }

    if (newRegNO != 0) {
        (void)active.insert(&li);
    }
}

/* Replace Use-Def Opnd */
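/*
 * A use-def (read-modify-write) operand of a spilled vreg needs both
 * directions: a reload before the insn (the use side) and a store after it
 * (the def side), hence the paired 'false'/'true' calls below.
 */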
RegOperand *LSRALinearScanRegAllocator::GetReplaceUdOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx)
{
    if (!opnd.IsRegister()) {
        return nullptr;
    }
    const auto *regOpnd = static_cast<RegOperand *>(&opnd);

    uint32 vRegNO = regOpnd->GetRegisterNumber();
    RegType regType = regOpnd->GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return nullptr;
    }
    if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
        return nullptr;
    }

    DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
                 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceUdOpnd");
    LiveInterval *li = liveIntervalsArray[vRegNO];

    regno_t regNO = li->GetAssignedReg();
    if (regInfo->IsCalleeSavedReg(regNO)) {
        cgFunc->AddtoCalleeSaved(regNO);
    }

    if (li->IsShouldSave()) {
        InsertCallerSave(insn, opnd, false, spillIdx);
        InsertCallerSave(insn, opnd, true, spillIdx);
    } else if (li->GetStackSlot() == kSpilled) {
        SpillOperand(insn, opnd, false, spillIdx);
        SpillOperand(insn, opnd, true, spillIdx);
        ++spillIdx;
    }
    RegOperand *phyOpnd =
        regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);

    return phyOpnd;
}

/*
 * Create an operand with physical register assigned, or a spill register
 * in the case where a physical register cannot be assigned.
 */
1618 RegOperand *LSRALinearScanRegAllocator::GetReplaceOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx, bool isDef)
1619 {
1620 if (!opnd.IsRegister()) {
1621 return nullptr;
1622 }
1623 const auto *regOpnd = static_cast<RegOperand *>(&opnd);
1624
1625 uint32 vRegNO = regOpnd->GetRegisterNumber();
1626 RegType regType = regOpnd->GetRegisterType();
1627 if (regType == kRegTyCc || regType == kRegTyVary) {
1628 return nullptr;
1629 }
1630 if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
1631 return nullptr;
1632 }
1633
1634 DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
1635 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceOpnd");
1636 LiveInterval *li = liveIntervalsArray[vRegNO];
1637
1638 regno_t regNO = li->GetAssignedReg();
1639 if (regInfo->IsCalleeSavedReg(regNO)) {
1640 cgFunc->AddtoCalleeSaved(regNO);
1641 }
1642 if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
1643 spillIdx = isDef ? 0 : spillIdx;
1644 if (li->IsShouldSave()) {
1645 InsertCallerSave(insn, opnd, isDef, spillIdx);
1646 } else {
1647 SpillOperand(insn, opnd, isDef, spillIdx);
1648 }
1649 if (!isDef) {
1650 ++spillIdx;
1651 }
1652 }
1653 RegOperand *phyOpnd =
1654 regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);
1655
1656 return phyOpnd;
1657 }
1658
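/*
 * CallerSaveOpt tracks, per caller-saved interval, the blocks whose entry
 * already holds a valid copy of the value. A block is covered (defLiveIn) when
 * the register is live-in and every predecessor still has a valid definition
 * at its exit (defAfterInsn). Returns true when any state changed, which
 * drives the fixed-point iteration in Run().
 */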
1659 bool LSRALinearScanRegAllocator::CallerSaveOpt::UpdateLocalDefWithBBLiveIn(const BB &bb)
1660 {
1661 bool changed = false;
1662 auto bbId = bb.GetId();
1663 for (uint32 i = 0; i < callerSaves.size(); ++i) {
1664 auto *li = callerSaves[i];
1665 auto &defLiveIn = localDefLiveIn[i];
1666 auto &defAfterInsn = localDefAfterInsn[i];
1667
1668 if (bb.GetLiveInRegNO().count(li->GetRegNO()) == 0) {
1669 continue;
1670 }
1671 if (defLiveIn.count(bbId) != 0) {
1672 continue;
1673 }
1674 bool isLocalDef = true;
1675 for (auto *pred : bb.GetPreds()) {
1676 if (defAfterInsn.count(pred->GetId()) == 0) {
1677 isLocalDef = false;
1678 break;
1679 }
1680 }
1681 if (isLocalDef) {
1682 defLiveIn.insert(bb.GetId());
1683 defAfterInsn.emplace(bb.GetId(), 0);
1684 changed = true;
1685 }
1686 }
1687 return changed;
1688 }
1689
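/*
 * Scan one insn: a call clobbers the caller-saved register, so the block's
 * recorded definition is invalidated; a use whose value is still valid
 * (defined earlier in this block, or valid at block entry) is marked via
 * AddNoNeedReloadPosition so the later rewrite can skip the reload.
 */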
1690 void LSRALinearScanRegAllocator::CallerSaveOpt::CollectCallerNoNeedReloadByInsn(const Insn &insn)
1691 {
1692 auto curId = insn.GetId();
1693 for (uint32 i = 0; i < callerSaves.size(); ++i) {
1694 auto *li = callerSaves[i];
1695 auto &defLiveIn = localDefLiveIn[i];
1696 auto &defAfterInsn = localDefAfterInsn[i];
1697 auto posRange = li->FindPosRange(curId);
1698 if (posRange == li->GetRanges().end() || posRange->GetStart() > curId) {
1699 continue;
1700 }
1701 if (insn.IsCall()) {
1702 defAfterInsn.erase(insn.GetBB()->GetId());
1703 continue;
1704 }
1705 auto &usePositions = li->GetUsePositions();
1706 if (std::binary_search(usePositions.begin(), usePositions.end(), curId)) {
1707 auto iter = defAfterInsn.find(insn.GetBB()->GetId());
1708 if (iter == defAfterInsn.end() || iter->second > curId) {
1709 defAfterInsn.insert_or_assign(insn.GetBB()->GetId(), curId);
1710 continue;
1711 }
1712 if (iter->second == curId && defLiveIn.count(insn.GetBB()->GetId()) == 0) {
1713 continue;
1714 }
1715 li->AddNoNeedReloadPosition(curId);
1716 }
1717 }
1718 }
1719
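/*
 * Iterate to a fixed point: blocks are revisited in BFS order until no
 * block's local-definition state changes. The first pass always scans every
 * insn so that all use positions are seen at least once.
 */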
1720 void LSRALinearScanRegAllocator::CallerSaveOpt::Run()
1721 {
1722 bool changed;
1723 do {
1724 changed = false;
1725 for (auto *li : callerSaves) {
1726 li->InitRangeFinder();
1727 }
1728 for (BB *bb : bfs->sortedBBs) {
1729 if (!UpdateLocalDefWithBBLiveIn(*bb) && !firstTime) {
1730 continue;
1731 }
1732 changed = true;
1733 FOR_BB_INSNS(insn, bb)
1734 {
1735 if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
1736 continue;
1737 }
1738 CollectCallerNoNeedReloadByInsn(*insn);
1739 }
1740 }
1741 firstTime = false;
1742 } while (changed);
1743 }
1744
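/*
 * Rewrite the source (use) operands of an insn to physical registers,
 * inserting a reload where the interval was spilled or must be restored
 * around calls; spillIdx counts the reloads inserted for this insn and
 * selects the spill scratch register to use.
 */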
1745 void LSRALinearScanRegAllocator::FinalizeUseRegisters(Insn &insn, uint32 &spillIdx)
1746 {
1747 const InsnDesc *md = insn.GetDesc();
1748
1749 for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
1750 const OpndDesc *opndDesc = md->GetOpndDes(i);
1751 DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeUseRegisters");
1752 Operand &opnd = insn.GetOperand(i);
1753 if (opndDesc->IsDef() && !opnd.IsMemoryAccessOperand()) {
1754 continue;
1755 }
1756 if (opnd.IsList()) {
1757 auto &listOpnd = static_cast<ListOperand&>(opnd);
1758 auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
1759 for (auto *regOpnd : listOpnd.GetOperands()) {
1760 RegOperand *phyOpnd =
1761 regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, false) : regOpnd;
1762 newList->PushOpnd(*phyOpnd);
1763 }
1764 insn.SetOperand(i, *newList);
1765 } else if (opnd.IsMemoryAccessOperand()) {
1766 auto *memOpnd = static_cast<MemOperand*>(static_cast<MemOperand&>(opnd).Clone(*cgFunc->GetMemoryPool()));
1767 DEBUG_ASSERT(memOpnd != nullptr, "memopnd is null in LSRALinearScanRegAllocator::FinalizeUseRegisters");
1768 insn.SetOperand(i, *memOpnd);
1769 Operand *base = memOpnd->GetBaseRegister();
1770 Operand *index = memOpnd->GetIndexRegister();
1771 if (base != nullptr) {
1772 auto *phyOpnd = GetReplaceOpnd(insn, *base, spillIdx, false);
1773 if (phyOpnd != nullptr) {
1774 memOpnd->SetBaseRegister(*phyOpnd);
1775 }
1776 }
1777 if (index != nullptr) {
1778 auto *phyOpnd = GetReplaceOpnd(insn, *index, spillIdx, false);
1779 if (phyOpnd != nullptr) {
1780 memOpnd->SetIndexRegister(*phyOpnd);
1781 }
1782 }
1783 } else {
1784 auto *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, false);
1785 if (phyOpnd != nullptr) {
1786 insn.SetOperand(i, *phyOpnd);
1787 }
1788 }
1789 }
1790 }
1791
1792 void LSRALinearScanRegAllocator::FinalizeUseDefRegisters(Insn &insn, uint32 &spillIdx)
1793 {
1794 const InsnDesc *md = insn.GetDesc();
1795
1796 for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
1797 const OpndDesc *opndDesc = md->GetOpndDes(i);
1798 DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeUseDefRegisters");
1799 Operand &opnd = insn.GetOperand(i);
1800 bool isUseDef = opndDesc->IsRegDef() && opndDesc->IsRegUse();
1801 if (!isUseDef) {
1802 continue;
1803 }
1804 RegOperand *phyOpnd = GetReplaceUdOpnd(insn, opnd, spillIdx);
1805 if (phyOpnd != nullptr) {
1806 insn.SetOperand(i, *phyOpnd);
1807 }
1808 }
1809 }
1810
1811 void LSRALinearScanRegAllocator::FinalizeDefRegisters(Insn &insn, uint32 &spillIdx)
1812 {
1813 const InsnDesc *md = insn.GetDesc();
1814
1815 for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
1816 const OpndDesc *opndDesc = md->GetOpndDes(i);
1817 DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeDefRegisters");
1818 Operand &opnd = insn.GetOperand(i);
1819 if (opndDesc->IsUse() || opnd.IsMemoryAccessOperand()) {
1820 continue;
1821 }
1822 if (opnd.IsRegister()) {
1823 RegOperand *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, true);
1824 if (phyOpnd != nullptr) {
1825 insn.SetOperand(i, *phyOpnd);
1826 }
1827 } else if (opnd.IsList()) {
1828 auto &listOpnd = static_cast<ListOperand&>(opnd);
1829 auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
1830 for (auto *regOpnd : listOpnd.GetOperands()) {
1831 RegOperand *phyOpnd =
1832 regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, true) : regOpnd;
1833 newList->PushOpnd(*phyOpnd);
1834 }
1835 insn.SetOperand(i, *newList);
1836 }
1837 }
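/*
 * Small peephole for multi-vreg moves: if the previous insn is a load whose
 * target physical register directly feeds this register-to-register move,
 * retarget both insns to the move's destination so the intermediate copy
 * becomes redundant.
 */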
1838 MOperator mOp = insn.GetMachineOpcode();
1839 if (insn.GetPrev() && multiVregMov) {
1840 if (regInfo->IsMovFromRegtoReg(mOp, insn) && insn.GetPrev()->IsLoad()) {
1841 auto &regOpndPrev = static_cast<RegOperand &>(insn.GetPrev()->GetOperand(0));
1842 auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(1));
1843 auto &regOpndDest = static_cast<RegOperand &>(insn.GetOperand(0));
1844 if ((!regInfo->IsVirtualRegister(regOpndPrev)) && (!regInfo->IsVirtualRegister(regOpnd)) &&
1845 (regOpndPrev == regOpnd)) {
1846 insn.SetOperand(1, regOpndDest);
1847 insn.GetPrev()->SetOperand(0, regOpndDest);
1848 }
1849 }
1850 }
1851 multiVregMov = false;
1852 }
1853 void LSRALinearScanRegAllocator::FinalizeFreeReferenceSpillStack(Insn &insn)
1854 {
1855 if (!insn.IsCall()) {
1856 return;
1857 }
1858
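/* A spill slot of a reference vreg can be recycled once this call is at or
 * past the interval's last use. */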
1859 auto freeSpillStack = [this, &insn](regno_t regno) {
1860 auto *li = liveIntervalsArray[regno];
1861 if (li != nullptr && insn.GetId() >= li->GetLastUse()) {
1862 regInfo->FreeSpillRegMem(regno);
1863 }
1864 };
1865
1866 // referencemap info
1867 for (auto regNO : insn.GetStackMapLiveIn()) {
1868 freeSpillStack(regNO);
1869 auto iter = derivedRef2Base.find(regNO);
1870 if (iter != derivedRef2Base.end()) {
1871 freeSpillStack(iter->second->GetRegisterNumber());
1872 }
1873 }
1874 // deoptinfo
1875 if (insn.GetStackMap() == nullptr) {
1876 return;
1877 }
1878 for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
1879 if (!opnd->IsRegister()) {
1880 continue;
1881 }
1882 freeSpillStack(static_cast<RegOperand*>(opnd)->GetRegisterNumber());
1883 }
1884 }
1885
1886 /* Iterate through all instructions and replace virtual registers with physical registers. */
1887 void LSRALinearScanRegAllocator::FinalizeRegisters()
1888 {
1889 RA_TIMER_REGISTER(lsra, "LSRA FinalizeRegisters");
1890 CallerSaveOpt opt(liveIntervalsArray, bfs);
1891 opt.Run();
1892 for (BB *bb : bfs->sortedBBs) {
1893 FOR_BB_INSNS(insn, bb)
1894 {
1895 if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
1896 continue;
1897 }
1898
1899 uint32 spillIdx = 0;
1900 // Handle source (use) operands first
1901 FinalizeUseRegisters(*insn, spillIdx);
1902
1903 // Handle use-def operands
1904 FinalizeUseDefRegisters(*insn, spillIdx);
1905
1906 // Handle destination (def) operands last
1907 FinalizeDefRegisters(*insn, spillIdx);
1908
1909 // Free spill stack for reference reg
1910 FinalizeFreeReferenceSpillStack(*insn);
1911 }
1912 }
1913 }
1914
1915 void LSRALinearScanRegAllocator::SetStackMapDerivedInfo()
1916 {
1917 for (BB *bb : bfs->sortedBBs) {
1918 FOR_BB_INSNS(insn, bb) {
1919 if (!insn->IsMachineInstruction()) {
1920 continue;
1921 }
1922 const InsnDesc *md = insn->GetDesc();
1923 for (uint32 i = 0; i < insn->GetOperandSize(); ++i) {
1924 if (!md->GetOpndDes(i)->IsRegDef()) {
1925 continue;
1926 }
1927 auto &regOpnd = static_cast<RegOperand&>(insn->GetOperand(i));
1928 if (regOpnd.GetBaseRefOpnd() != nullptr) {
1929 // set the base reference of derived reference for stackmap
1930 derivedRef2Base[regOpnd.GetRegisterNumber()] = regOpnd.GetBaseRefOpnd();
1931 }
1932 }
1933 }
1934 }
1935 }
1936
1937 void LSRALinearScanRegAllocator::CollectReferenceMap()
1938 {
1939 RA_TIMER_REGISTER(lsra, "LSRA CollectReferenceMap");
1940 const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
1941 #ifdef ARK_LITECG_DEBUG
1942 if (needDump) {
1943 LogInfo::MapleLogger() << "===========reference map stack info================\n";
1944 }
1945 #endif
1946
1947 for (auto *insn : referenceMapInsns) {
1948 for (auto regNO : insn->GetStackMapLiveIn()) {
1949 if (!cgFunc->IsRegReference(regNO)) {
1950 continue;
1951 }
1952
1953 auto *li = liveIntervalsArray[regNO];
1954 if (li == nullptr) {
1955 continue;
1956 }
1957
1958 if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
1959 auto itr = derivedRef2Base.find(regNO);
1960 if (itr != derivedRef2Base.end()) {
1961 auto baseRegNum = (itr->second)->GetRegisterNumber();
1962 MemOperand *baseRegMemOpnd = cgFunc->GetOrCreatSpillMem(baseRegNum, k64BitSize);
1963 int64 baseRefMemoffset = baseRegMemOpnd->GetOffsetImmediate()->GetOffsetValue();
1964 insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(baseRefMemoffset);
1965 #ifdef ARK_LITECG_DEBUG
1966 if (needDump) {
1967 LogInfo::MapleLogger() << "--------insn id: " << insn->GetId()
1968 << " base regNO: " << baseRegNum << " offset: "
1969 << baseRefMemoffset << std::endl;
1970 }
1971 #endif
1972 }
1973 MemOperand *memOperand = cgFunc->GetOrCreatSpillMem(regNO, k64BitSize);
1974 int64 offset = memOperand->GetOffsetImmediate()->GetOffsetValue();
1975 insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
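/* A non-derived reference is presumably its own base, so the same offset is
 * recorded a second time to complete the (base, derived) root pair. */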
1976 if (itr == derivedRef2Base.end()) {
1977 insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
1978 }
1979 #ifdef ARK_LITECG_DEBUG
1980 if (needDump) {
1981 LogInfo::MapleLogger() << "--------insn id: " << insn->GetId() << " regNO: " << regNO
1982 << " offset: " << offset << std::endl;
1983 }
1984 #endif
1985 } else {
1986 // reference kept in li->GetAssignedReg() (R0/RAX?) - register roots are not yet supported
1987 CHECK_FATAL(false, "not supported currently");
1988 insn->GetStackMap()->GetReferenceMap().ReocordRegisterRoots(li->GetAssignedReg());
1989 }
1990 }
1991 }
1992
1993 #ifdef ARK_LITECG_DEBUG
1994 if (needDump) {
1995 LogInfo::MapleLogger() << "===========reference map stack info end================\n";
1996 }
1997 if (needDump) {
1998 LogInfo::MapleLogger() << "===========reference map info================\n";
1999 for (auto *insn : referenceMapInsns) {
2000 LogInfo::MapleLogger() << " referenceMap insn: ";
2001 insn->Dump();
2002 insn->GetStackMap()->GetReferenceMap().Dump();
2003 }
2004 }
2005 #endif
2006 }
2007
2008 void LSRALinearScanRegAllocator::SolveRegOpndDeoptInfo(const RegOperand &regOpnd, DeoptInfo &deoptInfo,
2009 int32 deoptVregNO) const
2010 {
2011 if (!regInfo->IsVirtualRegister(regOpnd)) {
2012 // Get Register No
2013 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, 0}));
2014 return;
2015 }
2016 // process virtual RegOperand
2017 regno_t vRegNO = regOpnd.GetRegisterNumber();
2018 LiveInterval *li = liveIntervalsArray[vRegNO];
2019 if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
2020 MemOperand *memOpnd = cgFunc->GetOrCreatSpillMem(vRegNO, regOpnd.GetSize());
2021 SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(memOpnd)), deoptInfo, deoptVregNO);
2022 } else {
2023 // Get Register NO
2024 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, li->GetAssignedReg()}));
2025 }
2026 }
2027
2028 void LSRALinearScanRegAllocator::SolveMemOpndDeoptInfo(const MemOperand &memOpnd, DeoptInfo &deoptInfo,
2029 int32 deoptVregNO) const
2030 {
2031 int64 offset = memOpnd.GetOffsetImmediate()->GetOffsetValue();
2032 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kOnStack, offset}));
2033 }
2034
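/*
 * Lower every deopt bundle operand to a LocationInfo: a physical register for
 * allocated vregs, a stack slot for spilled or should-save intervals, or a
 * constant for immediates.
 */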
2035 void LSRALinearScanRegAllocator::CollectDeoptInfo()
2036 {
2037 RA_TIMER_REGISTER(lsra, "LSRA CollectDeoptInfo");
2038 const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
2039 for (auto *insn : referenceMapInsns) {
2040 auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
2041 const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
2042 if (deoptBundleInfo.empty()) {
2043 continue;
2044 }
2045 for (const auto &item : deoptBundleInfo) {
2046 const auto *opnd = item.second;
2047 if (opnd->IsRegister()) {
2048 SolveRegOpndDeoptInfo(*static_cast<const RegOperand *>(opnd), deoptInfo, item.first);
2049 continue;
2050 }
2051 if (opnd->IsImmediate()) {
2052 const auto *immOpnd = static_cast<const ImmOperand *>(opnd);
2053 deoptInfo.RecordDeoptVreg2LocationInfo(item.first, LocationInfo({kInConstValue, immOpnd->GetValue()}));
2054 continue;
2055 }
2056 if (opnd->IsMemoryAccessOperand()) {
2057 SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(opnd)), deoptInfo, item.first);
2058 continue;
2059 }
2060 DEBUG_ASSERT(false, "can't reach here!");
2061 }
2062 }
2063 #ifdef ARK_LITECG_DEBUG
2064 if (needDump) {
2065 LogInfo::MapleLogger() << "===========deopt info================\n";
2066 for (auto *insn : referenceMapInsns) {
2067 LogInfo::MapleLogger() << "---- deoptInfo insn: ";
2068 insn->Dump();
2069 insn->GetStackMap()->GetDeoptInfo().Dump();
2070 }
2071 }
2072 #endif
2073 }
2074
2075 void LSRALinearScanRegAllocator::SetAllocMode()
2076 {
2077 #ifdef ARK_LITECG_DEBUG
2078 if (needDump) {
2079 if (spillAll) {
2080 LogInfo::MapleLogger() << "spillAll mode on\n";
2081 }
2082 }
2083 #endif
2084 }
2085
2086 void LSRALinearScanRegAllocator::LinearScanRegAllocator()
2087 {
2088 RA_TIMER_REGISTER(lsra, "LSRA LinearScanRegAllocator");
2089 #ifdef ARK_LITECG_DEBUG
2090 if (needDump) {
2091 PrintParamQueue("Initial param queue");
2092 PrintCallQueue("Initial call queue");
2093 }
2094 #endif
2095 freeUntilPos.resize(regInfo->GetAllRegNum(), UINT32_MAX);
2096 MapleVector<uint32> initialPosVec(freeUntilPos);
2097 uint32 curInsnID = 0;
2098
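/*
 * Main linear-scan loop: intervals are popped in increasing order of their
 * first definition; expired intervals are retired from 'active', freeUntilPos
 * is recomputed from the remaining active intervals, parameters and calls,
 * and then the interval is given a physical register or spilled.
 */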
2099 while (!liQue.empty()) {
2100 LiveInterval *li = liQue.front();
2101 liQue.pop_front();
2102 if (li->GetRangesSize() == 0) {
2103 /* range building has been skipped */
2104 li->AddRange(li->GetFirstDef(), li->GetLastUse());
2105 }
2106 li->InitRangeFinder();
2107 #ifdef ARK_LITECG_DEBUG
2108 if (needDump) {
2109 LogInfo::MapleLogger() << "======Alloc R" << li->GetRegNO() << "======"
2110 << "\n";
2111 }
2112 #endif
2113 blockForbiddenMask = 0;
2114 freeUntilPos = initialPosVec;
2115 DEBUG_ASSERT(li->GetFirstDef() >= curInsnID, "wrong li order");
2116 curInsnID = li->GetFirstDef();
2117 RetireActive(*li, curInsnID);
2118 UpdateCallQueueAtRetirement(curInsnID);
2119 UpdateActiveAllocateInfo(*li);
2120 UpdateParamAllocateInfo(*li);
2121 #ifdef ARK_LITECG_DEBUG
2122 if (needDump) {
2123 DebugCheckActiveList();
2124 LogInfo::MapleLogger() << "freeUntilPos:";
2125 for (uint32 i = 0; i < freeUntilPos.size(); ++i) {
2126 LogInfo::MapleLogger() << "[" << i << "," << freeUntilPos[i] << "], ";
2127 }
2128 LogInfo::MapleLogger() << "\n";
2129 }
2130 #endif
2131 AssignPhysRegsForLi(*li);
2132 }
2133 }
2134
2135 void LSRALinearScanRegAllocator::CollectStackMapInfo()
2136 {
2137 CollectReferenceMap();
2138 CollectDeoptInfo();
2139 }
2140
2141 /* Main entry point of the LSRA register allocator */
2142 bool LSRALinearScanRegAllocator::AllocateRegisters()
2143 {
2144 cgFunc->SetIsAfterRegAlloc();
2145 liveIntervalsArray.resize(cgFunc->GetMaxVReg());
2146 regInfo->Fini();
2147 SetAllocMode();
2148 #ifdef ARK_LITECG_DEBUG
2149 if (needDump) {
2150 const MIRModule &mirModule = cgFunc->GetMirModule();
2151 DotGenerator::GenerateDot("RA", *cgFunc, mirModule);
2152 DotGenerator::GenerateDot("RAe", *cgFunc, mirModule);
2153 LogInfo::MapleLogger() << "Entering LinearScanRegAllocator: " << cgFunc->GetName() << "\n";
2154 }
2155 #endif
2156
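/*
 * Overall flow: record derived-reference bases for stackmaps, build live
 * intervals, pre-spill vregs referenced by stackmaps, run the pre-spill
 * overlap analysis, perform the linear scan, rewrite all operands to
 * physical registers, and finally collect reference-map/deopt info.
 */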
2157 if (!cgFunc->IsStackMapComputed()) {
2158 SetStackMapDerivedInfo();
2159 }
2160
2161 ComputeLiveInterval();
2162
2163 if (!cgFunc->IsStackMapComputed()) {
2164 SpillStackMapInfo();
2165 }
2166
2167 bool enableDoLSRAPreSpill = true;
2168 if (enableDoLSRAPreSpill) {
2169 LiveIntervalAnalysis();
2170 }
2171
2172 InitFreeRegPool();
2173
2174 LinearScanRegAllocator();
2175
2176 #ifdef ARK_LITECG_DEBUG
2177 if (needDump) {
2178 PrintAllLiveRanges();
2179 }
2180 #endif
2181
2182 FinalizeRegisters();
2183
2184 if (!cgFunc->IsStackMapComputed()) {
2185 CollectStackMapInfo();
2186 }
2187
2188 #ifdef ARK_LITECG_DEBUG
2189 if (needDump) {
2190 LogInfo::MapleLogger() << "Total " << spillCount << " spillCount in " << cgFunc->GetName() << " \n";
2191 LogInfo::MapleLogger() << "Total " << reloadCount << " reloadCount\n";
2192 LogInfo::MapleLogger() << "Total "
2193 << "(" << spillCount << " + " << callerSaveSpillCount
2194 << ") = " << (spillCount + callerSaveSpillCount) << " SPILL\n";
2195 LogInfo::MapleLogger() << "Total "
2196 << "(" << reloadCount << " + " << callerSaveReloadCount
2197 << ") = " << (reloadCount + callerSaveReloadCount) << " RELOAD\n";
2198 uint32_t insertInsn = spillCount + callerSaveSpillCount + reloadCount + callerSaveReloadCount;
2199 float rate = (static_cast<float>(insertInsn) / static_cast<float>(maxInsnNum));
2200 LogInfo::MapleLogger() << "insn num before RA: " << maxInsnNum << ", inserted " << insertInsn
2201 << " insns, insertInsn/insnNumBeforeRA: " << rate << "\n";
2202 }
2203 #endif
2204
2205 bfs = nullptr; /* bfs is not used after register allocation. */
2206
2207 return true;
2208 }
2209
2210 } /* namespace maplebe */
2211