1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "reg_alloc_lsra.h"
17 #include "aarch64_cg.h"
18
19 namespace maplebe {
namespace {
/* Sentinel stack-slot value: a LiveInterval whose slot equals kSpilled has been spilled. */
constexpr uint32 kSpilled = 1;
/* Here, kLoopWeight is a fine-tuned empirical parameter */
constexpr uint32 kLoopWeight = 4;
} // namespace
#ifdef ARK_LITECG_DEBUG
/* Cap on the number of intervals printed by PrintActiveList in debug dumps. */
constexpr uint32 kPrintedActiveListLength = 10;
27
28 /*
29 * This LSRA implementation is an interpretation of the [Poletto97] paper.
 * BFS BB ordering is used to order the instructions. The live intervals are based on
 * this instruction order. All vreg defines should come before a use, else a warning is
32 * given.
33 * Live interval is traversed in order from lower instruction order to higher order.
34 * When encountering a live interval for the first time, it is assumed to be live and placed
35 * inside the 'active' structure until the vreg's last access. During the time a vreg
36 * is in 'active', the vreg occupies a physical register allocation and no other vreg can
37 * be allocated the same physical register.
38 */
PrintRegSet(const MapleSet<uint32> & set,const std::string & str) const39 void LSRALinearScanRegAllocator::PrintRegSet(const MapleSet<uint32> &set, const std::string &str) const
40 {
41 LogInfo::MapleLogger() << str;
42 for (auto reg : set) {
43 LogInfo::MapleLogger() << " " << reg;
44 }
45 LogInfo::MapleLogger() << "\n";
46 }
47
PrintLiveRanges(const LiveInterval & li) const48 void LSRALinearScanRegAllocator::PrintLiveRanges(const LiveInterval &li) const
49 {
50 if (li.GetAssignedReg() != 0) {
51 uint32 base = (li.GetRegType() == kRegTyInt) ? firstIntReg : firstFpReg;
52 char regType = (li.GetRegType() == kRegTyInt) ? 'R' : 'V';
53 LogInfo::MapleLogger() << "(assigned " << regType << (li.GetAssignedReg() - base) << ")";
54 }
55 if (li.GetStackSlot() == kSpilled) {
56 LogInfo::MapleLogger() << "(spill)";
57 }
58 if (li.IsShouldSave()) {
59 LogInfo::MapleLogger() << "(ShouldSave)";
60 }
61 for (auto range : li.GetRanges()) {
62 LogInfo::MapleLogger() << "[" << range.GetStart() << ", " << range.GetEnd() << "]"
63 << " ";
64 }
65 LogInfo::MapleLogger() << "\n";
66 }
67
PrintAllLiveRanges() const68 void LSRALinearScanRegAllocator::PrintAllLiveRanges() const
69 {
70 LogInfo::MapleLogger() << "func: " << cgFunc->GetName() << "\n";
71 for (auto *li : liveIntervalsArray) {
72 if (li == nullptr || li->GetRegNO() == 0) {
73 continue;
74 }
75 LogInfo::MapleLogger() << "vreg" << li->GetRegNO() << ": ";
76 PrintLiveRanges(*li);
77 }
78 }
79 #endif
80
SpillStackMapInfo()81 void LSRALinearScanRegAllocator::SpillStackMapInfo()
82 {
83 RA_TIMER_REGISTER(lsra, "LSRA SpillStackMapInfo");
84 const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
85
86 for (auto *insn : referenceMapInsns) {
87 auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
88 const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
89 for (const auto &item : deoptBundleInfo) {
90 const auto *opnd = item.second;
91 if (!opnd->IsRegister()) {
92 continue;
93 }
94 auto *li = liveIntervalsArray[(static_cast<const RegOperand *>(opnd))->GetRegisterNumber()];
95 if (li != nullptr) {
96 #ifdef ARK_LITECG_DEBUG
97 if (needDump) {
98 PrintLiveInterval(*li, "Spill Deopt:");
99 }
100 #endif
101 li->SetStackSlot(kSpilled);
102 li->SetShouldSave(false);
103 }
104 }
105
106 for (auto regNO : insn->GetStackMapLiveIn()) {
107 if (!cgFunc->IsRegReference(regNO)) {
108 continue;
109 }
110 auto *li = liveIntervalsArray[regNO];
111 if (li == nullptr) {
112 continue;
113 }
114 auto itr = derivedRef2Base.find(regNO);
115 if (itr != derivedRef2Base.end()) {
116 auto baseRegNum = (itr->second)->GetRegisterNumber();
117 DEBUG_ASSERT(liveIntervalsArray[baseRegNum] != nullptr, "empty li");
118 #ifdef ARK_LITECG_DEBUG
119 if (needDump) {
120 PrintLiveInterval(*liveIntervalsArray[baseRegNum], "Spill StackMap derivedRef:");
121 }
122 #endif
123 liveIntervalsArray[baseRegNum]->SetStackSlot(kSpilled);
124 liveIntervalsArray[baseRegNum]->SetShouldSave(false);
125 }
126 #ifdef ARK_LITECG_DEBUG
127 if (needDump) {
128 PrintLiveInterval(*li, "Spill StackMap:");
129 }
130 #endif
131 li->SetStackSlot(kSpilled);
132 li->SetShouldSave(false);
133 }
134 }
135 }
136
137 #ifdef ARK_LITECG_DEBUG
PrintLiveInterval(const LiveInterval & li,const std::string & str) const138 void LSRALinearScanRegAllocator::PrintLiveInterval(const LiveInterval &li, const std::string &str) const
139 {
140 LogInfo::MapleLogger() << str << "\n";
141 if (li.GetPhysUse() > 0) {
142 LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
143 LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
144 LogInfo::MapleLogger() << " physUse " << li.GetPhysUse();
145 } else {
146 LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
147 LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
148 LogInfo::MapleLogger() << " lastUse " << li.GetLastUse();
149 LogInfo::MapleLogger() << " assigned " << li.GetAssignedReg();
150 LogInfo::MapleLogger() << " refCount " << li.GetRefCount();
151 LogInfo::MapleLogger() << " priority " << li.GetPriority();
152 }
153 LogInfo::MapleLogger() << "\n";
154 }
155
/*
 * Dump every interval in each non-empty int-parameter queue.
 * The queue is traversed destructively-but-restoratively: each printed element
 * is rotated from the front to the back, so once 'last' has been printed the
 * queue contents are back in their original order.
 */
void LSRALinearScanRegAllocator::PrintParamQueue(const std::string &str)
{
    LogInfo::MapleLogger() << str << "\n";
    for (SingleQue &que : intParamQueue) {
        if (que.empty()) {
            continue;
        }
        LiveInterval *li = que.front();
        LiveInterval *last = que.back();
        PrintLiveInterval(*li, "");
        while (li != last) {
            /* rotate: move the printed element to the back, print the new front */
            que.pop_front();
            que.push_back(li);
            li = que.front();
            PrintLiveInterval(*li, "");
        }
        /* final rotation restores the original queue order */
        que.pop_front();
        que.push_back(li);
    }
}
176
PrintCallQueue(const std::string & str) const177 void LSRALinearScanRegAllocator::PrintCallQueue(const std::string &str) const
178 {
179 LogInfo::MapleLogger() << str << "\n";
180 for (auto callInsnID : callQueue) {
181 LogInfo::MapleLogger() << callInsnID << " ";
182 }
183 LogInfo::MapleLogger() << "\n";
184 }
185
PrintActiveList(const std::string & str,uint32 len) const186 void LSRALinearScanRegAllocator::PrintActiveList(const std::string &str, uint32 len) const
187 {
188 uint32 count = 0;
189 LogInfo::MapleLogger() << str << " " << active.size() << "\n";
190 for (auto *li : active) {
191 PrintLiveInterval(*li, "");
192 ++count;
193 if ((len != 0) && (count == len)) {
194 break;
195 }
196 }
197 }
198
PrintLiveIntervals() const199 void LSRALinearScanRegAllocator::PrintLiveIntervals() const
200 {
201 /* vreg LogInfo */
202 for (auto *li : liveIntervalsArray) {
203 if (li == nullptr || li->GetRegNO() == 0) {
204 continue;
205 }
206 PrintLiveInterval(*li, "");
207 }
208 LogInfo::MapleLogger() << "\n";
209 /* preg LogInfo */
210 for (auto param : intParamQueue) {
211 for (auto *li : param) {
212 if (li == nullptr || li->GetRegNO() == 0) {
213 continue;
214 }
215 PrintLiveInterval(*li, "");
216 }
217 }
218 LogInfo::MapleLogger() << "\n";
219 }
220
DebugCheckActiveList() const221 void LSRALinearScanRegAllocator::DebugCheckActiveList() const
222 {
223 LiveInterval *prev = nullptr;
224 for (auto *li : active) {
225 if (prev != nullptr) {
226 if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
227 (prev->GetRegNO() > regInfo->GetLastParamsFpReg())) {
228 if (li->GetFirstDef() < prev->GetFirstDef()) {
229 LogInfo::MapleLogger() << "ERRer: active list with out of order phys + vreg\n";
230 PrintLiveInterval(*prev, "prev");
231 PrintLiveInterval(*li, "current");
232 PrintActiveList("Active", kPrintedActiveListLength);
233 }
234 }
235 if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
236 (prev->GetRegNO() <= regInfo->GetLastParamsFpReg())) {
237 if (li->GetFirstDef() < prev->GetFirstDef()) {
238 LogInfo::MapleLogger() << "ERRer: active list with out of order phys reg use\n";
239 PrintLiveInterval(*prev, "prev");
240 PrintLiveInterval(*li, "current");
241 PrintActiveList("Active", kPrintedActiveListLength);
242 }
243 }
244 } else {
245 prev = li;
246 }
247 }
248 }
249 #endif
250
251 /*
252 * Prepare the free physical register pool for allocation.
253 * When a physical register is allocated, it is removed from the pool.
254 * The physical register is re-inserted into the pool when the associated live
255 * interval has ended.
256 */
void LSRALinearScanRegAllocator::InitFreeRegPool()
{
    /* Classify each allocatable physical register into caller/callee-saved
     * masks (and debug sets), and reserve spill scratch registers. */
    for (regno_t regNO : regInfo->GetAllRegs()) {
        if (!regInfo->IsAvailableReg(regNO)) {
            continue;
        }
        if (regInfo->IsGPRegister(regNO)) {
            /* the yield-point register is never handed out by the allocator */
            if (regInfo->IsYieldPointReg(regNO)) {
                continue;
            }
            /* ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                intSpillRegSet.push_back(regNO - firstIntReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* callee-saved registers */
#ifdef ARK_LITECG_DEBUG
                (void)intCalleeRegSet.insert(regNO - firstIntReg);
#endif
                intCalleeMask |= 1u << (regNO - firstIntReg);
            } else {
                /* caller-saved registers */
#ifdef ARK_LITECG_DEBUG
                (void)intCallerRegSet.insert(regNO - firstIntReg);
#endif
                intCallerMask |= 1u << (regNO - firstIntReg);
            }
        } else {
            /* fp ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                fpSpillRegSet.push_back(regNO - firstFpReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* fp callee-saved registers */
#ifdef ARK_LITECG_DEBUG
                (void)fpCalleeRegSet.insert(regNO - firstFpReg);
#endif
                fpCalleeMask |= 1u << (regNO - firstFpReg);
            } else {
                /* fp caller-saved registers */
#ifdef ARK_LITECG_DEBUG
                (void)fpCallerRegSet.insert(regNO - firstFpReg);
#endif
                fpCallerMask |= 1u << (regNO - firstFpReg);
            }
        }
    }
    /* spilling a two-operand insn can need two scratch registers at once */
    DEBUG_ASSERT(intSpillRegSet.size() >= 2U, "too few spill regs");

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        PrintRegSet(intCallerRegSet, "ALLOCATABLE_INT_CALLER");
        PrintRegSet(intCalleeRegSet, "ALLOCATABLE_INT_CALLEE");
        PrintRegSet(intParamRegSet, "ALLOCATABLE_INT_PARAM");
        PrintRegSet(fpCallerRegSet, "ALLOCATABLE_FP_CALLER");
        PrintRegSet(fpCalleeRegSet, "ALLOCATABLE_FP_CALLEE");
        PrintRegSet(fpParamRegSet, "ALLOCATABLE_FP_PARAM");
        LogInfo::MapleLogger() << "INT_SPILL_REGS";
        for (uint32 intSpillRegNO : intSpillRegSet) {
            LogInfo::MapleLogger() << " " << intSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << "FP_SPILL_REGS";
        for (uint32 fpSpillRegNO : fpSpillRegSet) {
            LogInfo::MapleLogger() << " " << fpSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << std::hex;
        LogInfo::MapleLogger() << "INT_CALLER_MASK " << intCallerMask << "\n";
        LogInfo::MapleLogger() << "INT_CALLEE_MASK " << intCalleeMask << "\n";
        LogInfo::MapleLogger() << "INT_PARAM_MASK " << intParamMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLER_FP_MASK " << fpCallerMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLEE_FP_MASK " << fpCalleeMask << "\n";
        LogInfo::MapleLogger() << "FP_PARAM_FP_MASK " << fpParamMask << "\n";
        LogInfo::MapleLogger() << std::dec;
    }
#endif
}
337
RecordPhysRegs(const RegOperand & regOpnd,uint32 insnNum,bool isDef)338 void LSRALinearScanRegAllocator::RecordPhysRegs(const RegOperand ®Opnd, uint32 insnNum, bool isDef)
339 {
340 RegType regType = regOpnd.GetRegisterType();
341 uint32 regNO = regOpnd.GetRegisterNumber();
342 if (regType == kRegTyCc || regType == kRegTyVary) {
343 return;
344 }
345 if (regInfo->IsUnconcernedReg(regNO) || regInfo->IsVirtualRegister(regNO)) {
346 return;
347 }
348 if (isDef) {
349 /* parameter/return register def is assumed to be live until a call. */
350 auto *li = memPool->New<LiveInterval>(*memPool);
351 li->SetRegNO(regNO);
352 li->SetRegType(regType);
353 li->SetStackSlot(0xFFFFFFFF);
354 li->SetFirstDef(insnNum);
355 li->SetPhysUse(insnNum);
356 li->SetAssignedReg(regNO);
357
358 if (regType == kRegTyInt) {
359 intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
360 } else {
361 fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
362 }
363 } else {
364 if (regType == kRegTyInt) {
365 CHECK_FATAL(!intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty(),
366 "was not defined before use, impossible");
367 LiveInterval *li = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
368 li->SetPhysUse(insnNum);
369 } else {
370 CHECK_FATAL(!fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty(),
371 "was not defined before use, impossible");
372 LiveInterval *li = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
373 li->SetPhysUse(insnNum);
374 }
375 }
376 }
377
UpdateRegUsedInfo(LiveInterval & li,regno_t regNO)378 void LSRALinearScanRegAllocator::UpdateRegUsedInfo(LiveInterval &li, regno_t regNO)
379 {
380 uint32 index = regNO / (sizeof(uint64) * k8ByteSize);
381 uint64 bit = regNO % (sizeof(uint64) * k8ByteSize);
382 if ((regUsedInBB[index] & (static_cast<uint64>(1) << bit)) != 0) {
383 li.SetMultiUseInBB(true);
384 }
385 regUsedInBB[index] |= (static_cast<uint64>(1) << bit);
386
387 if (minVregNum > regNO) {
388 minVregNum = regNO;
389 }
390 if (maxVregNum < regNO) {
391 maxVregNum = regNO;
392 }
393 }
394
395 /* main entry function for live interval computation. */
/*
 * Main entry for live-interval computation of one operand of 'insn'.
 * Physical registers are delegated to RecordPhysRegs; for virtual registers a
 * LiveInterval is created on first sight and then its firstDef/lastUse, ranges,
 * spill size, refCount and use positions are updated.
 * 'nUses' is incremented for every virtual-register use (caller uses it to
 * decide whether an extra spill register is needed).
 */
void LSRALinearScanRegAllocator::SetupLiveInterval(Operand &opnd, Insn &insn, bool isDef, uint32 &nUses, uint32 regSize)
{
    if (!opnd.IsRegister()) {
        return;
    }
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 insnNum = insn.GetId();
    if (!regInfo->IsVirtualRegister(regOpnd)) {
        RecordPhysRegs(regOpnd, insnNum, isDef);
        return;
    }
    /* condition-code and vary registers carry no allocatable interval */
    RegType regType = regOpnd.GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return;
    }

    LiveInterval *li = nullptr;
    uint32 regNO = regOpnd.GetRegisterNumber();
    DEBUG_ASSERT(regNO <= liveIntervalsArray.size() - 1, "index out of range");
    if (liveIntervalsArray[regNO] == nullptr) {
        /* first time this vreg is seen: create its interval */
        li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        liveIntervalsArray[regNO] = li;
        liQue.push_back(li);
    } else {
        li = liveIntervalsArray[regNO];
    }
    li->SetRegType(regType);

    BB *curBB = insn.GetBB();
    if (isDef) {
        /* never set to 0 before, why consider this condition ? */
        if (li->GetFirstDef() == 0) {
            li->SetFirstDef(insnNum);
            /* lastUse is set one past the def so the value outlives the def insn
             * itself — TODO confirm rationale against the interval retirement code */
            li->SetLastUse(insnNum + 1);
        } else if (!curBB->IsUnreachable()) {
            if (li->GetLastUse() < insnNum || li->IsUseBeforeDef()) {
                li->SetLastUse(insnNum + 1);
            }
        }
        // add range
        li->AddRange(insn.GetId(), insn.GetId());
    } else {
        if (li->GetFirstDef() == 0) {
            DEBUG_ASSERT(false, "SetupLiveInterval: use before def");
        }
        /*
         * In ComputeLiveInterval when extending live interval using
         * live-out information, li created does not have a type.
         */
        if (!curBB->IsUnreachable()) {
            li->SetLastUse(insnNum);
        }
        ++nUses;
        // update range
        if (li->GetRanges().empty()) {
            li->AddRange(insn.GetId(), insn.GetId());
        } else {
            li->GetRanges().back().SetEnd(insn.GetId());
        }
    }
    /* track the widest def/use so the spill slot is sized correctly */
    if (isDef) {
        li->SetMaxDefSize(std::max(regSize, li->GetMaxDefSize()));
    } else {
        li->SetMaxUseSize(std::max(regSize, li->GetMaxUseSize()));
    }
    /* spill size: the smaller of def/use widths when both are known */
    if (li->GetMaxDefSize() == 0) {
        li->SetSpillSize(li->GetMaxUseSize());
    } else if (li->GetMaxUseSize() == 0) {
        li->SetSpillSize(li->GetMaxDefSize());
    } else {
        li->SetSpillSize(std::min(li->GetMaxDefSize(), li->GetMaxUseSize()));
    }
    li->SetRefCount(li->GetRefCount() + 1);
    li->AddUsePositions(insnNum);
    UpdateRegUsedInfo(*li, regNO);

    /* setup the def/use point for it */
    DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "out of range of vector liveIntervalsArray");
}
477
478 /*
479 * Support 'hole' in LSRA.
480 * For a live interval, there might be multiple segments of live ranges,
481 * and between these segments a 'hole'.
482 * Some other short lived vreg can go into these 'holes'.
483 *
484 * from : starting instruction sequence id
485 * to : ending instruction sequence id
486 */
void LSRALinearScanRegAllocator::LiveInterval::AddRange(uint32 from, uint32 to)
{
    /* first range of this interval */
    if (ranges.empty()) {
        ranges.emplace_back(from, to);
        return;
    }
    /* create a new range */
    if (from > ranges.back().GetEnd()) {
        ranges.emplace_back(from, to);
        return;
    }
    DEBUG_ASSERT(to >= ranges.back().GetStart(), "No possible on forward traverse.");
    /* new range fully encloses the last one: widen both ends */
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetStart()) {
        ranges.back().SetStart(from);
        ranges.back().SetEnd(to);
        return;
    }
    /* extend it's range forward. e.g. def-use opnd */
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetEnd()) {
        ranges.back().SetEnd(to);
        return;
    }
    /* NOTE(review): when from == back.GetEnd() and to > back.GetEnd() nothing is
     * extended; all current callers pass from == to, so this case appears
     * unreachable — confirm before relying on AddRange with from < to. */
    return;
}
511
FindPosRange(uint32 pos)512 MapleVector<LSRALinearScanRegAllocator::LinearRange>::iterator LSRALinearScanRegAllocator::LiveInterval::FindPosRange(
513 uint32 pos)
514 {
515 while (rangeFinder != ranges.end()) {
516 if (rangeFinder->GetEnd() >= pos) {
517 break;
518 }
519 ++rangeFinder;
520 }
521 return rangeFinder;
522 }
523
524 /* Extend live interval with live-in info */
/* Extend live interval with live-in info */
void LSRALinearScanRegAllocator::UpdateLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto &regNO : bb.GetLiveInRegNO()) {
        if (!regInfo->IsVirtualRegister(regNO)) {
            /* Do not consider physical regs. */
            continue;
        }
        DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "index out of range.");
        LiveInterval *liOuter = liveIntervalsArray[regNO];
        if (liOuter != nullptr) {
            /* interval already exists: just record liveness at the BB entry */
            // add live interval range
            liOuter->AddRange(insnNum, insnNum);
            continue;
        }
        /* empty BBs get no interval, except the entry BB (id 1) */
        if (bb.IsEmpty() && bb.GetId() != 1) {
            continue;
        }
        /*
         * try-catch related
         * Since it is livein but not seen before, its a use before def
         * spill it temporarily
         */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(kSpilled);
        liveIntervalsArray[regNO] = li;
        li->SetFirstDef(insnNum);
        liQue.push_back(li);

        li->SetUseBeforeDef(true);

        // add live interval range
        li->AddRange(insnNum, insnNum);

        /* a use-before-def outside bb 1 indicates broken liveness info */
        if (!bb.IsUnreachable()) {
            if (bb.GetId() != 1) {
                LogInfo::MapleLogger() << "ERROR: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                       << cgFunc->GetName() << "\n";
                DEBUG_ASSERT(false, "There should only be [use before def in bb 1], temporarily.");
            }
            LogInfo::MapleLogger() << "WARNING: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
    }
}
570
571 /* traverse live in regNO, for each live in regNO create a new liveinterval */
/* traverse live in regNO, for each live in regNO create a new liveinterval */
void LSRALinearScanRegAllocator::UpdateParamLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto regNO : bb.GetLiveInRegNO()) {
        if (regInfo->IsUnconcernedReg(regNO)) {
            continue;
        }
        /* NOTE(review): the break assumes the live-in set iterates physical
         * registers (low regNOs) before virtual ones — confirm the set is
         * ordered by register number. */
        if (regInfo->IsVirtualRegister(regNO)) {
            break;
        }
        /* live-in phys reg: open an interval defined and used at the BB entry */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        li->SetFirstDef(insnNum);
        li->SetPhysUse(insnNum);
        li->SetAssignedReg(regNO);

        if (regInfo->IsGPRegister(regNO)) {
            li->SetRegType(kRegTyInt);
            intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
        } else {
            li->SetRegType(kRegTyFloat);
            fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
        }
    }
}
597
ComputeLiveIn(BB & bb,uint32 insnNum)598 void LSRALinearScanRegAllocator::ComputeLiveIn(BB &bb, uint32 insnNum)
599 {
600 if (bb.IsEmpty() && bb.GetId() != 1) {
601 return;
602 }
603 #ifdef ARK_LITECG_DEBUG
604 if (needDump) {
605 LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEOUT:";
606 for (const auto &liveOutRegNO : bb.GetLiveOutRegNO()) {
607 LogInfo::MapleLogger() << " " << liveOutRegNO;
608 }
609 LogInfo::MapleLogger() << ".\n";
610 LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEIN:";
611 for (const auto &liveInRegNO : bb.GetLiveInRegNO()) {
612 LogInfo::MapleLogger() << " " << liveInRegNO;
613 }
614 LogInfo::MapleLogger() << ".\n";
615 }
616 #endif
617
618 UpdateLiveIntervalByLiveIn(bb, insnNum);
619
620 if (bb.GetFirstInsn() == nullptr) {
621 return;
622 }
623 UpdateParamLiveIntervalByLiveIn(bb, insnNum);
624 }
625
/*
 * Process the live-out set of 'bb' at position 'insnNum' (one past the last
 * insn of the BB): extend vreg intervals (and their derived-ref bases) to the
 * BB end, and close phys-reg parameter intervals that were opened in this BB.
 */
void LSRALinearScanRegAllocator::ComputeLiveOut(BB &bb, uint32 insnNum)
{
    auto updateLiveIntervalByLiveOut = [this, &bb, insnNum](regno_t regNO) {
        /* Extend live interval with live-out info */
        LiveInterval *li = liveIntervalsArray[regNO];
        if (li != nullptr && !bb.IsEmpty()) {
            li->SetLastUse(bb.GetLastInsn()->GetId());

            // update live interval range
            if (li->GetRanges().empty()) {
                li->AddRange(insnNum, insnNum);
            } else {
                li->GetRanges().back().SetEnd(insnNum);
            }
        }
    };

    /*
     * traverse live out regNO
     * for each live out regNO if the last corresponding live interval is created within this bb
     * update this lastUse of li to the end of BB
     */
    for (const auto regNO : bb.GetLiveOutRegNO()) {
        if (!regInfo->IsVirtualRegister(regNO)) {
            /* physical register: extend the newest parameter interval if it was opened in this BB */
            if (regInfo->IsUnconcernedReg(regNO)) {
                continue;
            }
            LiveInterval *liOut = nullptr;
            if (regInfo->IsGPRegister(regNO)) {
                if (intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            } else {
                if (fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            }
            continue;
        }
        updateLiveIntervalByLiveOut(regNO);
        /* a live-out derived reference keeps its base register live too */
        auto iter = derivedRef2Base.find(regNO);
        if (iter != derivedRef2Base.end()) {
            updateLiveIntervalByLiveOut(iter->second->GetRegisterNumber());
        }
    }
}
680
/* Transient record describing one register occurrence within an insn,
 * collected before live intervals are set up for the insn's operands. */
struct RegOpndInfo {
    RegOpndInfo(RegOperand &opnd, uint32 size, uint32 idx, bool def)
        : regOpnd(opnd), regSize(size), opndIdx(idx), isDef(def)
    {
    }
    RegOperand &regOpnd; /* the register operand itself */
    uint32 regSize = 0;  /* access size in bits */
    uint32 opndIdx = -1; /* operand index within the insn */
    bool isDef = false;  /* true if this occurrence defines the register */
};
691
/*
 * Collect every register occurrence of 'insn' (plain register operands,
 * memory base/index registers, list operands), then set up live intervals:
 * all uses first, then all defs, so a def-after-use in the same insn gets the
 * correct ordering. Sets needExtraSpillReg when the insn has many uses.
 */
void LSRALinearScanRegAllocator::ComputeLiveIntervalForEachOperand(Insn &insn)
{
    std::vector<RegOpndInfo> allRegOpndInfo;
    const InsnDesc *md = insn.GetDesc();
    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        Operand &opnd = insn.GetOperand(i);
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "ptr null check.");
        if (opnd.IsRegister()) {
            auto &regOpnd = static_cast<RegOperand &>(opnd);
            // Specifically, the "use-def" opnd is treated as a "use" opnd
            bool isUse = opndDesc->IsRegUse();
            // Fixup: The size in the insn md of x64 is not accurate, and the size in the fload opnd
            // does not change, so we use the size in the opnd first.
            auto regSize = (regOpnd.GetRegisterType() == kRegTyInt ? opndDesc->GetSize() : regOpnd.GetSize());
            allRegOpndInfo.emplace_back(regOpnd, regSize, i, !isUse);
        } else if (opnd.IsMemoryAccessOperand()) {
            /* base/index registers of a memory operand are always uses, 64-bit wide */
            auto &memOpnd = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOpnd.GetBaseRegister();
            RegOperand *offset = memOpnd.GetIndexRegister();
            if (base != nullptr) {
                allRegOpndInfo.emplace_back(*base, k64BitSize, i, false);
            }
            if (offset != nullptr) {
                allRegOpndInfo.emplace_back(*offset, k64BitSize, i, false);
            }
        } else if (opnd.IsList()) {
            /* every element of a list operand inherits the list's def/use property */
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            for (auto op : listOpnd.GetOperands()) {
                allRegOpndInfo.emplace_back(*op, op->GetSize(), i, opndDesc->IsDef());
            }
        }
    }

    uint32 numUses = 0;
    /* process uses before defs so same-insn def/use pairs are ordered correctly */
    for (auto &regOpndInfo : allRegOpndInfo) {
        if (!regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, false, numUses, regOpndInfo.regSize);
        }
    }

    for (auto &regOpndInfo : allRegOpndInfo) {
        if (regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, true, numUses, regOpndInfo.regSize);
        }
    }

    if (numUses >= regInfo->GetNormalUseOperandNum()) {
        needExtraSpillReg = true;
    }
}
743
ComputeLoopLiveIntervalPriority(const LoopDesc & loop)744 void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriority(const LoopDesc &loop)
745 {
746 for (const auto *lp : loop.GetChildLoops()) {
747 /* handle nested Loops */
748 ComputeLoopLiveIntervalPriority(*lp);
749 }
750 for (auto bbId : loop.GetLoopBBs()) {
751 auto *bb = cgFunc->GetBBFromID(bbId);
752 if (bb->IsEmpty()) {
753 continue;
754 }
755 FOR_BB_INSNS(insn, bb)
756 {
757 ComputeLoopLiveIntervalPriorityInInsn(*insn);
758 }
759 loopBBRegSet.clear();
760 }
761 }
762
ComputeLoopLiveIntervalPriorityInInsn(const Insn & insn)763 void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriorityInInsn(const Insn &insn)
764 {
765 uint32 opndNum = insn.GetOperandSize();
766 for (uint32 i = 0; i < opndNum; ++i) {
767 Operand &opnd = insn.GetOperand(i);
768 if (!opnd.IsRegister()) {
769 continue;
770 }
771 auto ®Opnd = static_cast<RegOperand &>(opnd);
772 if (!regInfo->IsVirtualRegister(regOpnd)) {
773 continue;
774 }
775 uint32 regNo = regOpnd.GetRegisterNumber();
776 LiveInterval *li = liveIntervalsArray[regNo];
777 if (li == nullptr || loopBBRegSet.find(regNo) != loopBBRegSet.end()) {
778 continue;
779 }
780 li->SetPriority(kLoopWeight * li->GetPriority());
781 (void)loopBBRegSet.insert(regNo);
782 }
783 return;
784 }
785
ComputeLiveIntervalForCall(Insn & insn)786 void LSRALinearScanRegAllocator::ComputeLiveIntervalForCall(Insn &insn)
787 {
788 uint32 cnt = 0;
789 // referencemap info
790 for (auto regNO : insn.GetStackMapLiveIn()) {
791 if (derivedRef2Base.find(regNO) != derivedRef2Base.end()) {
792 auto baseOpnd = derivedRef2Base[regNO];
793 auto regSize = baseOpnd->GetSize();
794 SetupLiveInterval(*baseOpnd, insn, false, cnt, regSize);
795 }
796 }
797 // deoptinfo
798 if (insn.GetStackMap() != nullptr) {
799 for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
800 auto regSize = opnd->GetSize();
801 SetupLiveInterval(*opnd, insn, false, cnt, regSize);
802 }
803 }
804 }
805
/*
 * Drive live-interval construction over the BFS-ordered BBs:
 * number all insns sequentially, record call sites, build intervals for every
 * operand, then compute each interval's priority (refCount / span length) and
 * boost intervals used inside loops.
 */
void LSRALinearScanRegAllocator::ComputeLiveInterval()
{
    RA_TIMER_REGISTER(lsra, "LSRA ComputeLiveInterval");
    liQue.clear();
    /* one bit per vreg, packed into uint64 words */
    uint32 regUsedInBBSz = (cgFunc->GetMaxVReg() / (sizeof(uint64) * k8ByteSize) + 1);
    regUsedInBB.resize(regUsedInBBSz, 0);
    uint32 insnNum = 1;
    for (BB *bb : bfs->sortedBBs) {
        ComputeLiveIn(*bb, insnNum);
        FOR_BB_INSNS(insn, bb)
        {
            insn->SetId(insnNum);
            /* skip comment and debug insn */
            if (!insn->IsMachineInstruction()) {
                continue;
            }

            /* RecordCall, remember calls for caller/callee allocation. */
            if (insn->IsCall()) {
                /* throw insns are not treated as allocation-relevant call sites */
                if (!insn->GetIsThrow()) {
                    callQueue.emplace_back(insn->GetId());
                }
                if (!cgFunc->IsStackMapComputed()) {
                    ComputeLiveIntervalForCall(*insn);
                }
            }

            ComputeLiveIntervalForEachOperand(*insn);

            /* handle return value for call insn */
            if (insn->IsCall()) {
                /* For all backend architectures so far, adopt all RetRegs as Def via this insn,
                 * and then their live begins.
                 * next optimization, you can determine which registers are actually used.
                 */
                for (uint32 i = 0; i < regInfo->GetIntRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetIntRetReg(i), k64BitSize, kRegTyInt);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
                for (uint32 i = 0; i < regInfo->GetFpRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetFpRetReg(i), k64BitSize, kRegTyFloat);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
            }
            ++insnNum;
        }

        ComputeLiveOut(*bb, insnNum);
    }

    maxInsnNum = insnNum - 1; /* insn_num started from 1 */
    regUsedInBB.clear();
    /* calculate Live Interval weight */
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        /* phys-reg intervals do not compete by priority */
        if (li->GetPhysUse() > 0) {
            continue;
        }
        /* priority = refCount / span length; the abs of the span is used.
         * NOTE(review): lastUse == firstDef would divide by zero (float inf) —
         * confirm that case cannot occur here. */
        if (li->GetLastUse() > li->GetFirstDef()) {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetLastUse() - li->GetFirstDef()));
        } else {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetFirstDef() - li->GetLastUse()));
        }
    }

    // enhance loop Live Interval Priority
    for (const auto *lp : loopInfo.GetLoops()) {
        ComputeLoopLiveIntervalPriority(*lp);
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        PrintLiveIntervals();
    }
#endif
}
885
886 /* Calculate the weight of a live interval for pre-spill and flexible spill */
/* Calculate the weight of a live interval for pre-spill and flexible spill */
void LSRALinearScanRegAllocator::LiveIntervalAnalysis()
{
    RA_TIMER_REGISTER(lsra, "LSRA BuildIntervalRanges");
    for (uint32 bbIdx = 0; bbIdx < bfs->sortedBBs.size(); ++bbIdx) {
        BB *bb = bfs->sortedBBs[bbIdx];

        FOR_BB_INSNS(insn, bb)
        {
            /* 1 calculate live interfere */
            if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                /* New instruction inserted by reg alloc (ie spill) */
                continue;
            }
            /* 1.1 simple retire from active */
            MapleSet<LiveInterval *, ActiveCmp>::iterator it;
            for (it = active.begin(); it != active.end(); /* erase will update */) {
                auto *li = static_cast<LiveInterval *>(*it);
                /* active is sorted by lastUse: the first still-live entry ends the scan */
                if (li->GetLastUse() > insn->GetId()) {
                    break;
                }
                it = active.erase(it);
            }
            const InsnDesc *md = insn->GetDesc();
            uint32 opndNum = insn->GetOperandSize();
            for (uint32 i = 0; i < opndNum; ++i) {
                const OpndDesc *regProp = md->GetOpndDes(i);
                DEBUG_ASSERT(regProp != nullptr, "pointer is null in LSRALinearScanRegAllocator::LiveIntervalAnalysis");
                bool isDef = regProp->IsRegDef();
                Operand &opnd = insn->GetOperand(i);
                if (isDef) {
                    auto &regOpnd = static_cast<RegOperand &>(opnd);
                    if (regOpnd.IsVirtualRegister() && regOpnd.GetRegisterType() != kRegTyCc) {
                        /* 1.2 simple insert to active */
                        uint32 regNO = regOpnd.GetRegisterNumber();
                        LiveInterval *li = liveIntervalsArray[regNO];
                        /* only intervals starting here and not already spilled become active */
                        if (li->GetFirstDef() == insn->GetId() && li->GetStackSlot() != kSpilled) {
                            (void)active.insert(li);
                        }
                    }
                }
            }

            /* 2 get interfere info, and analysis */
            uint32 interNum = active.size();
#ifdef ARK_LITECG_DEBUG
            if (needDump) {
                LogInfo::MapleLogger() << "In insn " << insn->GetId() << ", " << interNum
                                       << " overlap live intervals.\n";
                LogInfo::MapleLogger() << "\n";
            }
#endif

            /* 2.2 interfere with each other, analysis which to spill */
            while (interNum > CGOptions::GetOverlapNum()) {
                /* pre-spill the lowest-priority interval until pressure drops;
                 * FindLowestPrioInActive also sets the member iterator itFinded */
                LiveInterval *lowestLi = nullptr;
                FindLowestPrioInActive(lowestLi);
                if (lowestLi != nullptr) {
#ifdef ARK_LITECG_DEBUG
                    if (needDump) {
                        PrintLiveInterval(*lowestLi, "Pre spilled: ");
                    }
#endif
                    lowestLi->SetStackSlot(kSpilled);
                    lowestLi->SetShouldSave(false);
                    active.erase(itFinded);
                    interNum = active.size();
                } else {
                    break;
                }
            }
        }
    }
    active.clear();
}
961
UpdateCallQueueAtRetirement(uint32 insnID)962 void LSRALinearScanRegAllocator::UpdateCallQueueAtRetirement(uint32 insnID)
963 {
964 /*
965 * active list is sorted based on increasing lastUse
966 * any operand whose use is greater than current
967 * instruction number is still in use.
968 * If the use is less than or equal to instruction number
969 * then it is possible to retire this live interval and
970 * reclaim the physical register associated with it.
971 */
972 #ifdef ARK_LITECG_DEBUG
973 if (needDump) {
974 LogInfo::MapleLogger() << "RetireActiveByInsn instr_num " << insnID << "\n";
975 }
976 #endif
977 /* Retire invalidated call from call queue */
978 while (!callQueue.empty() && callQueue.front() <= insnID) {
979 callQueue.pop_front();
980 }
981 }
982
983 /* update allocate info by active queue */
/*
 * For each interval currently in 'active', tighten freeUntilPos of its
 * assigned physical register based on where that interval's next live range
 * begins relative to li's lifetime [firstDef, lastUse). Registers whose next
 * range starts inside li's lifetime are also recorded in blockForbiddenMask.
 */
void LSRALinearScanRegAllocator::UpdateActiveAllocateInfo(const LiveInterval &li)
{
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    if (li.IsUseBeforeDef()) {
        /* value is read before its first def: treat the interval as starting one position earlier */
        --start;
    }
    for (auto *activeLi : active) {
        uint32 regNO = activeLi->GetAssignedReg();
        uint32 rangeStartPos;
        auto posRange = activeLi->FindPosRange(start);
        if (posRange == activeLi->GetRanges().end()) {
            /* no live range at or after 'start': this register never conflicts with li */
            rangeStartPos = UINT32_MAX;
        } else {
            rangeStartPos = posRange->GetStart();
        }
        if (rangeStartPos > start) {
            if (rangeStartPos < end) {
                /* register becomes busy strictly inside li's lifetime */
                blockForbiddenMask |= (1UL << activeLi->GetAssignedReg());
            }
            if (rangeStartPos < freeUntilPos[regNO]) {
                freeUntilPos[regNO] = rangeStartPos;
            }
        } else {
            /* register is already occupied at li's start position */
            freeUntilPos[regNO] = 0;
        }
    }
}
1012
1013 /* update allocate info by param queue */
/*
 * Same idea as UpdateActiveAllocateInfo, but for the pre-assigned parameter
 * registers: consume retired entries from the per-register param queues, then
 * tighten freeUntilPos / blockForbiddenMask using the next parameter interval
 * that overlaps li's lifetime [firstDef, lastUse).
 */
void LSRALinearScanRegAllocator::UpdateParamAllocateInfo(const LiveInterval &li)
{
    bool isInt = (li.GetRegType() == kRegTyInt);
    MapleVector<SingleQue> &paramQueue = isInt ? intParamQueue : fpParamQueue;
    uint32 baseReg = isInt ? firstIntReg : firstFpReg;
    uint32 paramNum = isInt ? regInfo->GetIntRegs().size() : regInfo->GetFpRegs().size();
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    for (uint32 i = 0; i < paramNum; ++i) {
        /* retire param intervals whose physical use ends at or before li starts */
        while (!paramQueue[i].empty() && paramQueue[i].front()->GetPhysUse() <= start) {
            paramQueue[i].pop_front();
        }
        if (paramQueue[i].empty()) {
            continue;
        }
        auto regNo = paramQueue[i].front()->GetRegNO();
        uint32 startPos = paramQueue[i].front()->GetFirstDef();
        if (startPos <= start) {
            /* param register already occupied at li's start */
            freeUntilPos[regNo] = 0;
        } else {
            if (startPos < end) {
                /* param register becomes busy inside li's lifetime */
                blockForbiddenMask |= (1UL << (i + baseReg));
            }
            if (startPos < freeUntilPos[regNo]) {
                freeUntilPos[regNo] = startPos;
            }
        }
    }
}
1043
1044 /* update active in retire */
RetireActive(LiveInterval & li,uint32 insnID)1045 void LSRALinearScanRegAllocator::RetireActive(LiveInterval &li, uint32 insnID)
1046 {
1047 /* Retire live intervals from active list */
1048 MapleSet<LiveInterval *, ActiveCmp>::iterator it;
1049 for (it = active.begin(); it != active.end(); /* erase will update */) {
1050 auto *activeLi = static_cast<LiveInterval *>(*it);
1051 if (activeLi->GetLastUse() > insnID) {
1052 ++it;
1053 continue;
1054 }
1055 /*
1056 * live interval ended for this reg in active
1057 * release physical reg assigned to free reg pool
1058 */
1059 #ifdef ARK_LITECG_DEBUG
1060 if (needDump) {
1061 LogInfo::MapleLogger() << "Removing "
1062 << "(" << activeLi->GetAssignedReg() << ")"
1063 << "from regmask\n";
1064 PrintLiveInterval(*activeLi, "\tRemoving virt_reg li\n");
1065 }
1066 #endif
1067 it = active.erase(it);
1068 }
1069 }
1070
1071 /* find the best physical reg by freeUntilPos */
GetRegFromMask(uint32 mask,regno_t offset,const LiveInterval & li)1072 uint32 LSRALinearScanRegAllocator::GetRegFromMask(uint32 mask, regno_t offset, const LiveInterval &li)
1073 {
1074 uint32 prefer = li.GetPrefer();
1075 if (prefer != 0) {
1076 uint32 preg = li.GetPrefer() - offset;
1077 if ((mask & (1u << preg)) != 0 && freeUntilPos[prefer] >= li.GetLastUse()) {
1078 return prefer;
1079 }
1080 }
1081 uint32 bestReg = 0;
1082 uint32 maxFreeUntilPos = 0;
1083 for (uint32 preg = 0; preg < k32BitSize; ++preg) {
1084 if ((mask & (1u << preg)) == 0) {
1085 continue;
1086 }
1087 uint32 regNO = preg + offset;
1088 if (freeUntilPos[regNO] >= li.GetLastUse()) {
1089 return regNO;
1090 }
1091 if (freeUntilPos[regNO] > maxFreeUntilPos) {
1092 maxFreeUntilPos = freeUntilPos[regNO];
1093 bestReg = regNO;
1094 }
1095 }
1096 return bestReg;
1097 }
1098
1099 /* Determine if live interval crosses the call */
NeedSaveAcrossCall(LiveInterval & li)1100 bool LSRALinearScanRegAllocator::NeedSaveAcrossCall(LiveInterval &li)
1101 {
1102 bool saveAcrossCall = false;
1103 for (uint32 callInsnID : callQueue) {
1104 if (callInsnID > li.GetLastUse()) {
1105 break;
1106 }
1107 if (callInsnID < li.GetFirstDef()) {
1108 continue;
1109 }
1110 /* Need to spill/fill around this call */
1111 for (auto range : li.GetRanges()) {
1112 uint32 start = range.GetStart();
1113 if (callInsnID >= start && callInsnID < range.GetEnd()) {
1114 saveAcrossCall = true;
1115 break;
1116 }
1117 }
1118 if (saveAcrossCall) {
1119 break;
1120 }
1121 }
1122 #ifdef ARK_LITECG_DEBUG
1123 if (needDump) {
1124 if (saveAcrossCall) {
1125 LogInfo::MapleLogger() << "\t\tlive interval crosses a call\n";
1126 } else {
1127 LogInfo::MapleLogger() << "\t\tlive interval does not cross a call\n";
1128 }
1129 }
1130 #endif
1131 return saveAcrossCall;
1132 }
1133
1134 /* Return a phys register number for the live interval. */
FindAvailablePhyReg(LiveInterval & li)1135 uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li)
1136 {
1137 uint32 regNO = 0;
1138 if (li.GetRegType() == kRegTyInt) {
1139 regNO = FindAvailablePhyReg(li, true);
1140 } else {
1141 DEBUG_ASSERT(li.GetRegType() == kRegTyFloat, "impossible register type");
1142 regNO = FindAvailablePhyReg(li, false);
1143 }
1144 return regNO;
1145 }
1146
1147 /* Spill and reload for caller saved registers. */
/*
 * Spill and reload for caller saved registers.
 * For a def, a store of the assigned physical register is inserted after the
 * insn; for a use, a reload is inserted before it. Reloads are skipped at
 * positions CallerSaveOpt proved redundant (IsNoNeedReloadPosition).
 */
void LSRALinearScanRegAllocator::InsertCallerSave(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 vRegNO = regOpnd.GetRegisterNumber();
    if (vRegNO >= liveIntervalsArray.size()) {
        CHECK_FATAL(false, "index out of range in LSRALinearScanRegAllocator::InsertCallerSave");
    }
    LiveInterval *rli = liveIntervalsArray[vRegNO];
    auto regType = rli->GetRegType();

    /* reload is unnecessary here (value still valid from an earlier fill) */
    if (!isDef && rli->IsNoNeedReloadPosition(insn.GetId())) {
#ifdef ARK_LITECG_DEBUG
        if (needDump) {
            LogInfo::MapleLogger() << "InsertCallerSave R" << rli->GetRegNO() << " assigned " << rli->GetAssignedReg()
                                   << " skipping\n";
        }
#endif
        return;
    }

    /* only intervals flagged should-save (caller-saved across a call) need this */
    if (!rli->IsShouldSave()) {
        return;
    }

    uint32 regSize = rli->GetSpillSize();
    PrimType spType;

    if (regType == kRegTyInt) {
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else {
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    }

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "InsertCallerSave R" << vRegNO << (isDef ? " def" : " use") << "\n";
    }
#endif

    MemOperand *memOpnd = nullptr;
    RegOperand *phyOpnd = nullptr;

    phyOpnd = regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(rli->GetAssignedReg()), regSize, regType);

    std::string comment;
    bool isOutOfRange = false;
    /* scratch register used only on the reload path for address adjustment */
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);
    if (isDef) {
        /* store after the def; the reserved spill reg handles out-of-range offsets */
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(vRegNO, true, insn, regInfo->GetReservedSpillReg(), isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " SPILL for caller_save " + std::to_string(vRegNO);
        ++callerSaveSpillCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        stInsn->SetComment(comment);
        /* out-of-range adjustment emitted extra insns, so placement differs */
        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        /* reload before the use */
        memOpnd = GetSpillMem(vRegNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " RELOAD for caller_save " + std::to_string(vRegNO);
        ++callerSaveReloadCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        ldInsn->SetComment(comment);
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}
1226
GetSpillMem(uint32 vRegNO,bool isDest,Insn & insn,regno_t regNO,bool & isOutOfRange,uint32 bitSize) const1227 MemOperand *LSRALinearScanRegAllocator::GetSpillMem(uint32 vRegNO, bool isDest, Insn &insn, regno_t regNO,
1228 bool &isOutOfRange, uint32 bitSize) const
1229 {
1230 MemOperand *memOpnd = regInfo->GetOrCreatSpillMem(vRegNO, bitSize);
1231 return regInfo->AdjustMemOperandIfOffsetOutOfRange(memOpnd, std::make_pair(vRegNO, regNO), isDest, insn,
1232 isOutOfRange);
1233 }
1234
1235 /*
1236 * Generate spill/reload for an operand.
1237 * spill_idx : one of 3 phys regs set aside for the purpose of spills.
1238 */
/*
 * Generate spill/reload for an operand.
 * spill_idx : one of 3 phys regs set aside for the purpose of spills.
 */
void LSRALinearScanRegAllocator::SpillOperand(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    /*
     * Insert spill (def) and fill (use) instructions for the operand.
     * Keep track of the 'slot' (base 0). The actual slot on the stack
     * will be some 'base_slot_offset' + 'slot' off FP.
     * For simplification, entire 64bit register is spilled/filled.
     *
     * For example, a virtual register home 'slot' on the stack is location 5.
     * This represents a 64bit slot (8bytes). The base_slot_offset
     * from the base 'slot' determined by whoever is added, off FP.
     * stack address is ( FP - (5 * 8) + base_slot_offset )
     * So the algorithm is simple, for each virtual register that is not
     * allocated, it has to have a home address on the stack (a slot).
     * A class variable is used, start from 0, increment by 1.
     * Since LiveInterval already represent unique regNO information,
     * just add a slot number to it. Subsequent reference to a regNO
     * will either get an allocated physical register or a slot number
     * for computing the stack location.
     *
     * This function will also determine the operand to be a def or use.
     * For def, spill instruction(s) is appended after the insn.
     * For use, spill instruction(s) is prepended before the insn.
     * Use FP - (slot# *8) for now. Will recompute if base_slot_offset
     * is not 0.
     *
     * The total number of slots used will be used to compute the stack
     * frame size. This will require some interface external to LSRA.
     *
     * For normal instruction, two spill regs should be enough. The caller
     * controls which ones to use.
     * For more complex operations, need to break down the instruction.
     * eg. store v1 -> [v2 + v3] // 3 regs needed
     * => p1 <- v2 // address part 1
     * p2 <- v3 // address part 2
     * p1 <- p1 + p2 // freeing up p2
     * p2 <- v1
     * store p2 -> [p1]
     * or we can allocate more registers to the spill register set
     * For store multiple, need to break it down into two or more instr.
     */
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 regNO = regOpnd.GetRegisterNumber();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "SpillOperand " << regNO << "\n";
    }
#endif

    regno_t spReg;
    PrimType spType;
    CHECK_FATAL(regNO < liveIntervalsArray.size(), "index out of range in LSRALinearScanRegAllocator::SpillOperand");
    LiveInterval *li = liveIntervalsArray[regNO];
    /* caller-save intervals go through InsertCallerSave instead */
    DEBUG_ASSERT(!li->IsShouldSave(), "SpillOperand: Should not be caller");
    uint32 regSize = li->GetSpillSize();
    RegType regType = regOpnd.GetRegisterType();

    /* pick the dedicated spill scratch register matching the interval's bank */
    if (li->GetRegType() == kRegTyInt) {
        DEBUG_ASSERT((spillIdx < intSpillRegSet.size()), "SpillOperand: ran out int spill reg");
        spReg = intSpillRegSet[spillIdx] + firstIntReg;
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else if (li->GetRegType() == kRegTyFloat) {
        DEBUG_ASSERT((spillIdx < fpSpillRegSet.size()), "SpillOperand: ran out fp spill reg");
        spReg = fpSpillRegSet[spillIdx] + firstFpReg;
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    } else {
        CHECK_FATAL(false, "SpillOperand: Should be int or float type");
    }
    /* int scratch used for address computation on the reload path */
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);

    bool isOutOfRange = false;
    RegOperand *phyOpnd = GetSpillPhyRegOperand(insn, static_cast<regno_t>(spReg), regSize, regType);
    li->SetAssignedReg(phyOpnd->GetRegisterNumber());

    MemOperand *memOpnd = nullptr;
    if (isDef) {
        /*
         * Need to assign spReg (one of the two spill reg) to the destination of the insn.
         * spill_vreg <- opn1 op opn2
         * to
         * spReg <- opn1 op opn2
         * store spReg -> spillmem
         */
        li->SetStackSlot(kSpilled);

        ++spillCount;
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(regNO, true, insn, regInfo->GetReservedSpillReg(), isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " SPILL vreg:" + std::to_string(regNO);
            stInsn->SetComment(comment);
        }

        /* placement depends on whether out-of-range adjustment emitted extra insns */
        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        /* Here, reverse of isDef, change either opn1 or opn2 to the spReg. */
        if (li->GetStackSlot() == 0xFFFFFFFF) {
            /* reload requested but the vreg was never stored: likely an upstream def/use ordering issue */
            LogInfo::MapleLogger() << "WARNING: " << regNO << " assigned " << li->GetAssignedReg()
                                   << " restore without spill in bb " << insn.GetBB()->GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
        ++reloadCount;
        memOpnd = GetSpillMem(regNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " RELOAD vreg" + std::to_string(regNO);
            ldInsn->SetComment(comment);
        }
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}
1363
GetSpillPhyRegOperand(Insn & insn,regno_t regNo,uint32 regSize,RegType regType)1364 RegOperand *LSRALinearScanRegAllocator::GetSpillPhyRegOperand(Insn &insn, regno_t regNo, uint32 regSize,
1365 RegType regType)
1366 {
1367 RegOperand *phyOpnd = nullptr;
1368 phyOpnd = regInfo->GetOrCreatePhyRegOperand(regNo, regSize, regType);
1369 MOperator mOp = insn.GetMachineOpcode();
1370 if (regInfo->IsMovFromRegtoReg(mOp, insn)) {
1371 auto ®Opnd1 = static_cast<RegOperand &>(insn.GetOperand(1));
1372 auto ®Opnd0 = static_cast<RegOperand &>(insn.GetOperand(0));
1373 if ((!regInfo->IsVirtualRegister(regOpnd1)) && (regInfo->IsVirtualRegister(regOpnd0))) {
1374 phyOpnd = ®Opnd1;
1375 } else if ((regInfo->IsVirtualRegister(regOpnd1)) && (!regInfo->IsVirtualRegister(regOpnd0))) {
1376 phyOpnd = ®Opnd0;
1377 } else if ((regInfo->IsVirtualRegister(regOpnd1)) && (regInfo->IsVirtualRegister(regOpnd0))) {
1378 multiVregMov = true;
1379 }
1380 }
1381 CHECK_FATAL(phyOpnd != nullptr, "SpillOperand: valid physical register operand");
1382 return phyOpnd;
1383 }
1384
1385 /* find the lowest li that meets the constraints related to li0 form current active */
/*
 * Find the lowest-priority spill candidate in 'active' that satisfies the
 * constraints relative to li0 (if given). On success, targetLi is set and the
 * member iterator 'itFinded' points at the chosen element so callers can
 * erase it from 'active'.
 */
void LSRALinearScanRegAllocator::FindLowestPrioInActive(LiveInterval *&targetLi, LiveInterval *li0, RegType regType)
{
    /* count how many active intervals share each assigned physical register */
    std::unordered_map<regno_t, uint32> activeLiAssignedRegCnt;
    for (auto *li : active) {
        if (li->GetAssignedReg() != 0) {
            ++activeLiAssignedRegCnt[li->GetAssignedReg()];
        }
    }

    constexpr float kBiggestFloat = 1000.0;
    /* when li0 is null any priority qualifies; otherwise must beat li0's priority */
    float lowestPrio = (li0 != nullptr ? li0->GetPriority() : kBiggestFloat);
    bool found = false;
    bool hintCalleeSavedReg = li0 && NeedSaveAcrossCall(*li0);
    MapleSet<LiveInterval *, ActiveCmp>::iterator lowestIt;
    for (auto it = active.begin(); it != active.end(); ++it) {
        LiveInterval *li = static_cast<LiveInterval *>(*it);
        regno_t regNO = li->GetAssignedReg();
        /* 1. Basic Constraints */
        if (li->GetPriority() >= lowestPrio || li->GetRegType() != regType) {
            continue;
        }
        /* 2. If li is pre-assigned to Physical register primitively, ignore it. */
        if (!regInfo->IsVirtualRegister(li->GetRegNO())) {
            continue;
        }
        /* 3. CalleeSavedReg is preferred here. If li is assigned to Non-CalleeSavedReg, ignore it. */
        /* NOTE(review): regNO is rebased with firstIntReg even for fp intervals — confirm fp numbering */
        if (hintCalleeSavedReg && !regInfo->IsCalleeSavedReg(regNO - firstIntReg)) {
            continue;
        }
        /* 4. if regNO is assigned to multiple active li, ignore it. */
        if (activeLiAssignedRegCnt[regNO] > 1) {
            continue;
        }
        lowestPrio = li->GetPriority();
        lowestIt = it;
        found = true;
    }
    if (found) {
        targetLi = *lowestIt;
        itFinded = lowestIt;
    }
    return;
}
1429
1430 /* Set a vreg in live interval as being marked for spill. */
SetLiSpill(LiveInterval & li)1431 void LSRALinearScanRegAllocator::SetLiSpill(LiveInterval &li)
1432 {
1433 #ifdef ARK_LITECG_DEBUG
1434 if (needDump) {
1435 LogInfo::MapleLogger() << "SetLiSpill " << li.GetRegNO();
1436 LogInfo::MapleLogger() << "(refCount " << li.GetRefCount() << ")\n";
1437 }
1438 #endif
1439 li.SetStackSlot(kSpilled);
1440 li.SetShouldSave(false);
1441 }
1442
/*
 * No free register was found for li: either spill li itself, or evict a
 * lower-priority active interval and take over its register. Returns the
 * physical register assigned to li, or 0 if li was spilled.
 */
uint32 LSRALinearScanRegAllocator::HandleSpillForLi(LiveInterval &li)
{
    /* choose the lowest priority li to spill */
    RegType regType = li.GetRegType();
    LiveInterval *spillLi = nullptr;
    FindLowestPrioInActive(spillLi, &li, regType);

    /* eviction only pays off if li is hotter and li's lifetime ends no later */
    if (spillLi == nullptr || li.GetStackSlot() == kSpilled || li.GetRefCount() <= spillLi->GetRefCount() ||
        spillLi->GetLastUse() < li.GetLastUse()) {
        /* spill current li */
#ifdef ARK_LITECG_DEBUG
        if (needDump) {
            LogInfo::MapleLogger() << "Flexible Spill: still spill " << li.GetRegNO() << ".\n";
        }
#endif
        SetLiSpill(li);
        return 0;
    }
    DEBUG_ASSERT(spillLi != nullptr, "spillLi is null in LSRALinearScanRegAllocator::HandleSpillForLi");

    uint32 newRegNO = spillLi->GetAssignedReg();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "Flexible Spill: " << spillLi->GetRegNO() << " instead of " << li.GetRegNO() << ".\n";
        PrintLiveInterval(*spillLi, "TO spill: ");
        PrintLiveInterval(li, "Instead of: ");
    }
#endif

    /* li inherits the evicted interval's register */
    li.SetAssignedReg(newRegNO);
    if (!regInfo->IsCalleeSavedReg(newRegNO) && NeedSaveAcrossCall(li)) {
        li.SetShouldSave(true);
    }

    /* spill this live interval */
    (void)active.erase(itFinded);
    SetLiSpill(*spillLi);
    spillLi->SetAssignedReg(0);

    (void)active.insert(&li);
    return newRegNO;
}
1485
FindAvailablePhyRegAcrossCall(LiveInterval & li,bool isIntReg)1486 uint32 LSRALinearScanRegAllocator::FindAvailablePhyRegAcrossCall(LiveInterval &li, bool isIntReg)
1487 {
1488 uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
1489 uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
1490 regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;
1491
1492 /* call in live interval, use callee if available */
1493 auto bestReg = GetRegFromMask(calleeRegMask, reg0, li);
1494 if (bestReg != 0 && freeUntilPos[bestReg] >= li.GetLastUse()) {
1495 li.SetShouldSave(false);
1496 return bestReg;
1497 }
1498 /* can be optimize multi use between calls rather than in bb */
1499 if (bestReg == 0 || li.IsMultiUseInBB()) {
1500 auto secondReg = GetRegFromMask(callerRegMask, reg0, li);
1501 if (freeUntilPos[secondReg] >= li.GetLastUse()) {
1502 li.SetShouldSave(true);
1503 return secondReg;
1504 }
1505 }
1506 return 0;
1507 }
1508
/*
 * Find a physical register for li within the int or fp bank. Intervals that
 * cross a call (and do not prefer caller-saved regs) are delegated to
 * FindAvailablePhyRegAcrossCall; otherwise caller-saved registers are tried
 * first, falling back to callee-saved. Returns 0 when nothing fits.
 */
uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li, bool isIntReg)
{
    /* See if register is live accross a call */
    bool liAcrossCall = NeedSaveAcrossCall(li);
    if (liAcrossCall && !regInfo->IsPrefCallerSaveRegs(li.GetRegType(), li.GetSpillSize())) {
        return FindAvailablePhyRegAcrossCall(li, isIntReg);
    }

    uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
    uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
    regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;
    auto bestReg = GetRegFromMask(callerRegMask, reg0, li);
    if (bestReg == 0) {
        bestReg = GetRegFromMask(calleeRegMask, reg0, li);
    } else if (freeUntilPos[bestReg] < li.GetLastUse()) {
        /* caller-saved candidate does not cover the whole interval; see if callee-saved does better */
        auto secondReg = GetRegFromMask(calleeRegMask, reg0, li);
        if (secondReg != 0) {
            bestReg = (freeUntilPos[bestReg] > freeUntilPos[secondReg]) ? bestReg : secondReg;
        }
    }

    /* a register that frees up before lastUse is not usable without splitting */
    if (bestReg != 0 && freeUntilPos[bestReg] < li.GetLastUse()) {
        DEBUG_ASSERT(freeUntilPos[bestReg] != 0, "impossible");
        bestReg = 0;
    }
    /* try to fill in holes */
    /* try first split if no hole exists */
    return bestReg;
}
1538
1539 /* Shell function to find a physical register for an operand. */
/*
 * Shell function to find a physical register for an operand.
 * Returns the assigned register number, or 0 when none was assigned (spillAll
 * mode, already pre-spilled, or no register available).
 */
uint32 LSRALinearScanRegAllocator::AssignPhysRegs(LiveInterval &li)
{
    if (spillAll) {
        return 0;
    }

    /* pre spilled: 0xFFFFFFFF is the "no stack slot yet" sentinel; anything else means already spilled */
    if (li.GetStackSlot() != 0xFFFFFFFF) {
        return 0;
    }
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        uint32 activeSz = active.size();
        LogInfo::MapleLogger() << "\tAssignPhysRegs-active_sz " << activeSz << "\n";
    }
#endif

    uint32 regNO = FindAvailablePhyReg(li);
    if (regNO != 0) {
        li.SetAssignedReg(regNO);
        if (regInfo->IsCalleeSavedReg(regNO)) {
#ifdef ARK_LITECG_DEBUG
            if (needDump) {
                LogInfo::MapleLogger() << "\tCallee-save register for save/restore in prologue/epilogue: " << regNO
                                       << "\n";
            }
#endif
            /* register prologue/epilogue save/restore for this callee-saved reg */
            cgFunc->AddtoCalleeSaved(regNO);
        }
    }
    return regNO;
}
1572
AssignPhysRegsForLi(LiveInterval & li)1573 void LSRALinearScanRegAllocator::AssignPhysRegsForLi(LiveInterval &li)
1574 {
1575 uint32 newRegNO = AssignPhysRegs(li);
1576 if (newRegNO == 0) {
1577 newRegNO = HandleSpillForLi(li);
1578 }
1579
1580 if (newRegNO != 0) {
1581 (void)active.insert(&li);
1582 }
1583 }
1584
1585 /* Replace Use-Def Opnd */
/*
 * Replace Use-Def Opnd: map a read-modify-write virtual register operand to
 * its physical register, inserting reload-before and store-after when the
 * interval is caller-saved or spilled. Returns nullptr for operands LSRA does
 * not manage (non-register, cc/vary, unconcerned, or physical).
 */
RegOperand *LSRALinearScanRegAllocator::GetReplaceUdOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx)
{
    if (!opnd.IsRegister()) {
        return nullptr;
    }
    const auto *regOpnd = static_cast<RegOperand *>(&opnd);

    uint32 vRegNO = regOpnd->GetRegisterNumber();
    RegType regType = regOpnd->GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return nullptr;
    }
    if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
        return nullptr;
    }

    DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
                 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceUdOpnd");
    LiveInterval *li = liveIntervalsArray[vRegNO];

    regno_t regNO = li->GetAssignedReg();
    if (regInfo->IsCalleeSavedReg(regNO)) {
        /* ensure prologue/epilogue saves the callee-saved reg we use */
        cgFunc->AddtoCalleeSaved(regNO);
    }

    /* use-def needs both directions: reload the old value, store the new one */
    if (li->IsShouldSave()) {
        InsertCallerSave(insn, opnd, false, spillIdx);
        InsertCallerSave(insn, opnd, true, spillIdx);
    } else if (li->GetStackSlot() == kSpilled) {
        SpillOperand(insn, opnd, false, spillIdx);
        SpillOperand(insn, opnd, true, spillIdx);
        ++spillIdx;
    }
    RegOperand *phyOpnd =
        regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);

    return phyOpnd;
}
1624
1625 /*
1626 * Create an operand with physical register assigned, or a spill register
1627 * in the case where a physical register cannot be assigned.
1628 */
/*
 * Create an operand with physical register assigned, or a spill register
 * in the case where a physical register cannot be assigned.
 * Returns nullptr for operands LSRA does not manage (non-register, cc/vary,
 * unconcerned, or already physical).
 */
RegOperand *LSRALinearScanRegAllocator::GetReplaceOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx, bool isDef)
{
    if (!opnd.IsRegister()) {
        return nullptr;
    }
    const auto *regOpnd = static_cast<RegOperand *>(&opnd);

    uint32 vRegNO = regOpnd->GetRegisterNumber();
    RegType regType = regOpnd->GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return nullptr;
    }
    if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
        return nullptr;
    }

    DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
                 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceOpnd");
    LiveInterval *li = liveIntervalsArray[vRegNO];

    regno_t regNO = li->GetAssignedReg();
    if (regInfo->IsCalleeSavedReg(regNO)) {
        /* ensure prologue/epilogue saves the callee-saved reg we use */
        cgFunc->AddtoCalleeSaved(regNO);
    }
    if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
        /* a def restarts the scratch-register rotation; uses advance it */
        spillIdx = isDef ? 0 : spillIdx;
        if (li->IsShouldSave()) {
            InsertCallerSave(insn, opnd, isDef, spillIdx);
        } else {
            SpillOperand(insn, opnd, isDef, spillIdx);
        }
        if (!isDef) {
            ++spillIdx;
        }
    }
    RegOperand *phyOpnd =
        regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);

    return phyOpnd;
}
1669
UpdateLocalDefWithBBLiveIn(const BB & bb)1670 bool LSRALinearScanRegAllocator::CallerSaveOpt::UpdateLocalDefWithBBLiveIn(const BB &bb)
1671 {
1672 bool changed = false;
1673 auto bbId = bb.GetId();
1674 for (uint32 i = 0; i < callerSaves.size(); ++i) {
1675 auto *li = callerSaves[i];
1676 auto &defLiveIn = localDefLiveIn[i];
1677 auto &defAfterInsn = localDefAfterInsn[i];
1678
1679 if (bb.GetLiveInRegNO().count(li->GetRegNO()) == 0) {
1680 continue;
1681 }
1682 if (defLiveIn.count(bbId) != 0) {
1683 continue;
1684 }
1685 bool isLocalDef = true;
1686 for (auto *pred : bb.GetPreds()) {
1687 if (defAfterInsn.count(pred->GetId()) == 0) {
1688 isLocalDef = false;
1689 break;
1690 }
1691 }
1692 if (isLocalDef) {
1693 defLiveIn.insert(bb.GetId());
1694 defAfterInsn.emplace(bb.GetId(), 0);
1695 changed = true;
1696 }
1697 }
1698 return changed;
1699 }
1700
/*
 * For each caller-saved interval live at this insn, track the last position a
 * valid definition exists in this BB (defAfterInsn); calls invalidate it. A
 * use covered by a still-valid definition is marked as needing no reload.
 */
void LSRALinearScanRegAllocator::CallerSaveOpt::CollectCallerNoNeedReloadByInsn(const Insn &insn)
{
    auto curId = insn.GetId();
    for (uint32 i = 0; i < callerSaves.size(); ++i) {
        auto *li = callerSaves[i];
        auto &defLiveIn = localDefLiveIn[i];
        auto &defAfterInsn = localDefAfterInsn[i];
        auto posRange = li->FindPosRange(curId);
        if (posRange == li->GetRanges().end() || posRange->GetStart() > curId) {
            /* li is not live at this position */
            continue;
        }
        if (insn.IsCall()) {
            /* a call clobbers caller-saved regs: any prior def in this BB becomes invalid */
            defAfterInsn.erase(insn.GetBB()->GetId());
            continue;
        }
        auto &usePositions = li->GetUsePositions();
        if (std::binary_search(usePositions.begin(), usePositions.end(), curId)) {
            auto iter = defAfterInsn.find(insn.GetBB()->GetId());
            if (iter == defAfterInsn.end() || iter->second > curId) {
                /* first (or earlier) definition point in this BB: record it, reload still needed */
                defAfterInsn.insert_or_assign(insn.GetBB()->GetId(), curId);
                continue;
            }
            if (iter->second == curId && defLiveIn.count(insn.GetBB()->GetId()) == 0) {
                /* the recorded def IS this position and is not valid on entry: keep the reload */
                continue;
            }
            li->AddNoNeedReloadPosition(curId);
        }
    }
}
1730
/*
 * Fixpoint driver for the caller-save reload optimization: repeatedly sweep
 * all BBs in BFS order, propagating live-in definition facts and collecting
 * no-reload positions, until no BB changes. The first pass visits every BB
 * unconditionally.
 */
void LSRALinearScanRegAllocator::CallerSaveOpt::Run()
{
    bool changed;
    do {
        changed = false;
        /* reset each interval's range cursor before another ordered sweep */
        for (auto *li : callerSaves) {
            li->InitRangeFinder();
        }
        for (BB *bb : bfs->sortedBBs) {
            if (!UpdateLocalDefWithBBLiveIn(*bb) && !firstTime) {
                continue;
            }
            changed = true;
            FOR_BB_INSNS(insn, bb)
            {
                if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                    continue;
                }
                CollectCallerNoNeedReloadByInsn(*insn);
            }
        }
        firstTime = false;
    } while (changed);
}
1755
/*
 * Rewrite all *use* operands of insn (plain registers, register lists, and the
 * base/index registers inside memory operands) from virtual to assigned
 * physical registers, emitting reloads where the interval was spilled.
 * Pure-def non-memory operands are left for FinalizeDefRegisters.
 */
void LSRALinearScanRegAllocator::FinalizeUseRegisters(Insn &insn, uint32 &spillIdx)
{
    const InsnDesc *md = insn.GetDesc();

    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeRegisters");
        Operand &opnd = insn.GetOperand(i);
        /* memory operands are handled here even when flagged def (their regs are reads) */
        if (opndDesc->IsDef() && !opnd.IsMemoryAccessOperand()) {
            continue;
        }
        if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand&>(opnd);
            auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
            for (auto *regOpnd : listOpnd.GetOperands()) {
                RegOperand *phyOpnd =
                    regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, false) : regOpnd;
                newList->PushOpnd(*phyOpnd);
            }
            insn.SetOperand(i, *newList);
        } else if (opnd.IsMemoryAccessOperand()) {
            /* clone before mutation so other insns sharing the operand are unaffected */
            auto *memOpnd = static_cast<MemOperand*>(static_cast<MemOperand&>(opnd).Clone(*cgFunc->GetMemoryPool()));
            DEBUG_ASSERT(memOpnd != nullptr, "memopnd is null in LSRALinearScanRegAllocator::FinalizeRegisters");
            insn.SetOperand(i, *memOpnd);
            Operand *base = memOpnd->GetBaseRegister();
            Operand *offset = memOpnd->GetIndexRegister();
            if (base != nullptr) {
                auto *phyOpnd = GetReplaceOpnd(insn, *base, spillIdx, false);
                if (phyOpnd != nullptr) {
                    memOpnd->SetBaseRegister(*phyOpnd);
                }
            }
            if (offset != nullptr) {
                auto *phyOpnd = GetReplaceOpnd(insn, *offset, spillIdx, false);
                if (phyOpnd != nullptr) {
                    memOpnd->SetIndexRegister(*phyOpnd);
                }
            }
        } else {
            auto *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, false);
            if (phyOpnd != nullptr) {
                insn.SetOperand(i, *phyOpnd);
            }
        }
    }
}
1802
FinalizeUseDefRegisters(Insn & insn,uint32 & spillIdx)1803 void LSRALinearScanRegAllocator::FinalizeUseDefRegisters(Insn &insn, uint32 &spillIdx)
1804 {
1805 const InsnDesc *md = insn.GetDesc();
1806
1807 for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
1808 const OpndDesc *opndDesc = md->GetOpndDes(i);
1809 DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeRegisters");
1810 Operand &opnd = insn.GetOperand(i);
1811 bool isUseDef = opndDesc->IsRegDef() && opndDesc->IsRegUse();
1812 if (!isUseDef) {
1813 continue;
1814 }
1815 RegOperand *phyOpnd = GetReplaceUdOpnd(insn, opnd, spillIdx);
1816 if (phyOpnd != nullptr) {
1817 insn.SetOperand(i, *phyOpnd);
1818 }
1819 }
1820 }
1821
/*
 * Rewrite the pure-def (write-only) operands of insn into physical registers.
 * Use and use-def operands were already handled by FinalizeUseRegisters /
 * FinalizeUseDefRegisters; memory operands are also handled in the use pass.
 * spillIdx selects the spill scratch register for spilled intervals.
 */
void LSRALinearScanRegAllocator::FinalizeDefRegisters(Insn &insn, uint32 &spillIdx)
{
    const InsnDesc *md = insn.GetDesc();

    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeRegisters");
        Operand &opnd = insn.GetOperand(i);
        // skip uses (already finalized) and memory operands (handled in the use pass)
        if (opndDesc->IsUse() || opnd.IsMemoryAccessOperand()) {
            continue;
        }
        if (opnd.IsRegister()) {
            // isDef == true: GetReplaceOpnd may emit a spill store after the insn
            RegOperand *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, true);
            if (phyOpnd != nullptr) {
                insn.SetOperand(i, *phyOpnd);
            }
        } else if (opnd.IsList()) {
            // rebuild the operand list, replacing each virtual register with its preg
            auto &listOpnd = static_cast<ListOperand&>(opnd);
            auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
            for (auto *regOpnd : listOpnd.GetOperands()) {
                RegOperand *phyOpnd =
                    regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, true) : regOpnd;
                newList->PushOpnd(*phyOpnd);
            }
            insn.SetOperand(i, *newList);
        }
    }
    MOperator mOp = insn.GetMachineOpcode();
    // Peephole (only when an earlier pass set multiVregMov): if this insn is a
    // reg-to-reg mov whose source equals the destination of the preceding load
    // (both already physical), retarget both the mov source and the load
    // destination to the mov's destination register.
    // NOTE(review): this assumes operand 0 is the destination of both insns —
    // confirm against the target's insn descriptions.
    if (insn.GetPrev() && multiVregMov) {
        if (regInfo->IsMovFromRegtoReg(mOp, insn) && insn.GetPrev()->IsLoad()) {
            auto &regOpndPrev = static_cast<RegOperand &>(insn.GetPrev()->GetOperand(0));
            auto &regOpnd = static_cast<RegOperand &>(insn.GetOperand(1));
            auto &regOpndDest = static_cast<RegOperand &>(insn.GetOperand(0));
            if ((!regInfo->IsVirtualRegister(regOpndPrev)) && (!regInfo->IsVirtualRegister(regOpnd)) &&
                (regOpndPrev == regOpnd)) {
                insn.SetOperand(1, regOpndDest);
                insn.GetPrev()->SetOperand(0, regOpndDest);
            }
        }
    }
    // the flag only holds for one insn; clear it unconditionally
    multiVregMov = false;
}
FinalizeFreeReferenceSpillStack(Insn & insn)1864 void LSRALinearScanRegAllocator::FinalizeFreeReferenceSpillStack(Insn &insn)
1865 {
1866 if (!insn.IsCall()) {
1867 return;
1868 }
1869
1870 auto freeSpillStack = [this, insn](regno_t regno) {
1871 auto *li = liveIntervalsArray[regno];
1872 if (li != nullptr && insn.GetId() >= li->GetLastUse()) {
1873 regInfo->FreeSpillRegMem(regno);
1874 }
1875 };
1876
1877 // referencemap info
1878 for (auto regNO : insn.GetStackMapLiveIn()) {
1879 freeSpillStack(regNO);
1880 auto iter = derivedRef2Base.find(regNO);
1881 if (iter != derivedRef2Base.end()) {
1882 freeSpillStack(iter->second->GetRegisterNumber());
1883 }
1884 }
1885 // deoptinfo
1886 if (insn.GetStackMap() == nullptr) {
1887 return;
1888 }
1889 for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
1890 if (!opnd->IsRegister()) {
1891 continue;
1892 }
1893 freeSpillStack(static_cast<RegOperand*>(opnd)->GetRegisterNumber());
1894 }
1895 }
1896
1897 /* Iterate through all instructions and change the vreg to preg. */
FinalizeRegisters()1898 void LSRALinearScanRegAllocator::FinalizeRegisters()
1899 {
1900 RA_TIMER_REGISTER(lsra, "LSRA FinalizeRegisters");
1901 CallerSaveOpt opt(liveIntervalsArray, bfs);
1902 opt.Run();
1903 for (BB *bb : bfs->sortedBBs) {
1904 FOR_BB_INSNS(insn, bb)
1905 {
1906 if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
1907 continue;
1908 }
1909
1910 uint32 spillIdx = 0;
1911 // Handle source(use) opernads first
1912 FinalizeUseRegisters(*insn, spillIdx);
1913
1914 // Handle ud(use-def) opernads
1915 FinalizeUseDefRegisters(*insn, spillIdx);
1916
1917 // Handle dest(def) opernads last
1918 FinalizeDefRegisters(*insn, spillIdx);
1919
1920 // Free spill stack for reference reg
1921 FinalizeFreeReferenceSpillStack(*insn);
1922 }
1923 }
1924 }
1925
SetStackMapDerivedInfo()1926 void LSRALinearScanRegAllocator::SetStackMapDerivedInfo()
1927 {
1928 for (BB *bb : bfs->sortedBBs) {
1929 FOR_BB_INSNS(insn, bb) {
1930 if (!insn->IsMachineInstruction()) {
1931 continue;
1932 }
1933 const InsnDesc *md = insn->GetDesc();
1934 for (uint32 i = 0; i < insn->GetOperandSize(); ++i) {
1935 if (!md->GetOpndDes(i)->IsRegDef()) {
1936 continue;
1937 }
1938 auto ®Opnd = static_cast<RegOperand&>(insn->GetOperand(i));
1939 if (regOpnd.GetBaseRefOpnd() != nullptr) {
1940 // set the base reference of derived reference for stackmap
1941 derivedRef2Base[regOpnd.GetRegisterNumber()] = regOpnd.GetBaseRefOpnd();
1942 }
1943 }
1944 }
1945 }
1946 }
1947
/*
 * For every stackmap insn, record the stack-slot offsets (GC roots) of all
 * live reference registers into the insn's reference map. Derived references
 * additionally record their base reference's slot.
 */
void LSRALinearScanRegAllocator::CollectReferenceMap()
{
    RA_TIMER_REGISTER(lsra, "LSRA CollectReferenceMap");
    const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "===========reference map stack info================\n";
    }
#endif

    for (auto *insn : referenceMapInsns) {
        for (auto regNO : insn->GetStackMapLiveIn()) {
            // only GC reference registers belong in the reference map
            if (!cgFunc->IsRegReference(regNO)) {
                continue;
            }

            auto *li = liveIntervalsArray[regNO];
            if (li == nullptr) {
                continue;
            }

            // references that live on the stack (spilled, or saved around calls)
            if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
                auto itr = derivedRef2Base.find(regNO);
                if (itr != derivedRef2Base.end()) {
                    // derived reference: record the base reference's slot first
                    auto baseRegNum = (itr->second)->GetRegisterNumber();
                    MemOperand *baseRegMemOpnd = cgFunc->GetOrCreatSpillMem(baseRegNum, k64BitSize);
                    int64 baseRefMemoffset = baseRegMemOpnd->GetOffsetImmediate()->GetOffsetValue();
                    insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(baseRefMemoffset);
#ifdef ARK_LITECG_DEBUG
                    if (needDump) {
                        LogInfo::MapleLogger() << "--------insn id: " << insn->GetId()
                                               << " base regNO: " << baseRegNum << " offset: "
                                               << baseRefMemoffset << std::endl;
                    }
#endif
                }
                MemOperand *memOperand = cgFunc->GetOrCreatSpillMem(regNO, k64BitSize);
                int64 offset = memOperand->GetOffsetImmediate()->GetOffsetValue();
                insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
                // NOTE(review): a non-derived reference records the same offset
                // twice — presumably a (base, derived) pair with base == derived;
                // confirm against the runtime's reference-map consumer.
                if (itr == derivedRef2Base.end()) {
                    insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
                }
#ifdef ARK_LITECG_DEBUG
                if (needDump) {
                    LogInfo::MapleLogger() << "--------insn id: " << insn->GetId() << " regNO: " << regNO
                                           << " offset: " << offset << std::endl;
                }
#endif
            } else {
                /* li->GetAssignedReg - R0/RAX? */
                // in-register reference roots are not supported yet; the call
                // below is intentionally unreachable until they are.
                CHECK_FATAL(false, "not support currently");
                insn->GetStackMap()->GetReferenceMap().ReocordRegisterRoots(li->GetAssignedReg());
            }
        }
    }

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "===========reference map stack info end================\n";
    }
    if (needDump) {
        LogInfo::MapleLogger() << "===========reference map info================\n";
        for (auto *insn : referenceMapInsns) {
            LogInfo::MapleLogger() << " referenceMap insn: ";
            insn->Dump();
            insn->GetStackMap()->GetReferenceMap().Dump();
        }
    }
#endif
}
2018
SolveRegOpndDeoptInfo(const RegOperand & regOpnd,DeoptInfo & deoptInfo,int32 deoptVregNO) const2019 void LSRALinearScanRegAllocator::SolveRegOpndDeoptInfo(const RegOperand ®Opnd, DeoptInfo &deoptInfo,
2020 int32 deoptVregNO) const
2021 {
2022 if (!regInfo->IsVirtualRegister(regOpnd)) {
2023 // Get Register No
2024 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, 0}));
2025 return;
2026 }
2027 // process virtual RegOperand
2028 regno_t vRegNO = regOpnd.GetRegisterNumber();
2029 LiveInterval *li = liveIntervalsArray[vRegNO];
2030 if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
2031 MemOperand *memOpnd = cgFunc->GetOrCreatSpillMem(vRegNO, regOpnd.GetSize());
2032 SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(memOpnd)), deoptInfo, deoptVregNO);
2033 } else {
2034 // Get Register NO
2035 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, li->GetAssignedReg()}));
2036 }
2037 }
2038
SolveMemOpndDeoptInfo(const MemOperand & memOpnd,DeoptInfo & deoptInfo,int32 deoptVregNO) const2039 void LSRALinearScanRegAllocator::SolveMemOpndDeoptInfo(const MemOperand &memOpnd, DeoptInfo &deoptInfo,
2040 int32 deoptVregNO) const
2041 {
2042 int64 offset = memOpnd.GetOffsetImmediate()->GetOffsetValue();
2043 deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kOnStack, offset}));
2044 }
2045
CollectDeoptInfo()2046 void LSRALinearScanRegAllocator::CollectDeoptInfo()
2047 {
2048 RA_TIMER_REGISTER(lsra, "LSRA CollectDeoptInfo");
2049 const auto referenceMapInsns = cgFunc->GetStackMapInsns();
2050 for (auto *insn : referenceMapInsns) {
2051 auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
2052 const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
2053 if (deoptBundleInfo.empty()) {
2054 continue;
2055 }
2056 for (const auto &item : deoptBundleInfo) {
2057 const auto *opnd = item.second;
2058 if (opnd->IsRegister()) {
2059 SolveRegOpndDeoptInfo(*static_cast<const RegOperand *>(opnd), deoptInfo, item.first);
2060 continue;
2061 }
2062 if (opnd->IsImmediate()) {
2063 const auto *immOpnd = static_cast<const ImmOperand *>(opnd);
2064 deoptInfo.RecordDeoptVreg2LocationInfo(item.first, LocationInfo({kInConstValue, immOpnd->GetValue()}));
2065 continue;
2066 }
2067 if (opnd->IsMemoryAccessOperand()) {
2068 SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(opnd)), deoptInfo, item.first);
2069 continue;
2070 }
2071 DEBUG_ASSERT(false, "can't reach here!");
2072 }
2073 }
2074 #ifdef ARK_LITECG_DEBUG
2075 if (needDump) {
2076 LogInfo::MapleLogger() << "===========deopt info================\n";
2077 for (auto *insn : referenceMapInsns) {
2078 LogInfo::MapleLogger() << "---- deoptInfo insn: ";
2079 insn->Dump();
2080 insn->GetStackMap()->GetDeoptInfo().Dump();
2081 }
2082 }
2083 #endif
2084 }
2085
SetAllocMode()2086 void LSRALinearScanRegAllocator::SetAllocMode()
2087 {
2088 #ifdef ARK_LITECG_DEBUG
2089 if (needDump) {
2090 if (spillAll) {
2091 LogInfo::MapleLogger() << "spillAll mode on\n";
2092 }
2093 }
2094 #endif
2095 }
2096
LinearScanRegAllocator()2097 void LSRALinearScanRegAllocator::LinearScanRegAllocator()
2098 {
2099 RA_TIMER_REGISTER(lsra, "LSRA LinearScanRegAllocator");
2100 #ifdef ARK_LITECG_DEBUG
2101 if (needDump) {
2102 PrintParamQueue("Initial param queue");
2103 PrintCallQueue("Initial call queue");
2104 }
2105 #endif
2106 freeUntilPos.resize(regInfo->GetAllRegNum(), UINT32_MAX);
2107 MapleVector<uint32> initialPosVec(freeUntilPos);
2108 uint32 curInsnID = 0;
2109
2110 while (!liQue.empty()) {
2111 LiveInterval *li = liQue.front();
2112 liQue.pop_front();
2113 if (li->GetRangesSize() == 0) {
2114 /* range building has been skiped */
2115 li->AddRange(li->GetFirstDef(), li->GetLastUse());
2116 }
2117 li->InitRangeFinder();
2118 #ifdef ARK_LITECG_DEBUG
2119 if (needDump) {
2120 LogInfo::MapleLogger() << "======Alloc R" << li->GetRegNO() << "======"
2121 << "\n";
2122 }
2123 #endif
2124 blockForbiddenMask = 0;
2125 freeUntilPos = initialPosVec;
2126 DEBUG_ASSERT(li->GetFirstDef() >= curInsnID, "wrong li order");
2127 curInsnID = li->GetFirstDef();
2128 RetireActive(*li, curInsnID);
2129 UpdateCallQueueAtRetirement(curInsnID);
2130 UpdateActiveAllocateInfo(*li);
2131 UpdateParamAllocateInfo(*li);
2132 #ifdef ARK_LITECG_DEBUG
2133 if (needDump) {
2134 DebugCheckActiveList();
2135 LogInfo::MapleLogger() << "freeUntilPos:";
2136 for (uint32 i = 0; i < freeUntilPos.size(); ++i) {
2137 LogInfo::MapleLogger() << "[" << i << "," << freeUntilPos[i] << "], ";
2138 }
2139 LogInfo::MapleLogger() << "\n";
2140 }
2141 #endif
2142 AssignPhysRegsForLi(*li);
2143 }
2144 }
2145
CollectStackMapInfo()2146 void LSRALinearScanRegAllocator::CollectStackMapInfo()
2147 {
2148 CollectReferenceMap();
2149 CollectDeoptInfo();
2150 }
2151
/*
 * Main entrance for the LSRA register allocator.
 * Phase order is significant: derived-reference info must be set before live
 * intervals are computed, stackmap spilling before the scan, and stackmap
 * collection after registers are finalized.
 */
bool LSRALinearScanRegAllocator::AllocateRegisters()
{
    cgFunc->SetIsAfterRegAlloc();
    // one (possibly null) live interval slot per virtual register
    liveIntervalsArray.resize(cgFunc->GetMaxVReg());
    regInfo->Fini();
    SetAllocMode();
#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        const MIRModule &mirModule = cgFunc->GetMirModule();
        DotGenerator::GenerateDot("RA", *cgFunc, mirModule);
        DotGenerator::GenerateDot("RAe", *cgFunc, mirModule);
        LogInfo::MapleLogger() << "Entering LinearScanRegAllocator: " << cgFunc->GetName() << "\n";
    }
#endif

    if (!cgFunc->IsStackMapComputed()) {
        // record derived-ref -> base-ref mapping before interval computation
        SetStackMapDerivedInfo();
    }

    ComputeLiveInterval();

    if (!cgFunc->IsStackMapComputed()) {
        // force references live across stackmap insns onto the stack
        SpillStackMapInfo();
    }

    InitFreeRegPool();

    LinearScanRegAllocator();

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        PrintAllLiveRanges();
    }
#endif

    // rewrite vregs to pregs, inserting spill/reload code
    FinalizeRegisters();

    if (!cgFunc->IsStackMapComputed()) {
        CollectStackMapInfo();
    }

#ifdef ARK_LITECG_DEBUG
    if (needDump) {
        LogInfo::MapleLogger() << "Total " << spillCount << " spillCount in " << cgFunc->GetName() << " \n";
        LogInfo::MapleLogger() << "Total " << reloadCount << " reloadCount\n";
        LogInfo::MapleLogger() << "Total "
                               << "(" << spillCount << "+ " << callerSaveSpillCount
                               << ") = " << (spillCount + callerSaveSpillCount) << " SPILL\n";
        LogInfo::MapleLogger() << "Total "
                               << "(" << reloadCount << "+ " << callerSaveReloadCount
                               << ") = " << (reloadCount + callerSaveReloadCount) << " RELOAD\n";
        // ratio of inserted spill/reload insns to the pre-RA insn count
        // NOTE(review): divides by maxInsnNum without a zero check (debug-only path)
        uint32_t insertInsn = spillCount + callerSaveSpillCount + reloadCount + callerSaveReloadCount;
        float rate = (float(insertInsn) / float(maxInsnNum));
        LogInfo::MapleLogger() << "insn Num Befor RA:" << maxInsnNum << ", insert " << insertInsn << " insns: "
                               << ", insertInsn/insnNumBeforRA: " << rate << "\n";
    }
#endif

    bfs = nullptr; /* bfs is not utilized outside the function. */

    return true;
}
2215
2216 } /* namespace maplebe */
2217